---
title: "Causal ML Book Ch10.2-10.3 (2)"
author: "Ryuki Kobayashi"
date: "2024/11/25"
output: html_notebook
---

### The Effect of 401(k) Eligibility on Net Financial Assets

- Estimation code and results

#### **Data**

- The data are included in the `hdm` package and can be downloaded with it

```{r}
# install.packages("renv")
# renv::restore()
# remotes::install_github("mlr-org/mlr3extralearners", force = TRUE) # needed to run boosting

pacman::p_load(
  xtable,
  hdm,
  sandwich,
  ggplot2,
  randomForest,
  data.table,
  glmnet,
  rpart,
  gbm,
  DoubleML,
  mlr3learners,
  mlr3,
  ranger,
  mlr3extralearners,
  mboost
)
```


```{r}
data(pension)
data <- pension
dim(data)
```


```{r}
help(pension)
```


```{r}
hist_e401 <- ggplot(data, aes(x = e401, fill = factor(e401))) +
  geom_bar()
hist_e401
```


```{r}
dens_net_tfa <- ggplot(data, aes(x = net_tfa, color = factor(e401), fill = factor(e401))) +
  geom_density() +
  xlim(c(-20000, 150000)) +
  facet_wrap(. ~ e401)

dens_net_tfa
```


```{r}
e1 <- data[data$e401 == 1, ]
e0 <- data[data$e401 == 0, ]
round(mean(e1$net_tfa) - mean(e0$net_tfa), 0)
```


```{r}
p1 <- data[data$p401 == 1, ]
p0 <- data[data$p401 == 0, ]
round(mean(p1$net_tfa) - mean(p0$net_tfa), 0)
```


```{r}
# outcome variable
y <- data[, "net_tfa"]
# treatment variable
D <- data[, "e401"]
D2 <- data[, "p401"]
D3 <- data[, "a401"]

columns_to_drop <- c(
  "e401", "p401", "a401", "tw", "tfa", "net_tfa", "tfa_he",
  "hval", "hmort", "hequity",
  "nifa", "net_nifa", "net_n401", "ira",
  "dum91", "icat", "ecat", "zhat",
  "i1", "i2", "i3", "i4", "i5", "i6", "i7",
  "a1", "a2", "a3", "a4", "a5"
)

# covariates
X <- data[, !(names(data) %in% columns_to_drop)]
```


```{r}
# Constructing the controls
x_formula <- paste("~ 0 + poly(age, 6, raw=TRUE) + poly(inc, 8, raw=TRUE) + poly(educ, 4, raw=TRUE) ",
                   "+ poly(fsize, 2, raw=TRUE) + male + marr + twoearn + db + pira + hown")
X <- as.data.table(model.frame(x_formula, X))
head(X)
```

---

#### **Partially Linear Model (PLM)**

```{r}
dml2_for_plm <- function(x, d, y, dreg, yreg, nfold = 3, method = "regression") {
  nobs <- nrow(x) # number of observations
  foldid <- rep.int(1:nfold, times = ceiling(nobs / nfold))[sample.int(nobs)] # define folds indices
  I <- split(1:nobs, foldid) # split observation indices into folds
  ytil <- dtil <- rep(NA, nobs)
  cat("fold: ")
  for (b in seq_along(I)) {
    if (method == "regression") {
      dfit <- dreg(x[-I[[b]], ], d[-I[[b]]]) # take a fold out
      yfit <- yreg(x[-I[[b]], ], y[-I[[b]]]) # take a fold out
      dhat <- predict(dfit, x[I[[b]], ], type = "response") # predict the left-out fold
      yhat <- predict(yfit, x[I[[b]], ], type = "response") # predict the left-out fold
      dtil[I[[b]]] <- (d[I[[b]]] - dhat) # record residual for the left-out fold
      ytil[I[[b]]] <- (y[I[[b]]] - yhat) # record residual for the left-out fold
    } else if (method == "randomforest") {
      dfit <- dreg(x[-I[[b]], ], as.factor(d)[-I[[b]]]) # take a fold out
      yfit <- yreg(x[-I[[b]], ], y[-I[[b]]]) # take a fold out
      dhat <- predict(dfit, x[I[[b]], ], type = "prob")[, 2] # predict the left-out fold
      yhat <- predict(yfit, x[I[[b]], ], type = "response") # predict the left-out fold
      dtil[I[[b]]] <- (d[I[[b]]] - dhat) # record residual for the left-out fold
      ytil[I[[b]]] <- (y[I[[b]]] - yhat) # record residual for the left-out fold
    } else if (method == "decisiontrees") {
      dfit <- dreg(x[-I[[b]], ], as.factor(d)[-I[[b]]]) # take a fold out
      yfit <- yreg(x[-I[[b]], ], y[-I[[b]]]) # take a fold out
      dhat <- predict(dfit, x[I[[b]], ])[, 2] # predict the left-out fold
      yhat <- predict(yfit, x[I[[b]], ]) # predict the left-out fold
      dtil[I[[b]]] <- (d[I[[b]]] - dhat) # record residual for the left-out fold
      ytil[I[[b]]] <- (y[I[[b]]] - yhat) # record residual for the left-out fold
    } else if (method == "boostedtrees") {
      dfit <- dreg(x[-I[[b]], ], d[-I[[b]]]) # take a fold out
      yfit <- yreg(x[-I[[b]], ], y[-I[[b]]]) # take a fold out
      dhat <- predict(dfit, x[I[[b]], ], type = "response") # predict the left-out fold
      yhat <- predict(yfit, x[I[[b]], ], type = "response") # predict the left-out fold
      dtil[I[[b]]] <- (d[I[[b]]] - dhat) # record residual for the left-out fold
      ytil[I[[b]]] <- (y[I[[b]]] - yhat) # record residual for the left-out fold
    }
    cat(b, " ")
  }
  rfit <- lm(ytil ~ dtil) # estimate the main parameter by regressing one residual on the other
  coef_est <- coef(rfit)[2] # extract coefficient
  se <- sqrt(vcovHC(rfit)[2, 2]) # record robust standard error
  cat(sprintf("\ncoef (se) = %g (%g)\n", coef_est, se)) # printing output
  return(list(coef_est = coef_est, se = se, dtil = dtil, ytil = ytil)) # save output and residuals
}
```
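
For reference, `dml2_for_plm` implements the cross-fitted partialling-out (DML2) estimator for the partially linear model, sketched here in the chapter's usual notation:

$$
Y = D\,\theta_0 + g_0(X) + \varepsilon, \qquad D = m_0(X) + v .
$$

Each fold's held-out predictions give residuals $\tilde{Y}_i = Y_i - \hat{\ell}(X_i)$ (with $\hat{\ell}(X) \approx E[Y \mid X]$) and $\tilde{D}_i = D_i - \hat{m}(X_i)$, and the final step `lm(ytil ~ dtil)` returns the OLS slope

$$
\hat{\theta} \;=\; \frac{\sum_i (\tilde{D}_i - \bar{\tilde{D}})(\tilde{Y}_i - \bar{\tilde{Y}})}{\sum_i (\tilde{D}_i - \bar{\tilde{D}})^2},
$$

with a heteroskedasticity-robust standard error from `vcovHC`.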


```{r}
summaryPLR <- function(point, stderr, resD, resy, name) {
  data <- data.frame(
    estimate = point, # point estimate
    stderr = stderr, # standard error
    lower = point - 1.96 * stderr, # lower end of 95% confidence interval
    upper = point + 1.96 * stderr, # upper end of 95% confidence interval
    `rmse y` = sqrt(mean(resy^2)), # RMSE of model that predicts outcome y
    `rmse D` = sqrt(mean(resD^2)), # RMSE of model that predicts treatment D
    `accuracy D` = mean(abs(resD) < 0.5) # binary classification accuracy of model for D
  )
  rownames(data) <- name
  return(data)
}
```

**Double Lasso**

- Case where the treatment is estimated with a linear probability model (Gaussian functional form)

```{r}
# DML with LassoCV
set.seed(123)
cat(sprintf("\nDML with Lasso CV \n"))

dreg_lasso_cv <- function(x, d) {
  cv.glmnet(x, d, family = "gaussian", alpha = 1, nfolds = 5)
}
yreg_lasso_cv <- function(x, y) {
  cv.glmnet(x, y, family = "gaussian", alpha = 1, nfolds = 5)
}

dml2_results <- dml2_for_plm(as.matrix(X), D, y, dreg_lasso_cv, yreg_lasso_cv, nfold = 5)

sum_lasso_cv <- summaryPLR(dml2_results$coef_est, dml2_results$se, dml2_results$dtil,
                           dml2_results$ytil, name = "LassoCV")
tableplr <- data.frame()
tableplr <- rbind(sum_lasso_cv)
tableplr
``` 


```{r}
# Because residuals are output, reconstruct fitted values for use in ensemble
dhat_lasso <- D - dml2_results$dtil
yhat_lasso <- y - dml2_results$ytil
```
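
In other words, since the function returns the cross-fitted residuals $\tilde{D} = D - \hat{D}$ and $\tilde{Y} = Y - \hat{Y}$, the fitted values needed for the ensemble below are recovered as $\hat{D} = D - \tilde{D}$ and $\hat{Y} = Y - \tilde{Y}$.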

- Case where logistic regression is used to estimate the treatment

```{r}
# DML with Lasso/Logistic
set.seed(123)
cat(sprintf("\nDML with Lasso/Logistic \n"))

dreg_logistic_cv <- function(x, d) {
  cv.glmnet(x, d, family = "binomial", alpha = 0, nfolds = 5)
}
yreg_lasso_cv <- function(x, y) {
  cv.glmnet(x, y, family = "gaussian", alpha = 1, nfolds = 5)
}

dml2_results <- dml2_for_plm(as.matrix(X), D, y, dreg_logistic_cv, yreg_lasso_cv, nfold = 5)
sum_lasso_logistic_cv <- summaryPLR(dml2_results$coef_est, dml2_results$se, dml2_results$dtil,
                                    dml2_results$ytil, name = "LassoCV/LogisticCV")
tableplr <- rbind(tableplr, sum_lasso_logistic_cv)
tableplr
```

```{r}
# Because residuals are output, reconstruct fitted values for use in ensemble
dhat_lasso_logistic <- D - dml2_results$dtil
yhat_lasso_logistic <- y - dml2_results$ytil
```


**Random Forest**

```{r}
# DML with Random Forest
set.seed(123)
cat(sprintf("\nDML with Random Forest \n"))

dreg_rf <- function(x, d) {
  randomForest(x, d, ntree = 1000, nodesize = 10)
} # ML method=Forest
yreg_rf <- function(x, y) {
  randomForest(x, y, ntree = 1000, nodesize = 10)
} # ML method=Forest

dml2_results <- dml2_for_plm(as.matrix(X), D, y, dreg_rf, yreg_rf, nfold = 5, method = "randomforest")
sum_rf <- summaryPLR(dml2_results$coef_est, dml2_results$se, dml2_results$dtil,
                     dml2_results$ytil, name = "Random Forest")
tableplr <- rbind(tableplr, sum_rf)
tableplr
```


```{r}
# Because residuals are output, reconstruct fitted values for use in ensemble
dhat_rf <- D - dml2_results$dtil
yhat_rf <- y - dml2_results$ytil
```

**Decision Trees**

```{r}
# DML with Decision Trees
set.seed(123)
cat(sprintf("\nDML with Decision Trees \n"))

dreg_tr <- function(x, d) {
  rpart(as.formula("D~."), cbind(data.frame(D = d), x), method = "class", minbucket = 10, cp = 0.001)
}
yreg_tr <- function(x, y) {
  rpart(as.formula("y~."), cbind(data.frame(y = y), x), minbucket = 10, cp = 0.001)
}

# decision tree takes in X as dataframe, not matrix/array
dml2_results <- dml2_for_plm(X, D, y, dreg_tr, yreg_tr, nfold = 5, method = "decisiontrees")
sum_tr <- summaryPLR(dml2_results$coef_est, dml2_results$se, dml2_results$dtil,
                     dml2_results$ytil, name = "Decision Trees")
tableplr <- rbind(tableplr, sum_tr)
tableplr
```

```{r}
# Because residuals are output, reconstruct fitted values for use in ensemble
dhat_tr <- D - dml2_results$dtil
yhat_tr <- y - dml2_results$ytil
```

**Boosted Trees**

```{r}
# DML with Boosted Trees
set.seed(123)
cat(sprintf("\nDML with Boosted Trees \n"))

# NB: early stopping cannot easily be implemented with gbm
## set n.trees = best, where best <- gbm.perf(dreg_boost, plot.it = FALSE)
dreg_boost <- function(x, d) {
  gbm(as.formula("D~."), cbind(data.frame(D = d), x), distribution = "bernoulli",
      interaction.depth = 2, n.trees = 100, shrinkage = .1)
}
yreg_boost <- function(x, y) {
  gbm(as.formula("y~."), cbind(data.frame(y = y), x), distribution = "gaussian",
      interaction.depth = 2, n.trees = 100, shrinkage = .1)
}

# passing these through regression as type="response", and D should not be factor!
dml2_results <- dml2_for_plm(X, D, y, dreg_boost, yreg_boost, nfold = 5, method = "boostedtrees")
sum_boost <- summaryPLR(dml2_results$coef_est, dml2_results$se, dml2_results$dtil,
                        dml2_results$ytil, name = "Boosted Trees")
tableplr <- rbind(tableplr, sum_boost)
tableplr
```

```{r}
# Because residuals are output, reconstruct fitted values for use in ensemble
dhat_boost <- D - dml2_results$dtil
yhat_boost <- y - dml2_results$ytil
```

**Ensemble**

```{r}
# Best fit is boosted trees for both D and Y

sum_best <- summaryPLR(dml2_results$coef_est, dml2_results$se, dml2_results$dtil,
                       dml2_results$ytil, name = "Best")
tableplr <- rbind(tableplr, sum_best)
tableplr
```

```{r}
# Least squares model average

ma_dtil <- lm(D ~ dhat_lasso + dhat_lasso_logistic + dhat_rf + dhat_tr + dhat_boost)$residuals
ma_ytil <- lm(y ~ yhat_lasso + yhat_lasso_logistic + yhat_rf + yhat_tr + yhat_boost)$residuals

rfit <- lm(ma_ytil ~ ma_dtil) # estimate the main parameter by regressing one residual on the other
coef_est <- coef(rfit)[2] # extract coefficient
se <- sqrt(vcovHC(rfit)[2, 2]) # record robust standard error

sum.ma <- summaryPLR(coef_est, se, ma_dtil, ma_ytil, name = "Model Average")
tableplr <- rbind(tableplr, sum.ma)
tableplr
```
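
A sketch of what this least-squares model average does: the OLS fits above choose stacking weights $\hat{\alpha}$ (for $D$) and $\hat{\beta}$ (for $Y$) over the individual learners' cross-fitted predictions $\hat{D}^{(k)}, \hat{Y}^{(k)}$, so the residuals fed into the final regression are

$$
\tilde{D}^{\mathrm{MA}}_i = D_i - \hat{\alpha}_0 - \sum_k \hat{\alpha}_k \hat{D}^{(k)}_i,
\qquad
\tilde{Y}^{\mathrm{MA}}_i = Y_i - \hat{\beta}_0 - \sum_k \hat{\beta}_k \hat{Y}^{(k)}_i,
$$

and $\theta$ is again estimated by regressing $\tilde{Y}^{\mathrm{MA}}$ on $\tilde{D}^{\mathrm{MA}}$ with a robust standard error.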


---

#### **Interactive Regression Model (IRM)**

```{r}
dml2_for_irm <- function(x, d, y, dreg, yreg0, yreg1, trimming = 0.01, nfold = 5, method = "regression") {
  yhat0 <- rep(0, length(y))
  yhat1 <- rep(0, length(y))
  Dhat <- rep(0, length(d))

  nobs <- nrow(x) # number of observations
  foldid <- rep.int(1:nfold, times = ceiling(nobs / nfold))[sample.int(nobs)] # define folds indices
  I <- split(1:nobs, foldid) # split observation indices into folds
  ytil <- dtil <- rep(NA, nobs)

  cat("fold: ")
  for (b in seq_along(I)) {
    # define helpful variables (NB: the body uses the globals X and D directly in several places, so the x and d arguments must be those same objects)
    Dnotb <- d[-I[[b]]]
    Xb <- X[I[[b]], ]
    Xnotb <- X[-I[[b]], ]

    # training dfs subsetted on the -I[[b]] fold
    XD0 <- X[-I[[b]], ][d[-I[[b]]] == 0]
    yD0 <- y[-I[[b]]][d[-I[[b]]] == 0]
    XD1 <- X[-I[[b]], ][d[-I[[b]]] == 1]
    yD1 <- y[-I[[b]]][d[-I[[b]]] == 1]

    if (method == "regression") {
      yfit0 <- yreg0(as.matrix(XD0), yD0)
      yfit1 <- yreg1(as.matrix(XD1), yD1)
      yhat0[I[[b]]] <- predict(yfit0, as.matrix(Xb)) # default is type = "response" for glmnet family gaussian
      yhat1[I[[b]]] <- predict(yfit1, as.matrix(Xb))
    } else if (method == "randomforest") {
      yfit0 <- yreg0(XD0, yD0)
      yfit1 <- yreg1(XD1, yD1)
      yhat0[I[[b]]] <- predict(yfit0, Xb) # default is type = "response" for rf
      yhat1[I[[b]]] <- predict(yfit1, Xb)
    } else if (method == "decisiontrees") {
      yfit0 <- yreg0(XD0, yD0)
      yfit1 <- yreg1(XD1, yD1)
      yhat0[I[[b]]] <- predict(yfit0, Xb) # default is type = "vector" for decision
      yhat1[I[[b]]] <- predict(yfit1, Xb)
    } else if (method == "boostedtrees") {
      yfit0 <- yreg0(as.data.frame(XD0), yD0)
      yfit1 <- yreg1(as.data.frame(XD1), yD1)
      yhat0[I[[b]]] <- predict(yfit0, Xb) # default is type = "response" for boosted
      yhat1[I[[b]]] <- predict(yfit1, Xb)
    }

    # propensity scores:
    if (method == "regression") {
      dfit_b <- dreg(as.matrix(Xnotb), Dnotb)
      dhat_b <- predict(dfit_b, as.matrix(Xb), type = "response") # default is type="link" for family binomial!
    } else if (method == "randomforest") {
      dfit_b <- dreg(Xnotb, as.factor(Dnotb))
      dhat_b <- predict(dfit_b, Xb, type = "prob")[, 2]
    } else if (method == "decisiontrees") {
      dfit_b <- dreg(Xnotb, Dnotb)
      dhat_b <- predict(dfit_b, Xb)[, 2]
    } else if (method == "boostedtrees") {
      dfit_b <- dreg(as.data.frame(Xnotb), Dnotb)
      dhat_b <- predict(dfit_b, Xb, type = "response")
    }
    dhat_b <- pmax(pmin(dhat_b, 1 - trimming), trimming) # trimming so scores are between [trimming, (1-trimming)]
    Dhat[I[[b]]] <- dhat_b

    cat(b, " ")
  }

  # Outcome prediction for the observed treatment arm
  yhat <- yhat0 * (1 - D) + yhat1 * D
  # residuals
  ytil <- y - yhat
  dtil <- D - Dhat
  # doubly robust quantity for every sample
  drhat <- yhat1 - yhat0 + (y - yhat) * (D / Dhat - (1 - D) / (1 - Dhat))
  coef_est <- mean(drhat)
  vari <- var(drhat)
  se <- sqrt(vari / nrow(X))
  cat("point", coef_est)
  cat("se", se)
  return(list(coef_est = coef_est, se = se, ytil = ytil, dtil = dtil, drhat = drhat,
              yhat0 = yhat0, yhat1 = yhat1, dhat = Dhat, yhat = yhat))
}

summaryIRM <- function(coef_est, se, ytil, dtil, drhat, name) {
  summary_data <- data.frame(
    estimate = coef_est, # point estimate
    se = se, # standard error
    lower = coef_est - 1.96 * se, # lower end of 95% confidence interval
    upper = coef_est + 1.96 * se, # upper end of 95% confidence interval
    rmse_y = sqrt(mean(ytil^2)), # RMSE of model that predicts outcome y
    rmse_D = sqrt(mean(dtil^2)), # RMSE of model that predicts treatment D
    accuracy_D = mean(abs(dtil) < 0.5) # binary classification accuracy of model for D
  )
  row.names(summary_data) <- name
  return(summary_data)
}
```
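
The doubly robust quantity `drhat` in `dml2_for_irm` is, roughly, the standard AIPW score for the ATE: with cross-fitted outcome regressions $\hat{g}_0, \hat{g}_1$ and a propensity score $\hat{m}$ trimmed to $[0.01,\, 0.99]$,

$$
\hat{\theta} \;=\; \frac{1}{n}\sum_{i=1}^{n}
\left[
\hat{g}_1(X_i) - \hat{g}_0(X_i)
+ \frac{D_i\,\bigl(Y_i - \hat{g}_1(X_i)\bigr)}{\hat{m}(X_i)}
- \frac{(1 - D_i)\,\bigl(Y_i - \hat{g}_0(X_i)\bigr)}{1 - \hat{m}(X_i)}
\right],
$$

and the reported standard error is the sample standard deviation of the bracketed terms divided by $\sqrt{n}$.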

**Double Lasso**

```{r}
# DML with Lasso/Logistic
set.seed(123)
cat(sprintf("\nDML with LassoCV/Logistic \n"))

dreg_lasso_cv <- function(x, d) {
  cv.glmnet(x, d, family = "binomial", alpha = 0, nfolds = 5)
}
yreg0_lasso_cv <- function(x, y) {
  cv.glmnet(x, y, family = "gaussian", alpha = 1, nfolds = 5)
}
yreg1_lasso_cv <- function(x, y) {
  cv.glmnet(x, y, family = "gaussian", alpha = 1, nfolds = 5)
}

# more folds seem to help stabilize finite-sample performance
dml2_results <- dml2_for_irm(X, D, y, dreg_lasso_cv, yreg0_lasso_cv, yreg1_lasso_cv, nfold = 5)
sum_lasso_cv <- summaryIRM(dml2_results$coef_est, dml2_results$se, dml2_results$ytil, dml2_results$dtil,
                           dml2_results$drhat, name = "LassoCVLogistic")
tableirm <- data.frame()
tableirm <- rbind(sum_lasso_cv)
tableirm

yhat0_lasso <- dml2_results$yhat0
yhat1_lasso <- dml2_results$yhat1
dhat_lasso <- dml2_results$dhat
yhat_lasso <- dml2_results$yhat
```

**Random Forest**

```{r}
# DML with Random Forest
set.seed(123)
cat(sprintf("\nDML with Random Forest \n"))

dreg_rf <- function(x, d) {
  randomForest(x, d, ntree = 1000, nodesize = 10)
} # ML method=Forest
yreg0_rf <- function(x, y) {
  randomForest(x, y, ntree = 1000, nodesize = 10)
} # ML method=Forest
yreg1_rf <- function(x, y) {
  randomForest(x, y, ntree = 1000, nodesize = 10)
} # ML method=Forest


dml2_results <- dml2_for_irm(as.matrix(X), D, y, dreg_rf, yreg0_rf, yreg1_rf, nfold = 5, method = "randomforest")
sum_rf <- summaryIRM(dml2_results$coef_est, dml2_results$se, dml2_results$ytil, dml2_results$dtil,
                     dml2_results$drhat, name = "Random Forest")
tableirm <- rbind(tableirm, sum_rf)
tableirm

yhat0_rf <- dml2_results$yhat0
yhat1_rf <- dml2_results$yhat1
dhat_rf <- dml2_results$dhat
yhat_rf <- dml2_results$yhat
```

**Decision Trees**

```{r}
# DML with Decision Trees
set.seed(123)
cat(sprintf("\nDML with Decision Trees \n"))

dreg_tr <- function(x, d) {
  rpart(as.formula("D~."), cbind(data.frame(D = d), x), method = "class", minbucket = 10, cp = 0.001)
}
yreg0_tr <- function(x, y) {
  rpart(as.formula("y~."), cbind(data.frame(y = y), x), minbucket = 10, cp = 0.001)
}
yreg1_tr <- function(x, y) {
  rpart(as.formula("y~."), cbind(data.frame(y = y), x), minbucket = 10, cp = 0.001)
}

dml2_results <- dml2_for_irm(X, D, y, dreg_tr, yreg0_tr, yreg1_tr, nfold = 5, method = "decisiontrees")
sum_tr <- summaryIRM(dml2_results$coef_est, dml2_results$se, dml2_results$ytil, dml2_results$dtil,
                     dml2_results$drhat, name = "Decision Trees")
tableirm <- rbind(tableirm, sum_tr)
tableirm

yhat0_tr <- dml2_results$yhat0
yhat1_tr <- dml2_results$yhat1
dhat_tr <- dml2_results$dhat
yhat_tr <- dml2_results$yhat
```

**Boosted Trees**

```{r}
# DML with Boosted Trees
set.seed(123)
cat(sprintf("\nDML with Boosted Trees \n"))

# NB: early stopping cannot easily be implemented with gbm
## set n.trees = best, where best <- gbm.perf(dreg_boost, plot.it = FALSE)
dreg_boost <- function(x, d) {
  gbm(as.formula("D~."), cbind(data.frame(D = d), x), distribution = "bernoulli",
      interaction.depth = 2, n.trees = 100, shrinkage = .1)
}
yreg0_boost <- function(x, y) {
  gbm(as.formula("y~."), cbind(data.frame(y = y), x), distribution = "gaussian",
      interaction.depth = 2, n.trees = 100, shrinkage = .1)
}
yreg1_boost <- function(x, y) {
  gbm(as.formula("y~."), cbind(data.frame(y = y), x), distribution = "gaussian",
      interaction.depth = 2, n.trees = 100, shrinkage = .1)
}

# passing these through regression as type="response", and D should not be factor!
dml2_results <- dml2_for_irm(X, D, y, dreg_boost, yreg0_boost, yreg1_boost, nfold = 5, method = "boostedtrees")
sum_boost <- summaryIRM(dml2_results$coef_est, dml2_results$se, dml2_results$ytil, dml2_results$dtil,
                        dml2_results$drhat, name = "Boosted Trees")
tableirm <- rbind(tableirm, sum_boost)
tableirm

yhat0_boost <- dml2_results$yhat0
yhat1_boost <- dml2_results$yhat1
dhat_boost <- dml2_results$dhat
yhat_boost <- dml2_results$yhat
```

**Ensemble**

```{r}
# Ensembles

# Best
# We'll look at the model that does best for Y overall. Could also use different models for Y0 and Y1
# Here, the best performance for Y is the random forest and for D the boosted trees

# residuals
ytil <- y - yhat_rf
dtil <- D - dhat_boost
# doubly robust quantity for every sample
drhat <- yhat1_rf - yhat0_rf + (y - yhat_rf) * (D / dhat_boost - (1 - D) / (1 - dhat_boost))
coef_est <- mean(drhat)
vari <- var(drhat)
se <- sqrt(vari / nrow(X))

sum_best <- summaryIRM(coef_est, se, ytil, dtil, drhat, name = "Best")
tableirm <- rbind(tableirm, sum_best)
tableirm
```

```{r}
# Least squares model average
# We'll look at weights that do the best job for Y overall. Could also use different weights for Y0 and Y1

ma_dw <- lm(D ~ dhat_lasso + dhat_rf + dhat_tr + dhat_boost)$coef
ma_yw <- lm(y ~ yhat_lasso + yhat_rf + yhat_tr + yhat_boost)$coef

Dhats <- cbind(as.matrix(rep(1, nrow(X))), dhat_lasso, dhat_rf, dhat_tr, dhat_boost)
Y0s <- cbind(as.matrix(rep(1, nrow(X))), yhat0_lasso, yhat0_rf, yhat0_tr, yhat0_boost)
Y1s <- cbind(as.matrix(rep(1, nrow(X))), yhat1_lasso, yhat1_rf, yhat1_tr, yhat1_boost)

dhat <- Dhats %*% as.matrix(ma_dw)
yhat0 <- Y0s %*% as.matrix(ma_yw)
yhat1 <- Y1s %*% as.matrix(ma_yw)

# Outcome prediction for the observed treatment arm
yhat <- yhat0 * (1 - D) + yhat1 * D
# residuals
ytil <- y - yhat
dtil <- D - dhat
# doubly robust quantity for every sample
drhat <- yhat1 - yhat0 + (y - yhat) * (D / dhat - (1 - D) / (1 - dhat))
coef_est <- mean(drhat)
vari <- var(drhat)
se <- sqrt(vari / nrow(X))

sum.ma <- summaryIRM(coef_est, se, ytil, dtil, drhat, name = "Model Average")
tableirm <- rbind(tableirm, sum.ma)
tableirm
```


---

### Estimation with the DoubleML package

- So far we have written the estimation functions ourselves, but packages such as `EconML` (Python) and `DoubleML` (R & Python) are available
  - [DoubleML package website](https://docs.doubleml.org/stable/index.html)
  - It uses the [mlr3 package](https://mlr3book.mlr-org.com) under the hood

**Data construction**

```{r}
# Constructing the data (as DoubleMLData)
formula_flex <- paste("net_tfa ~ e401 + poly(age, 6, raw=TRUE) + poly(inc, 8, raw=TRUE) ",
                      "+ poly(educ, 4, raw=TRUE) + poly(fsize, 2, raw=TRUE) + marr + twoearn + db + pira + hown")
model_flex <- as.data.table(model.frame(formula_flex, pension))
x_cols <- colnames(model_flex)[-c(1, 2)]
data_ml <- DoubleMLData$new(model_flex, y_col = "net_tfa", d_cols = "e401", x_cols = x_cols)

p <- dim(model_flex)[2] - 2
p
```


#### **Partially Linear Model (PLM)**

**Lasso**

```{r}
# Estimating the PLR
lgr::get_logger("mlr3")$set_threshold("warn")
lasso <- lrn("regr.cv_glmnet", nfolds = 5, s = "lambda.min")
lasso_class <- lrn("classif.cv_glmnet", nfolds = 5, s = "lambda.min")

dml_plr <- DoubleMLPLR$new(data_ml, ml_l = lasso, ml_m = lasso_class, n_folds = 5)
dml_plr$fit(store_predictions = TRUE)
dml_plr$summary()
lasso_plr <- dml_plr$coef
lasso_std_plr <- dml_plr$se
```

```{r}
dml_plr$params_names()
g_hat <- as.matrix(dml_plr$predictions$ml_l) # predictions of g_0
m_hat <- as.matrix(dml_plr$predictions$ml_m) # predictions of m_0
```

```{r}
# cross-fitted RMSE: outcome
y <- as.matrix(pension$net_tfa) # true observations
theta <- as.numeric(dml_plr$coef) # estimated regression coefficient
d <- as.matrix(pension$e401)
predictions_y <- as.matrix(d * theta) + g_hat # predictions for y
lasso_y_rmse <- sqrt(mean((y - predictions_y)^2))
lasso_y_rmse
```


```{r}
# cross-fitted RMSE: treatment
d <- as.matrix(pension$e401)
lasso_d_rmse <- sqrt(mean((d - m_hat)^2))
lasso_d_rmse

# cross-fitted ce: treatment
mean(ifelse(m_hat > 0.5, 1, 0) != d)
```

**Random Forest**

```{r}
# Random Forest
lgr::get_logger("mlr3")$set_threshold("warn")
randomForest <- lrn("regr.ranger") # NB: this masks the randomForest() function used in the earlier hand-rolled sections
random_forest_class <- lrn("classif.ranger")

dml_plr <- DoubleMLPLR$new(data_ml, ml_l = randomForest, ml_m = random_forest_class, n_folds = 5)
dml_plr$fit(store_predictions = TRUE) # set store_predictions=TRUE to evaluate the model
dml_plr$summary()
forest_plr <- dml_plr$coef
forest_std_plr <- dml_plr$se
```


```{r}
# Evaluation predictions
g_hat <- as.matrix(dml_plr$predictions$ml_l) # predictions of g_0
m_hat <- as.matrix(dml_plr$predictions$ml_m) # predictions of m_0
theta <- as.numeric(dml_plr$coef) # estimated regression coefficient
predictions_y <- as.matrix(d * theta) + g_hat # predictions for y
forest_y_rmse <- sqrt(mean((y - predictions_y)^2))
forest_y_rmse

# cross-fitted RMSE: treatment
forest_d_rmse <- sqrt(mean((d - m_hat)^2))
forest_d_rmse

# cross-fitted ce: treatment
mean(ifelse(m_hat > 0.5, 1, 0) != d)
```


**Decision Trees**

```{r}
# Trees
lgr::get_logger("mlr3")$set_threshold("warn")

trees <- lrn("regr.rpart")
trees_class <- lrn("classif.rpart")

dml_plr <- DoubleMLPLR$new(data_ml, ml_l = trees, ml_m = trees_class, n_folds = 5)
dml_plr$fit(store_predictions = TRUE)
dml_plr$summary()
tree_plr <- dml_plr$coef
tree_std_plr <- dml_plr$se

# Evaluation predictions
g_hat <- as.matrix(dml_plr$predictions$ml_l) # predictions of g_0
m_hat <- as.matrix(dml_plr$predictions$ml_m) # predictions of m_0
theta <- as.numeric(dml_plr$coef) # estimated regression coefficient
predictions_y <- as.matrix(d * theta) + g_hat # predictions for y
tree_y_rmse <- sqrt(mean((y - predictions_y)^2))
tree_y_rmse

# cross-fitted RMSE: treatment
tree_d_rmse <- sqrt(mean((d - m_hat)^2))
tree_d_rmse

# cross-fitted ce: treatment
mean(ifelse(m_hat > 0.5, 1, 0) != d)
```


**Boosted Trees**

```{r}
# Boosting
boost <- lrn("regr.glmboost")
boost_class <- lrn("classif.glmboost")

dml_plr <- DoubleMLPLR$new(data_ml, ml_l = boost, ml_m = boost_class, n_folds = 5)
dml_plr$fit(store_predictions = TRUE)
dml_plr$summary()
boost_plr <- dml_plr$coef
boost_std_plr <- dml_plr$se

# Evaluation predictions
g_hat <- as.matrix(dml_plr$predictions$ml_l) # predictions of g_0
m_hat <- as.matrix(dml_plr$predictions$ml_m) # predictions of m_0
theta <- as.numeric(dml_plr$coef) # estimated regression coefficient
predictions_y <- as.matrix(d * theta) + g_hat # predictions for y
boost_y_rmse <- sqrt(mean((y - predictions_y)^2))
boost_y_rmse

# cross-fitted RMSE: treatment
boost_d_rmse <- sqrt(mean((d - m_hat)^2))
boost_d_rmse

# cross-fitted ce: treatment
mean(ifelse(m_hat > 0.5, 1, 0) != d)
```

**Summary**

```{r}
table <- matrix(0, 4, 4)
table[1, 1:4] <- c(lasso_plr, forest_plr, tree_plr, boost_plr)
table[2, 1:4] <- c(lasso_std_plr, forest_std_plr, tree_std_plr, boost_std_plr)
table[3, 1:4] <- c(lasso_y_rmse, forest_y_rmse, tree_y_rmse, boost_y_rmse)
table[4, 1:4] <- c(lasso_d_rmse, forest_d_rmse, tree_d_rmse, boost_d_rmse)
rownames(table) <- c("Estimate", "Std.Error", "RMSE Y", "RMSE D")
colnames(table) <- c("Lasso", "Random Forest", "Trees", "Boosting")
tab <- xtable(table, digits = 2)
tab

lasso_plr
```

---

#### **Interactive Regression Model (IRM)**

```{r}
lgr::get_logger("mlr3")$set_threshold("warn")
dml_irm <- DoubleMLIRM$new(data_ml,
  ml_g = lasso,
  ml_m = lasso_class,
  trimming_threshold = 0.01, n_folds = 5
)
dml_irm$fit(store_predictions = TRUE)
dml_irm$summary()
lasso_irm <- dml_irm$coef
lasso_std_irm <- dml_irm$se


# predictions
dml_irm$params_names()
g0_hat <- as.matrix(dml_irm$predictions$ml_g0) # predictions of g_0(D=0, X)
g1_hat <- as.matrix(dml_irm$predictions$ml_g1) # predictions of g_0(D=1, X)
g_hat <- d * g1_hat + (1 - d) * g0_hat # predictions of g_0
m_hat <- as.matrix(dml_irm$predictions$ml_m) # predictions of m_0
```

```{r}
# cross-fitted RMSE: outcome
y <- as.matrix(pension$net_tfa) # true observations
d <- as.matrix(pension$e401)
lasso_y_irm <- sqrt(mean((y - g_hat)^2))
lasso_y_irm

# cross-fitted RMSE: treatment
lasso_d_irm <- sqrt(mean((d - m_hat)^2))
lasso_d_irm

# cross-fitted ce: treatment
mean(ifelse(m_hat > 0.5, 1, 0) != d)
```



```{r}
##### forest #####

dml_irm <- DoubleMLIRM$new(data_ml,
  ml_g = randomForest,
  ml_m = random_forest_class,
  trimming_threshold = 0.01, n_folds = 5
)
dml_irm$fit(store_predictions = TRUE)
dml_irm$summary()
forest_irm <- dml_irm$coef
forest_std_irm <- dml_irm$se

# predictions
g0_hat <- as.matrix(dml_irm$predictions$ml_g0) # predictions of g_0(D=0, X)
g1_hat <- as.matrix(dml_irm$predictions$ml_g1) # predictions of g_0(D=1, X)
g_hat <- d * g1_hat + (1 - d) * g0_hat # predictions of g_0
m_hat <- as.matrix(dml_irm$predictions$ml_m) # predictions of m_0

# cross-fitted RMSE: outcome
y <- as.matrix(pension$net_tfa) # true observations
d <- as.matrix(pension$e401)
forest_y_irm <- sqrt(mean((y - g_hat)^2))
forest_y_irm

# cross-fitted RMSE: treatment
forest_d_irm <- sqrt(mean((d - m_hat)^2))
forest_d_irm

# cross-fitted ce: treatment
mean(ifelse(m_hat > 0.5, 1, 0) != d)

##### trees #####

dml_irm <- DoubleMLIRM$new(data_ml,
  ml_g = trees, ml_m = trees_class,
  trimming_threshold = 0.01, n_folds = 5
)
dml_irm$fit(store_predictions = TRUE)
dml_irm$summary()
tree_irm <- dml_irm$coef
tree_std_irm <- dml_irm$se

# predictions
g0_hat <- as.matrix(dml_irm$predictions$ml_g0) # predictions of g_0(D=0, X)
g1_hat <- as.matrix(dml_irm$predictions$ml_g1) # predictions of g_0(D=1, X)
g_hat <- d * g1_hat + (1 - d) * g0_hat # predictions of g_0
m_hat <- as.matrix(dml_irm$predictions$ml_m) # predictions of m_0

# cross-fitted RMSE: outcome
y <- as.matrix(pension$net_tfa) # true observations
d <- as.matrix(pension$e401)
tree_y_irm <- sqrt(mean((y - g_hat)^2))
tree_y_irm

# cross-fitted RMSE: treatment
tree_d_irm <- sqrt(mean((d - m_hat)^2))
tree_d_irm

# cross-fitted ce: treatment
mean(ifelse(m_hat > 0.5, 1, 0) != d)


##### boosting #####

dml_irm <- DoubleMLIRM$new(data_ml,
  ml_g = boost, ml_m = boost_class,
  trimming_threshold = 0.01, n_folds = 5
)
dml_irm$fit(store_predictions = TRUE)
dml_irm$summary()
boost_irm <- dml_irm$coef
boost_std_irm <- dml_irm$se

# predictions
g0_hat <- as.matrix(dml_irm$predictions$ml_g0) # predictions of g_0(D=0, X)
g1_hat <- as.matrix(dml_irm$predictions$ml_g1) # predictions of g_0(D=1, X)
g_hat <- d * g1_hat + (1 - d) * g0_hat # predictions of g_0
m_hat <- as.matrix(dml_irm$predictions$ml_m) # predictions of m_0

# cross-fitted RMSE: outcome
y <- as.matrix(pension$net_tfa) # true observations
d <- as.matrix(pension$e401)
boost_y_irm <- sqrt(mean((y - g_hat)^2))
boost_y_irm

# cross-fitted RMSE: treatment
boost_d_irm <- sqrt(mean((d - m_hat)^2))
boost_d_irm

# cross-fitted ce: treatment
mean(ifelse(m_hat > 0.5, 1, 0) != d)
```


**Summary**

```{r}
table <- matrix(0, 4, 4)
table[1, 1:4] <- c(lasso_irm, forest_irm, tree_irm, boost_irm)
table[2, 1:4] <- c(lasso_std_irm, forest_std_irm, tree_std_irm, boost_std_irm)
table[3, 1:4] <- c(lasso_y_irm, forest_y_irm, tree_y_irm, boost_y_irm)
table[4, 1:4] <- c(lasso_d_irm, forest_d_irm, tree_d_irm, boost_d_irm)
rownames(table) <- c("Estimate", "Std.Error", "RMSE Y", "RMSE D")
colnames(table) <- c("Lasso", "Random Forest", "Trees", "Boosting")
tab <- xtable(table, digits = 2)
tab
```


```{r}
lgr::get_logger("mlr3")$set_threshold("warn")
dml_irm <- DoubleMLIRM$new(data_ml,
  ml_g = randomForest,
  ml_m = lasso_class,
  trimming_threshold = 0.01, n_folds = 5
)
dml_irm$fit(store_predictions = TRUE)
dml_irm$summary()
best_irm <- dml_irm$coef
best_std_irm <- dml_irm$se
```

---

### **Results and Discussion**

#### **Table 10.4 Estimated Effect of 401(k) Eligibility on Net Financial Assets**

|                           | Lasso  | Tree   | Forest | Boost  | Best   | Ensemble |
|---------------------------|--------|--------|--------|--------|--------|----------|
| **A. Partially Linear Regression Model** |        |        |        |        |        |          |
| Estimate                  | 9418   | 8634   | 9112   | 8859   | 8859   | 9051     |
| Std. Error                | (1476) | (1303) | (1281) | (1321) | (1321) | (1315)   |
| RMSE D                    | 0.447  | 0.457  | 0.459  | 0.443  | 0.443  | 0.443    |
| RMSE Y                    | 58242  | 56819  | 55385  | 54153  | 54153  | 53917    |
| **B. Interactive Regression Model**     |        |        |        |        |        |          |
| Estimate                  | 8860   | 7856   | 8349   | 7871   | 8204   | 8146     |
| Std. Error                | (1347) | (1250) | (1502) | (1157) | (1144) | (1142)   |
| RMSE D                    | 0.448  | 0.457  | 0.459  | 0.443  | 0.443  | 0.443    |
| RMSE Y                    | 58300  | 54866  | 57293  | 55112  | 54866  | 53804    |

- Estimation notes
  - To keep extreme propensity scores from distorting the estimates, they are trimmed to lie between \(0.01\) and \(0.99\)

- **Summary of results**
   - In the model without covariates, the effect of 401(k) eligibility on net financial assets is estimated to be \$19,559
   - Controlling for covariates corrects the positive selection bias, in line with the theoretical prediction
   - The PLM and IRM results are broadly consistent with each other
   - The results are close to those obtained with the simple controls of Poterba et al., suggesting that those controls may be sufficient
- Analyzing how small changes to the causal diagram, of the kind we saw in the DAGs, affect identification is left to the sensitivity analysis in Ch. 12

