## Simulation script (R): compares estimators for a regression with an
## error-prone / partially observed covariate (true, feasible, naive,
## multiple-imputation, GMM, and mecor-calibrated models).
## Project package for GMM-based correction of predicted covariates
## (presumably provides predicted_covariates() used below -- confirm).
library(predictionError)
## Measurement-error correction models: mecor() and MeasError().
library(mecor)
## Force Amelia to run its imputations serially on a single CPU.
options(amelia.parallel="no",
amelia.ncpus=1)
## Multiple imputation of missing data: amelia().
library(Amelia)
## Model fitting across Amelia's imputed datasets: zelig();
## combine_coef_se() presumably also comes from here -- confirm.
library(Zelig)
## Logistic (inverse-logit) function: maps the real line onto (0, 1).
logistic <- function(x) {
  1 / (1 + exp(-x))
}
## Fit a suite of estimators on one simulated dataset and append each
## estimator's coefficient estimates and 95% CIs to `result`.
##
## `df` is expected to be a data.table (the code uses df[, expr] column
## scoping and df[order(...)]) with columns -- TODO confirm against the
## simulation generator:
##   y      outcome
##   x      true (gold-standard) covariate
##   x.obs  x observed only on a labeled subset, NA elsewhere
##   w      error-prone proxy for x
##   w_pred predicted label compared against x for accuracy
##   g      additional covariate
## Returns `result` (a list) extended with named entries per method.
run_simulation <- function(df, result){

    ## Proxy accuracy: share of rows where the predicted label equals x.
    accuracy <- df[,mean(w_pred==x)]
    result <- append(result, list(accuracy=accuracy))

    ## "True" model: oracle benchmark using gold-standard x for every row.
    (model.true <- lm(y ~ x + g, data=df))
    true.ci.Bxy <- confint(model.true)['x',]
    true.ci.Bgy <- confint(model.true)['g',]

    result <- append(result, list(Bxy.est.true=coef(model.true)['x'],
                                  Bgy.est.true=coef(model.true)['g'],
                                  Bxy.ci.upper.true = true.ci.Bxy[2],
                                  Bxy.ci.lower.true = true.ci.Bxy[1],
                                  Bgy.ci.upper.true = true.ci.Bgy[2],
                                  Bgy.ci.lower.true = true.ci.Bgy[1]))

    ## "Feasible" model: complete-case analysis on the labeled subset
    ## (lm's default na.action drops rows where x.obs is NA).
    (model.feasible <- lm(y~x.obs+g,data=df))

    feasible.ci.Bxy <- confint(model.feasible)['x.obs',]
    result <- append(result, list(Bxy.est.feasible=coef(model.feasible)['x.obs'],
                                  Bxy.ci.upper.feasible = feasible.ci.Bxy[2],
                                  Bxy.ci.lower.feasible = feasible.ci.Bxy[1]))

    feasible.ci.Bgy <- confint(model.feasible)['g',]
    result <- append(result, list(Bgy.est.feasible=coef(model.feasible)['g'],
                                  Bgy.ci.upper.feasible = feasible.ci.Bgy[2],
                                  Bgy.ci.lower.feasible = feasible.ci.Bgy[1]))

    ## "Naive" model: plugs the error-prone proxy w in place of x,
    ## ignoring measurement error entirely.
    (model.naive <- lm(y~w+g, data=df))

    naive.ci.Bxy <- confint(model.naive)['w',]
    naive.ci.Bgy <- confint(model.naive)['g',]

    result <- append(result, list(Bxy.est.naive=coef(model.naive)['w'],
                                  Bgy.est.naive=coef(model.naive)['g'],
                                  Bxy.ci.upper.naive = naive.ci.Bxy[2],
                                  Bxy.ci.lower.naive = naive.ci.Bxy[1],
                                  Bgy.ci.upper.naive = naive.ci.Bgy[2],
                                  Bgy.ci.lower.naive = naive.ci.Bgy[1]))

    ## multiple imputation when k is observed
    ## amelia does great at this one.
    ## Variables with <= 2 distinct values are declared nominal so Amelia
    ## imputes them as categories rather than continuous draws.
    noms <- c()
    if(length(unique(df$x.obs)) <=2){
        noms <- c(noms, 'x.obs')
    }

    if(length(unique(df$g)) <=2){
        noms <- c(noms, 'g')
    }

    ## 200 imputations, silent (p2s=0); x and w_pred are carried along but
    ## excluded from the imputation model via idvars. zelig fits OLS ('ls')
    ## on each imputed dataset; combine_coef_se pools estimates across
    ## imputations (presumably Rubin's rules -- confirm against Zelig docs).
    amelia.out.k <- amelia(df, m=200, p2s=0, idvars=c('x','w_pred'),noms=noms)
    mod.amelia.k <- zelig(y~x.obs+g, model='ls', data=amelia.out.k$imputations, cite=FALSE)
    (coefse <- combine_coef_se(mod.amelia.k, messages=FALSE))

    ## Normal-approximation 95% CI from the pooled estimate and SE.
    est.x.mi <- coefse['x.obs','Estimate']
    est.x.se <- coefse['x.obs','Std.Error']
    result <- append(result,
                     list(Bxy.est.amelia.full = est.x.mi,
                          Bxy.ci.upper.amelia.full = est.x.mi + 1.96 * est.x.se,
                          Bxy.ci.lower.amelia.full = est.x.mi - 1.96 * est.x.se
                          ))

    est.g.mi <- coefse['g','Estimate']
    est.g.se <- coefse['g','Std.Error']

    result <- append(result,
                     list(Bgy.est.amelia.full = est.g.mi,
                          Bgy.ci.upper.amelia.full = est.g.mi + 1.96 * est.g.se,
                          Bgy.ci.lower.amelia.full = est.g.mi - 1.96 * est.g.se
                          ))

    ## What if we can't observe k -- most realistic scenario. We can't include all the ML features in a model.
    ## amelia.out.nok <- amelia(df, m=200, p2s=0, idvars=c("x","w_pred"), noms=noms)
    ## mod.amelia.nok <- zelig(y~x.obs+g, model='ls', data=amelia.out.nok$imputations, cite=FALSE)
    ## (coefse <- combine_coef_se(mod.amelia.nok, messages=FALSE))

    ## est.x.mi <- coefse['x.obs','Estimate']
    ## est.x.se <- coefse['x.obs','Std.Error']
    ## result <- append(result,
    ##                  list(Bxy.est.amelia.nok = est.x.mi,
    ##                       Bxy.ci.upper.amelia.nok = est.x.mi + 1.96 * est.x.se,
    ##                       Bxy.ci.lower.amelia.nok = est.x.mi - 1.96 * est.x.se
    ##                       ))

    ## est.g.mi <- coefse['g','Estimate']
    ## est.g.se <- coefse['g','Std.Error']

    ## result <- append(result,
    ##                  list(Bgy.est.amelia.nok = est.g.mi,
    ##                       Bgy.ci.upper.amelia.nok = est.g.mi + 1.96 * est.g.se,
    ##                       Bgy.ci.lower.amelia.nok = est.g.mi - 1.96 * est.g.se
    ##                       ))

    ## GMM correction: build indicator vectors for predicted_covariates.
    ## v marks the validation (labeled) rows 1..M, p the primary (unlabeled)
    ## rows M+1..N, after sorting so labeled rows come first.
    ## NOTE(review): inside df[...], x.obs is data.table column scoping;
    ## the order(x.obs) step assumes all non-NA x.obs rows sort before the
    ## NAs -- confirm. Also (M+1):(N) misbehaves if M == N (no unlabeled
    ## rows): 1:n-style reversed sequence -- flagging, not changing.
    N <- nrow(df)
    m <- nrow(df[!is.na(x.obs)])
    p <- v <- train <- rep(0,N)
    M <- m
    p[(M+1):(N)] <- 1
    v[1:(M)] <- 1
    df <- df[order(x.obs)]
    ## Extract plain vectors (aligned with p/v/train after the sort).
    y <- df[,y]
    x <- df[,x.obs]
    g <- df[,g]
    w <- df[,w]
    # gmm gets pretty close
    ## ER_pval is presumably an exclusion/extra-restriction test p-value
    ## from predictionError -- confirm against that package's docs.
    (gmm.res <- predicted_covariates(y, x, g, w, v, train, p, max_iter=100, verbose=TRUE))

    result <- append(result,
                     list(Bxy.est.gmm = gmm.res$beta[1,1],
                          Bxy.ci.upper.gmm = gmm.res$confint[1,2],
                          Bxy.ci.lower.gmm = gmm.res$confint[1,1],
                          gmm.ER_pval = gmm.res$ER_pval
                          ))

    result <- append(result,
                     list(Bgy.est.gmm = gmm.res$beta[2,1],
                          Bgy.ci.upper.gmm = gmm.res$confint[2,2],
                          Bgy.ci.lower.gmm = gmm.res$confint[2,1]))

    ## mecor regression calibration: w is the error-prone measure with
    ## x.obs as the reference subset; 'efficient' estimator, B=400
    ## bootstrap replicates. CIs read from summary()$c$ci.
    mod.calibrated.mle <- mecor(y ~ MeasError(w, reference = x.obs) + g, df, B=400, method='efficient')
    (mod.calibrated.mle)
    (mecor.ci <- summary(mod.calibrated.mle)$c$ci['x.obs',])
    result <- append(result, list(
                                  Bxy.est.mecor = mecor.ci['Estimate'],
                                  Bxy.upper.mecor = mecor.ci['UCI'],
                                  Bxy.lower.mecor = mecor.ci['LCI'])
                     )

    (mecor.ci <- summary(mod.calibrated.mle)$c$ci['g',])

    result <- append(result, list(
                                  Bgy.est.mecor = mecor.ci['Estimate'],
                                  Bgy.upper.mecor = mecor.ci['UCI'],
                                  Bgy.lower.mecor = mecor.ci['LCI'])
                     )

    ## clean up memory
    ## rm(list=c("df","y","x","g","w","v","train","p","amelia.out.k","amelia.out.nok", "mod.calibrated.mle","gmm.res","mod.amelia.k","mod.amelia.nok", "model.true","model.naive","model.feasible"))

    ## gc()
    return(result)
}