Last updated: 2018-08-15
library(mashr)
Loading required package: ashr
library(knitr)
library(kableExtra)
source('../code/estimate_cor.R')
source('../code/generateDataV.R')
source('../code/summary.R')
We use an EM algorithm to update V. The complete-data likelihood is
\[ P(\hat{B},B,Z|\rho, \pi) = \prod_{i=1}^{n} \prod_{k=0}^{K}\left[\pi_{k}N(\hat{b}_{i}; b_{i}, V)N(b_{i}; 0, U_{k})\right]^{\mathbb{I}(z_{i}=k)} \]
\[ \mathbb{E}_{Z,B|\hat{B}} \log P(\hat{B},B,Z|\rho, \pi) = \sum_{i=1}^{n} \sum_{k=0}^{K} P(z_{i}=k|\hat{b}_{i})\left[ \log \pi_{k} + \mathbb{E}_{B|\hat{B}}(\log N(\hat{b}_{i}; b_{i}, V)) + \mathbb{E}_{B|\hat{B}}(\log N(b_{i}; 0, U_{k})) \right] \]
\[ \begin{align*} \log N(\hat{b}_{i}; b_{i}, V) + \log N(b_{i}; 0, U_{k}) &= -\frac{p}{2}\log 2\pi -\frac{1}{2}\log |V| - \frac{1}{2}(\hat{b}_{i}-b_{i})^{T}V^{-1}(\hat{b}_{i}-b_{i}) -\frac{p}{2}\log 2\pi -\frac{1}{2}\log |U_{k}| - \frac{1}{2}b_{i}^{T}U_{k}^{-1}b_{i} \\ &= -p\log 2\pi -\frac{1}{2}\log |U_{k}| -\frac{1}{2}\log |V| - \frac{1}{2}\hat{b}_{i}^{T}V^{-1}\hat{b}_{i} + \hat{b}_{i}^{T}V^{-1}b_{i} -\frac{1}{2}b_{i}^{T}V^{-1}b_{i} - \frac{1}{2}b_{i}^{T}U_{k}^{-1}b_{i} \\ \mathbb{E}_{b_{i}|\hat{b}_{i}}\left[ \log N(\hat{b}_{i}; b_{i}, V) + \log N(b_{i}; 0, U_{k}) \right] &= -p\log 2\pi -\frac{1}{2}\log |U_{k}| -\frac{1}{2}\log |V| - \frac{1}{2}\hat{b}_{i}^{T}V^{-1}\hat{b}_{i} + \hat{b}_{i}^{T}V^{-1}\mathbb{E}(b_{i}|\hat{b}_{i}) -\frac{1}{2}tr\left(V^{-1}\mathbb{E}(b_{i}b_{i}^{T}|\hat{b}_{i}) \right) - \frac{1}{2}tr\left(U_{k}^{-1}\mathbb{E}(b_{i}b_{i}^{T}|\hat{b}_{i}) \right) \end{align*} \]
In the E step, the responsibility of component \(k\) for observation \(i\) is
\[ \gamma_{Z_{i}}(k) = P(z_{i}=k|\hat{b}_{i}) = \frac{\pi_{k}N(\hat{b}_{i}; 0, V+U_{k})}{\sum_{k'=0}^{K}\pi_{k'}N(\hat{b}_{i}; 0, V + U_{k'})} \]
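As a concrete illustration, here is a minimal R sketch of this E step for a single observation. It assumes the mvtnorm package is available (it is loaded as a mashr dependency); the helper names compute_responsibilities and posterior_moments are illustrative and not part of mashr or the sourced scripts.
library(mvtnorm)
# Responsibilities gamma_i(k) for a single observation bhat (length-p vector),
# given mixture weights pi_k, a list of prior covariances Ulist, and error covariance V.
compute_responsibilities <- function(bhat, pi_k, Ulist, V) {
  loglik_k <- sapply(Ulist, function(U) dmvnorm(bhat, sigma = V + U, log = TRUE))
  w <- log(pi_k) + loglik_k
  w <- exp(w - max(w))   # subtract the max before exponentiating for numerical stability
  w / sum(w)
}
# Posterior mean and second moment of b given bhat under component k:
# b | bhat, z=k ~ N( U_k (U_k+V)^{-1} bhat , U_k - U_k (U_k+V)^{-1} U_k )
posterior_moments <- function(bhat, U, V) {
  S <- U %*% solve(U + V)
  mu <- drop(S %*% bhat)
  Sigma1 <- U - S %*% U
  list(mean = mu, second_moment = Sigma1 + tcrossprod(mu))
}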
Collecting the terms of the expected complete-data log likelihood that involve \(V\):
\[ \begin{align*} f(V^{-1}) = \sum_{i=1}^{n} \sum_{k=0}^{K} \gamma_{Z_{i}}(k)\left[ -p\log 2\pi -\frac{1}{2}\log |U_{k}| -\frac{1}{2}\log |V| - \frac{1}{2}\hat{b}_{i}^{T}V^{-1}\hat{b}_{i} + \hat{b}_{i}^{T}V^{-1}\mathbb{E}(b_{i}|\hat{b}_{i}) -\frac{1}{2}tr\left(V^{-1}\mathbb{E}(b_{i}b_{i}^{T}|\hat{b}_{i}) \right) - \frac{1}{2}tr\left(U_{k}^{-1}\mathbb{E}(b_{i}b_{i}^{T}|\hat{b}_{i}) \right) \right] \end{align*} \]
Setting the derivative with respect to \(V^{-1}\) to zero:
\[ \begin{align*} \frac{\partial f(V^{-1})}{\partial V^{-1}} &= \sum_{i=1}^{n} \sum_{k=0}^{K} \gamma_{Z_{i}}(k)\left[ \frac{1}{2}V - \frac{1}{2}\hat{b}_{i}\hat{b}_{i}^{T} + \mathbb{E}(b_{i}|\hat{b}_{i})\hat{b}_{i}^{T} - \frac{1}{2} \mathbb{E}(b_{i}b_{i}^{T}|\hat{b}_{i}) \right] = 0 \\ \frac{1}{2}nV &= \sum_{i=1}^{n} \sum_{k=0}^{K} \gamma_{Z_{i}}(k)\left[\frac{1}{2}\hat{b}_{i}\hat{b}_{i}^{T} - \mathbb{E}(b_{i}|\hat{b}_{i})\hat{b}_{i}^{T} + \frac{1}{2} \mathbb{E}(b_{i}b_{i}^{T}|\hat{b}_{i}) \right] \\ \hat{V} &= \frac{1}{n} \sum_{i=1}^{n} \left[\hat{b}_{i}\hat{b}_{i}^{T} - 2\mathbb{E}(b_{i}|\hat{b}_{i})\hat{b}_{i}^{T} + \mathbb{E}(b_{i}b_{i}^{T}|\hat{b}_{i}) \right] \end{align*} \]
where the sum over \(k\) drops out because \(\sum_{k=0}^{K}\gamma_{Z_{i}}(k)=1\) and the posterior moments \(\mathbb{E}(b_{i}|\hat{b}_{i})\) and \(\mathbb{E}(b_{i}b_{i}^{T}|\hat{b}_{i})\) do not depend on \(k\) in this notation.
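A minimal R sketch of this M step, assuming an n x p matrix Bhat of observations and, for each row, the mixture posterior mean post_mean[i, ] and second moment post_second[[i]] (for example, obtained by averaging the per-component moments sketched above with weights \(\gamma_{Z_{i}}(k)\)); the function name update_V is illustrative.
# M-step update: V_hat = (1/n) sum_i [ bhat_i bhat_i' - 2 E(b_i|bhat_i) bhat_i' + E(b_i b_i'|bhat_i) ]
update_V <- function(Bhat, post_mean, post_second) {
  n <- nrow(Bhat); p <- ncol(Bhat)
  Vhat <- matrix(0, p, p)
  for (i in seq_len(n)) {
    Vhat <- Vhat + tcrossprod(Bhat[i, ]) -
      2 * tcrossprod(post_mean[i, ], Bhat[i, ]) +
      post_second[[i]]
  }
  Vhat <- Vhat / n
  cov2cor((Vhat + t(Vhat)) / 2)   # symmetrize, then rescale to a correlation matrix as in the algorithm below
}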
Algorithm:

Input: X, Ulist, init_V
Compute the log likelihood
delta = 1
while delta > tol:
    Given rho (the current V), estimate pi using the convex method (current mash method)
    M step: update V, then set V = cov2cor(V)
    Compute the log likelihood
    Update delta
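A schematic R implementation of this loop is sketched below. It reuses the illustrative helpers from above (compute_responsibilities, posterior_moments, update_V) and, for simplicity, updates \(\pi\) by averaging the responsibilities (a plain EM update) instead of the convex optimization used in mash; it is a sketch of the algorithm, not the sourced mixture.EMV.times implementation.
estimate_V_em <- function(Bhat, Ulist, init_V, tol = 1e-5, maxit = 100) {
  n <- nrow(Bhat); p <- ncol(Bhat); K <- length(Ulist)
  V <- init_V
  pi_k <- rep(1 / K, K)
  loglik <- -Inf; log_liks <- c()
  for (it in seq_len(maxit)) {
    # E step: responsibilities and mixture posterior moments for every observation
    gamma <- t(apply(Bhat, 1, compute_responsibilities, pi_k = pi_k, Ulist = Ulist, V = V))
    post_mean <- matrix(0, n, p); post_second <- vector("list", n)
    for (i in seq_len(n)) {
      mom <- lapply(Ulist, posterior_moments, bhat = Bhat[i, ], V = V)
      post_mean[i, ]   <- Reduce(`+`, Map(function(w, m) w * m$mean, gamma[i, ], mom))
      post_second[[i]] <- Reduce(`+`, Map(function(w, m) w * m$second_moment, gamma[i, ], mom))
    }
    # Updates: plain EM for pi, then the V update derived above (rescaled to a correlation matrix)
    pi_k <- colMeans(gamma)
    V <- update_V(Bhat, post_mean, post_second)
    # Observed-data log likelihood under the updated parameters
    new_loglik <- sum(log(sapply(seq_len(n), function(i)
      sum(pi_k * sapply(Ulist, function(U) mvtnorm::dmvnorm(Bhat[i, ], sigma = V + U))))))
    log_liks <- c(log_liks, new_loglik)
    if (abs(new_loglik - loglik) < tol) break
    loglik <- new_loglik
  }
  list(V = V, pi = pi_k, log_liks = log_liks)
}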
The data are simulated with correlated errors:
\[ \hat{\beta}|\beta \sim N_{2}\left(\beta, \left(\begin{matrix} 1 & 0.5 \\ 0.5 & 1 \end{matrix}\right)\right) \]
\[ \beta \sim \frac{1}{4}\delta_{0} + \frac{1}{4}N_{2}(0, \left(\begin{matrix} 1 & 0 \\ 0 & 0 \end{matrix}\right)) + \frac{1}{4}N_{2}(0, \left(\begin{matrix} 0 & 0 \\ 0 & 1 \end{matrix}\right)) + \frac{1}{4}N_{2}(0, \left(\begin{matrix} 1 & 1 \\ 1 & 1 \end{matrix}\right)) \]
We simulate \(n = 4000\) samples:
set.seed(1)
n = 4000; p = 2
Sigma = matrix(c(1,0.5,0.5,1),p,p)   # true error correlation V
U0 = matrix(0,2,2)                   # null component
U1 = U0; U1[1,1] = 1                 # effect in condition 1 only
U2 = U0; U2[2,2] = 1                 # effect in condition 2 only
U3 = matrix(1,2,2)                   # equal effect in both conditions
Utrue = list(U0=U0, U1=U1, U2=U2, U3=U3)
data = generate_data(n, p, Sigma, Utrue)
m.data = mash_set_data(data$Bhat, data$Shat)
U.c = cov_canonical(m.data)
# Build the grid of scales and the expanded list of prior covariances
# (including a point mass at zero) using mashr internals
grid = mashr:::autoselect_grid(m.data, sqrt(2))
Ulist = mashr:::normalize_Ulist(U.c)
xUlist = mashr:::expand_cov(Ulist, grid, usepointmass = TRUE)
# Run the EM for V (from ../code/estimate_cor.R) with two initializations of V
result <- mixture.EMV.times(data$Bhat, xUlist, init_V = list(diag(ncol(data$Bhat)), matrix(c(1,-0.8,-0.8,1),2,2)), grid=1)
plot(result$result$log_liks)
The estimated V is \(\left(\begin{matrix} 1 & 0.4679947 \\ 0.4679947 & 1 \end{matrix}\right)\); the off-diagonal is close to the true value of 0.5.
m.data.em = mash_set_data(data$Bhat, data$Shat, V = result$result$V)
U.c = cov_canonical(m.data.em)
m.em = mash(m.data.em, U.c, verbose= FALSE)
null.ind = which(apply(data$B,1,sum) == 0)
The log likelihood is \(-1.230367 \times 10^{4}\). There are 24 significant samples and 0 false positives. The RRMSE is 0.58112.
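These summaries come from the functions in ../code/summary.R. As a rough illustration only, something like the following could produce such numbers, assuming RRMSE here means the RMSE of the posterior means divided by the RMSE of the raw Bhat (which may differ from the sourced definition):
sig <- get_significant_results(m.em)              # indices significant at the default lfsr threshold
fp <- sum(sig %in% null.ind)                       # significant samples that are truly null
rrmse <- sqrt(mean((get_pm(m.em) - data$B)^2)) /   # error of the posterior means ...
         sqrt(mean((data$Bhat - data$B)^2))        # ... relative to the error of the observations
c(significant = length(sig), false_positives = fp, RRMSE = rrmse)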
The estimated \(\pi\) is:
barplot(get_estimated_pi(m.em), las=2, cex.names = 0.7, main='EM V', ylim=c(0,0.8))
The ROC curves, comparing mash with the EM-estimated V against mash with the true V:
m.data.correct = mash_set_data(data$Bhat, data$Shat, V=Sigma)
m.correct = mash(m.data.correct, U.c, verbose = FALSE)
m.correct.seq = ROC.table(data$B, m.correct)
m.em.seq = ROC.table(data$B, m.em)
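A sketch of how the two curves could be overlaid, assuming ROC.table (from ../code/summary.R) returns a table whose first two columns are the false positive rate and the true positive rate; the column positions are an assumption.
# Overlay the ROC curves for the true V and the EM-estimated V (column layout assumed)
plot(m.correct.seq[, 1], m.correct.seq[, 2], type = "l", col = "red",
     xlab = "FPR", ylab = "TPR", main = "True V vs EM-estimated V")
lines(m.em.seq[, 1], m.em.seq[, 2], col = "blue")
legend("bottomright", legend = c("true V", "EM V"), col = c("red", "blue"), lty = 1)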
sessionInfo()
R version 3.5.1 (2018-07-02)
Platform: x86_64-apple-darwin15.6.0 (64-bit)
Running under: macOS High Sierra 10.13.6
Matrix products: default
BLAS: /Library/Frameworks/R.framework/Versions/3.5/Resources/lib/libRblas.0.dylib
LAPACK: /Library/Frameworks/R.framework/Versions/3.5/Resources/lib/libRlapack.dylib
locale:
[1] en_US.UTF-8/en_US.UTF-8/en_US.UTF-8/C/en_US.UTF-8/en_US.UTF-8
attached base packages:
[1] stats graphics grDevices utils datasets methods base
other attached packages:
[1] kableExtra_0.9.0 knitr_1.20 mashr_0.2-11 ashr_2.2-10
loaded via a namespace (and not attached):
[1] Rcpp_0.12.18 compiler_3.5.1 pillar_1.3.0
[4] plyr_1.8.4 iterators_1.0.10 tools_3.5.1
[7] digest_0.6.15 viridisLite_0.3.0 evaluate_0.11
[10] tibble_1.4.2 lattice_0.20-35 pkgconfig_2.0.1
[13] rlang_0.2.1 Matrix_1.2-14 foreach_1.4.4
[16] rstudioapi_0.7 yaml_2.2.0 parallel_3.5.1
[19] mvtnorm_1.0-8 xml2_1.2.0 httr_1.3.1
[22] stringr_1.3.1 REBayes_1.3 hms_0.4.2
[25] rprojroot_1.3-2 grid_3.5.1 R6_2.2.2
[28] rmarkdown_1.10 rmeta_3.0 readr_1.1.1
[31] magrittr_1.5 scales_0.5.0 backports_1.1.2
[34] codetools_0.2-15 htmltools_0.3.6 MASS_7.3-50
[37] rvest_0.3.2 assertthat_0.2.0 colorspace_1.3-2
[40] stringi_1.2.4 Rmosek_8.0.69 munsell_0.5.0
[43] pscl_1.5.2 doParallel_1.0.11 truncnorm_1.0-8
[46] SQUAREM_2017.10-1 crayon_1.3.4
This R Markdown site was created with workflowr