Skip to content

Commit e4b75f3

Browse files
BF: computing inverse correctly after centering
1 parent 2960b99 commit e4b75f3

File tree

2 files changed

+66
-1
lines changed

2 files changed

+66
-1
lines changed

selectiveInference/R/funs.ROSI.R

Lines changed: 8 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -276,7 +276,14 @@ approximate_BN = function(X, active_set){
276276
nactive=length(active_set)
277277

278278
svdX = svd(X)
279-
inv = solve(svdX$u %*% diag(svdX$d^2) %*% t(svdX$u))
279+
280+
rank = sum(svdX$d > max(svdX$d) * 1.e-9)
281+
inv_d = 1. / svdX$d
282+
if (rank < length(svdX$d)) {
283+
inv_d[(rank+1):length(svdX$d)] = 0.
284+
}
285+
inv = svdX$u %*% diag(inv_d^2) %*% t(svdX$u)
286+
280287
D = rep(0, nactive)
281288
for (i in 1:nactive){
282289
var = active_set[i]

selectiveInference/man/ROSI.Rd

Lines changed: 58 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -111,9 +111,13 @@ p is comparable to n. Ignored when n<p and set to TRUE.
111111

112112
\references{
113113

114+
114115
Keli Liu, Jelena Markovic, Robert Tibshirani. More powerful post-selection
115116
inference, with application to the Lasso. arXiv:1801.09037
116117

118+
Tom Boot, Didier Nibbering. Inference in high-dimensional linear regression models.
119+
arXiv:1703.03282
120+
117121
}
118122
\author{Jelena Markovic, Jonathan Taylor}
119123

@@ -122,6 +126,7 @@ inference, with application to the Lasso. arXiv:1801.09037
122126
library(selectiveInference)
123127
library(glmnet)
124128
set.seed(43)
129+
125130
n = 200
126131
p = 100
127132
s = 2
@@ -153,5 +158,58 @@ out = ROSI(x,
153158
dispersion=sigma^2)
154159
out
155160

161+
# an alternate approximate inverse from Boot and Nibbering
162+
163+
out = ROSI(x,
164+
y,
165+
beta,
166+
lambda,
167+
dispersion=sigma^2,
168+
debiasing_method="BN")
169+
out
170+
171+
# wide matrices work too
172+
173+
n = 100
174+
p = 200
175+
s = 2
176+
sigma = 1
177+
178+
x = matrix(rnorm(n*p),n,p)
179+
x = scale(x,TRUE,TRUE)
180+
181+
beta = c(rep(10, s), rep(0,p-s)) / sqrt(n)
182+
y = x \%*\% beta + sigma*rnorm(n)
183+
184+
# first run glmnet
185+
gfit = glmnet(x,y,standardize=FALSE)
186+
187+
# extract coef for a given lambda; note the 1/n factor!
188+
# (and we don't save the intercept term)
189+
lambda = 4 * sqrt(n)
190+
lambda_glmnet = 4 / sqrt(n)
191+
beta = selectiveInference:::solve_problem_glmnet(x,
192+
y,
193+
lambda_glmnet,
194+
penalty_factor=rep(1,p),
195+
family="gaussian")
196+
# compute fixed lambda p-values and selection intervals
197+
out = ROSI(x,
198+
y,
199+
beta,
200+
lambda,
201+
dispersion=sigma^2)
202+
out
203+
204+
# an alternate approximate inverse from Boot and Nibbering
205+
206+
out = ROSI(x,
207+
y,
208+
beta,
209+
lambda,
210+
dispersion=sigma^2,
211+
debiasing_method="BN")
212+
out
213+
156214
}
157215

0 commit comments

Comments (0)