@@ -34,9 +34,6 @@ randomizedLasso = function(X,
         noise_scale = 0.5 * sd(y) * sqrt(mean_diag)
     }
 
-    print(paste("ridge term", ridge_term))
-    print(paste("noise scale", noise_scale))
-
     noise_type = match.arg(noise_type)
 
     if (noise_scale > 0) {
@@ -333,14 +330,14 @@ conditional_density = function(noise_scale, lasso_soln) {
 randomizedLassoInf = function(X,
                               y,
                               lam,
-                              sampler = "A",
                               sigma = NULL,
                               noise_scale = NULL,
                               ridge_term = NULL,
                               condition_subgrad = TRUE,
                               level = 0.9,
-                              nsample = 10000,
-                              burnin = 2000,
+                              sampler = c("norejection", "adaptMCMC"),
+                              nsample = 10000,
+                              burnin = 2000,
                               max_iter = 100,        # how many iterations for each optimization problem
                               kkt_tol = 1.e-4,       # tolerance for the KKT conditions
                               parameter_tol = 1.e-8, # tolerance for relative convergence of parameter
@@ -368,7 +365,7 @@ randomizedLassoInf = function(X,
 
     active_set = lasso_soln$active_set
     nactive = length(active_set)
-    print(paste("nactive", nactive))
+
     if (nactive == 0){
         return(list(active_set = active_set, pvalues = c(), ci = c()))
     }
@@ -394,10 +391,12 @@ randomizedLassoInf = function(X,
 
     ndim = length(lasso_soln$observed_opt_state)
 
-    if (sampler == "R"){
+    sampler = match.arg(sampler)
+
+    if (sampler == "adaptMCMC"){
         S = sample_opt_variables(lasso_soln, jump_scale = rep(1/sqrt(n), ndim), nsample = nsample)
         opt_samples = as.matrix(S$samples[(burnin + 1):nsample, , drop = FALSE])
-    } else if (sampler == "A") {
+    } else if (sampler == "norejection") {
         opt_samples = gaussian_sampler(noise_scale,
                                        lasso_soln$observed_opt_state,
                                        cur_opt_transform$linear_term,
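
Note on the reworked sampler argument (a sketch, not part of the diff): with the
signature sampler = c("norejection", "adaptMCMC"), the new match.arg(sampler) call
resolves the argument and falls back to the first choice when the caller omits it,
so "norejection" becomes the default and the old single-letter codes "A"/"R" are no
longer accepted. The stub below, resolve_sampler, is a hypothetical standalone
illustration of that base-R behaviour, not a function from the package:

    # Hypothetical stub mirroring the updated argument; match.arg() picks the
    # first choice when the argument is left at its default vector.
    resolve_sampler = function(sampler = c("norejection", "adaptMCMC")) {
        match.arg(sampler)
    }
    resolve_sampler()              # "norejection" (the new default)
    resolve_sampler("adaptMCMC")   # exact or partial matches are accepted
    # resolve_sampler("A")         # the old code letter now raises an error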