@@ -269,19 +269,19 @@ fixedLasso.poly=
 ## Approximates inverse covariance matrix theta
 
 debiasingMatrix = function(Sigma,
-                           nsample,
-                           rows,
-                           verbose = FALSE,
-                           mu = NULL,              # starting value of mu
-                           linesearch = TRUE,      # do a linesearch?
-                           resol = 1.2,            # multiplicative factor for linesearch
-                           max_active = NULL,      # how big can active set get?
-                           max_try = 10,           # how many steps in linesearch?
-                           warn_kkt = FALSE,       # warn if KKT does not seem to be satisfied?
-                           max_iter = 100,         # how many iterations for each optimization problem
-                           kkt_tol = 1.e-4,        # tolerance for the KKT conditions
-                           objective_tol = 1.e-4   # tolerance for relative decrease in objective
-                           ) {
+                           nsample,
+                           rows,
+                           verbose = FALSE,
+                           mu = NULL,              # starting value of mu
+                           linesearch = TRUE,      # do a linesearch?
+                           scaling_factor = 1.5,   # multiplicative factor for linesearch
+                           max_active = NULL,      # how big can active set get?
+                           max_try = 10,           # how many steps in linesearch?
+                           warn_kkt = FALSE,       # warn if KKT does not seem to be satisfied?
+                           max_iter = 100,         # how many iterations for each optimization problem
+                           kkt_tol = 1.e-4,        # tolerance for the KKT conditions
+                           objective_tol = 1.e-8   # tolerance for relative decrease in objective
+                           ) {
 
 
   if (is.null(max_active)) {
@@ -310,7 +310,7 @@ debiasingMatrix = function(Sigma,
                            row,
                            mu,
                            linesearch = linesearch,
-                           resol = resol,
+                           scaling_factor = scaling_factor,
                            max_active = max_active,
                            max_try = max_try,
                            warn_kkt = FALSE,
@@ -322,7 +322,12 @@ debiasingMatrix = function(Sigma,
322
322
warning(" Solution for row of M does not seem to be feasible" )
323
323
}
324
324
325
- M [idx ,] = output $ soln ;
325
+ if (! is.null(output $ soln )) {
326
+ M [idx ,] = output $ soln ;
327
+ } else {
328
+ stop(paste(" Unable to approximate inverse row " , row ));
329
+ }
330
+
326
331
idx = idx + 1 ;
327
332
}
328
333
return (M )
@@ -332,13 +337,13 @@ debiasingRow = function(Sigma,
                         row,
                         mu,
                         linesearch = TRUE,      # do a linesearch?
-                        resol = 1.2,            # multiplicative factor for linesearch
+                        scaling_factor = 1.2,   # multiplicative factor for linesearch
                         max_active = NULL,      # how big can active set get?
                         max_try = 10,           # how many steps in linesearch?
                         warn_kkt = FALSE,       # warn if KKT does not seem to be satisfied?
-                        max_iter = 100,         # how many iterations for each optimization problem
+                        max_iter = 100,          # how many iterations for each optimization problem
                         kkt_tol = 1.e-4,        # tolerance for the KKT conditions
-                        objective_tol = 1.e-4   # tolerance for relative decrease in objective
+                        objective_tol = 1.e-8   # tolerance for relative decrease in objective
                         ) {
 
   p = nrow(Sigma)
@@ -368,7 +373,17 @@ debiasingRow = function (Sigma,
368
373
369
374
while (counter_idx < max_try ) {
370
375
371
- result = solve_QP(Sigma , mu , max_iter , soln , linear_func , gradient , ever_active , nactive , kkt_tol , objective_tol , max_active )
376
+ result = solve_QP(Sigma ,
377
+ mu ,
378
+ max_iter ,
379
+ soln ,
380
+ linear_func ,
381
+ gradient ,
382
+ ever_active ,
383
+ nactive ,
384
+ kkt_tol ,
385
+ objective_tol ,
386
+ max_active )
372
387
373
388
iter = result $ iter
374
389
@@ -390,13 +405,13 @@ debiasingRow = function(Sigma,
          if ((iter < (max_iter + 1)) && (counter_idx > 1)) {
             break;                      # we've found a feasible point and solved the problem
          }
-         mu = mu * resol;
+         mu = mu * scaling_factor;
      } else {                           # trying to drop the bound parameter further
          if ((iter == (max_iter + 1)) && (counter_idx > 1)) {
             result = last_output;       # problem seems infeasible because we didn't solve it
             break;                      # so we revert to previously found solution
          }
-         mu = mu / resol;
+         mu = mu / scaling_factor;
      }
 
      # If the active set has grown to a certain size
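
For reference, a minimal usage sketch of the renamed argument follows. It assumes the file in this diff has been sourced into the session (together with the compiled solve_QP routine it relies on) so that debiasingMatrix is callable; the covariance matrix below is synthetic and purely illustrative, not data from this repository.

set.seed(1)
n = 100
p = 20
X = matrix(rnorm(n * p), n, p)
Sigma = crossprod(X) / n                 # sample covariance, t(X) %*% X / n

# Approximate the first three rows of the inverse covariance matrix,
# passing the linesearch multiplier under its new name (formerly `resol`).
M = debiasingMatrix(Sigma,
                    nsample = n,
                    rows = 1:3,
                    scaling_factor = 1.5,    # matches the new default in debiasingMatrix
                    objective_tol = 1.e-8)

dim(M)                                   # expected: length(rows) x ncol(Sigma)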