Skip to content

Commit 7862f74

Browse files
committed
Removed std::cout and hacked in passing of pdlp_log_file_
1 parent d751d4e commit 7862f74

File tree

8 files changed

+181
-135
lines changed

8 files changed

+181
-135
lines changed

highs/pdlp/cupdlp/cupdlp_step.c

Lines changed: 3 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -346,8 +346,9 @@ cupdlp_retcode PDHG_Init_Step_Sizes(CUPDLPwork *pdhg) {
346346
stepsize->dDualStep = stepsize->dPrimalStep;
347347
stepsize->dPrimalStep /= sqrt(stepsize->dBeta);
348348
stepsize->dDualStep *= sqrt(stepsize->dBeta);
349-
cupdlp_printf("Initial step sizes from power method lambda = %g: primal = %g; dual = %g\n",
350-
power_method_lambda, stepsize->dPrimalStep, stepsize->dDualStep);
349+
if (pdhg->settings->nLogLevel > 1)
350+
cupdlp_printf("Initial step sizes from power method lambda = %g: primal = %g; dual = %g\n",
351+
power_method_lambda, stepsize->dPrimalStep, stepsize->dDualStep);
351352
} else {
352353
stepsize->dTheta = 1.0;
353354

highs/pdlp/hipdlp/pdhg.cc

Lines changed: 31 additions & 24 deletions
Original file line numberDiff line numberDiff line change
@@ -1,4 +1,4 @@
1-
user/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
1+
/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
22
/* */
33
/* This file is part of the HiGHS linear optimization suite */
44
/* */
@@ -272,24 +272,31 @@ void PDLPSolver::solve(std::vector<double>& x, std::vector<double>& y) {
272272
Timer solver_timer;
273273

274274
const HighsLp& lp = lp_;
275+
276+
pdlp_log_file_ = fopen("HiPDLP.log", "w");
277+
assert(pdlp_log_file_);
278+
275279
// --- 0.Using PowerMethod to estimate the largest eigenvalue ---
276280
const double op_norm_sq = PowerMethod();
277281
// Set step sizes based on the operator norm to ensure convergence
278282
// A safe choice satisfying eta * omega * ||A||^2 < 1
279283
step_.passLp(&lp_);
280284
step_.passLogOptions(&params_.log_options_);
281285
step_.passDebugLogFile(pdlp_log_file_);
282-
StepSizeConfig step_size = step_.InitializeStepSizesPowerMethod(lp, op_norm_sq);
286+
StepSizeConfig step_size =
287+
step_.InitializeStepSizesPowerMethod(lp, op_norm_sq);
283288
const double fixed_eta = 0.99 / sqrt(op_norm_sq);
284289
PrimalDualParams working_params = params_;
285290

286291
working_params.omega = std::sqrt(step_size.dual_step / step_size.primal_step);
287292
working_params.eta = std::sqrt(step_size.primal_step * step_size.dual_step);
288293
current_eta_ = working_params.eta; // Initial step size for adaptive strategy
289-
highsLogUser(params_.log_options_, HighsLogType::kInfo, "Using power method step sizes: eta = %g, omega = %g\n",
290-
working_params.eta, working_params.omega);
294+
highsLogUser(params_.log_options_, HighsLogType::kInfo,
295+
"Using power method step sizes: eta = %g, omega = %g\n",
296+
working_params.eta, working_params.omega);
291297

292-
highsLogUser(params_.log_options_, HighsLogType::kInfo,
298+
highsLogUser(
299+
params_.log_options_, HighsLogType::kInfo,
293300
"Initial step sizes from power method lambda = %g: primal = %g; dual = "
294301
"%g\n",
295302
step_size.power_method_lambda, step_size.primal_step,
@@ -318,9 +325,6 @@ void PDLPSolver::solve(std::vector<double>& x, std::vector<double>& y) {
318325

319326
logger_.print_iteration_header();
320327

321-
pdlp_log_file_ = fopen("HiPDLP.log", "w");
322-
assert(pdlp_log_file_);
323-
324328
// --- 2. Main PDHG Loop ---
325329
// A single loop handles max iterations, convergence, and restarts.
326330
for (int iter = 0; iter < params_.max_iterations; ++iter) {
@@ -348,21 +352,20 @@ void PDLPSolver::solve(std::vector<double>& x, std::vector<double>& y) {
348352
switch (params_.step_size_strategy) {
349353
case StepSizeStrategy::FIXED:
350354
step_.UpdateIteratesFixed(lp, working_params, fixed_eta, x, y, Ax_new,
351-
x_current_, y_current_, Ax_current,
352-
pdlp_log_file_);
355+
x_current_, y_current_, Ax_current);
353356
break;
354357

355358
case StepSizeStrategy::ADAPTIVE:
356-
step_.UpdateIteratesAdaptive(
357-
lp, working_params, x, y, Ax_new, x_current_, y_current_,
358-
Ax_current, ATy_current, current_eta_, iter, pdlp_log_file_);
359+
step_.UpdateIteratesAdaptive(lp, working_params, x, y, Ax_new,
360+
x_current_, y_current_, Ax_current,
361+
ATy_current, current_eta_, iter);
359362
break;
360363

361364
case StepSizeStrategy::MALITSKY_POCK:
362365
step_success = step_.UpdateIteratesMalitskyPock(
363366
lp, working_params, x, y, Ax_new, x_current_, y_current_,
364367
Ax_current, ATy_current, current_eta_, ratio_last_two_step_sizes_,
365-
num_rejected_steps_, first_malitsky_iteration, pdlp_log_file_);
368+
num_rejected_steps_, first_malitsky_iteration);
366369

367370
if (!step_success) {
368371
std::cerr << "Malitsky-Pock step failed at iteration " << iter
@@ -452,9 +455,8 @@ void PDLPSolver::solve(std::vector<double>& x, std::vector<double>& y) {
452455
}
453456

454457
// Perform the primal weight update using z^{n,0} and z^{n-1,0}
455-
PDHG_Compute_Step_Size_Ratio(working_params,
456-
restart_x, restart_y,
457-
x_at_last_restart_, y_at_last_restart_);
458+
PDHG_Compute_Step_Size_Ratio(working_params, restart_x, restart_y,
459+
x_at_last_restart_, y_at_last_restart_);
458460

459461
x_at_last_restart_ = restart_x; // Current becomes the new last
460462
y_at_last_restart_ = restart_y;
@@ -766,10 +768,10 @@ double PDLPSolver::PowerMethod() {
766768
// kYanyuPowerMethod;
767769
// kYanyuPowerMethodDev;
768770
kCuPdlpAATPowerMethod;
769-
highsLogUser(params_.log_options_, HighsLogType::kInfo, "Power method: %s\n", power_method == kYanyuPowerMethod ? "Yanyu"
770-
: power_method == kYanyuPowerMethodDev
771-
? "Yanyu dev"
772-
: "CuPdlp-C");
771+
highsLogUser(params_.log_options_, HighsLogType::kInfo, "Power method: %s\n",
772+
power_method == kYanyuPowerMethod ? "Yanyu"
773+
: power_method == kYanyuPowerMethodDev ? "Yanyu dev"
774+
: "CuPdlp-C");
773775
// Dev version of Yanyu power method (based on A'A) has
774776
//
775777
// * First iterate as vector of ones
@@ -794,7 +796,9 @@ double PDLPSolver::PowerMethod() {
794796
int log_iters =
795797
log_level == LogLevel::kVerbose || log_level == LogLevel::kDebug;
796798

797-
if (log_iters) highsLogUser(params_.log_options_, HighsLogType::kInfo, "It lambda dl_lambda\n");
799+
if (log_iters)
800+
highsLogUser(params_.log_options_, HighsLogType::kInfo,
801+
"It lambda dl_lambda\n");
798802

799803
if (power_method == kYanyuPowerMethodDev) {
800804
// Start from a vector
@@ -828,7 +832,8 @@ double PDLPSolver::PowerMethod() {
828832
linalg::normalize(z_vec); // Normalize the result
829833
x_vec = z_vec;
830834
if (log_iters)
831-
highsLogUser(params_.log_options_, HighsLogType::kInfo, "%2d %12.6g %11.4g\n", iter, op_norm_sq, dl_op_norm_sq);
835+
highsLogUser(params_.log_options_, HighsLogType::kInfo,
836+
"%2d %12.6g %11.4g\n", iter, op_norm_sq, dl_op_norm_sq);
832837
} else {
833838
if (power_method == kYanyuPowerMethodDev) {
834839
// Yanyu power method without "convergence" check
@@ -868,7 +873,9 @@ double PDLPSolver::PowerMethod() {
868873
}
869874
double dl_lambda = std::fabs(lambda - previous_lambda);
870875
previous_lambda = lambda;
871-
if (log_iters) highsLogUser(params_.log_options_, HighsLogType::kInfo, "%2d %12.6g %11.4g\n", iter, lambda, dl_lambda);
876+
if (log_iters)
877+
highsLogUser(params_.log_options_, HighsLogType::kInfo,
878+
"%2d %12.6g %11.4g\n", iter, lambda, dl_lambda);
872879
}
873880
}
874881
if (power_method != kYanyuPowerMethod) op_norm_sq = lambda;

highs/pdlp/hipdlp/restart.cc

Lines changed: 11 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -11,11 +11,11 @@
1111
// restart.cc
1212
#include "restart.hpp"
1313

14-
#include "io/HighsIO.h" // For pdlpLogging
15-
1614
#include <algorithm>
1715
#include <cmath>
1816

17+
#include "io/HighsIO.h" // For pdlpLogging
18+
1919
// Initializes the restart scheme with parameters and initial results
2020
void RestartScheme::Initialize(const SolverResults& results) {
2121
strategy_ = params_->restart_strategy;
@@ -92,13 +92,19 @@ RestartInfo RestartScheme::Check(int current_iter,
9292
(candidate_score > last_candidate_score_);
9393

9494
if (artificial_restart) {
95-
highsLogUser(*log_options_, HighsLogType::kInfo, "Artificial restart triggered at iteration %d\n", current_iter);
95+
highsLogUser(*log_options_, HighsLogType::kInfo,
96+
"Artificial restart triggered at iteration %d\n",
97+
current_iter);
9698
info.should_restart = true;
9799
} else if (sufficient_decay) {
98-
highsLogUser(*log_options_, HighsLogType::kInfo, "Sufficient decay triggered at iteration %d\n", current_iter);
100+
highsLogUser(*log_options_, HighsLogType::kInfo,
101+
"Sufficient decay triggered at iteration %d\n",
102+
current_iter);
99103
info.should_restart = true;
100104
} else if (necessary_decay) {
101-
highsLogUser(*log_options_, HighsLogType::kInfo, "Necessary decay triggered at iteration %d\n", current_iter);
105+
highsLogUser(*log_options_, HighsLogType::kInfo,
106+
"Necessary decay triggered at iteration %d\n",
107+
current_iter);
102108
info.should_restart = true;
103109
} else {
104110
info.should_restart = false;

highs/pdlp/hipdlp/restart.hpp

Lines changed: 3 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -39,7 +39,9 @@ class RestartScheme {
3939

4040
int GetLastRestartIter() const { return last_restart_iter_; }
4141
void passParams(const PrimalDualParams* params) { params_ = params; };
42-
void passLogOptions(const HighsLogOptions* log_options) { log_options_ = log_options; };
42+
void passLogOptions(const HighsLogOptions* log_options) {
43+
log_options_ = log_options;
44+
};
4345

4446
private:
4547
const PrimalDualParams* params_;

highs/pdlp/hipdlp/scaling.cc

Lines changed: 30 additions & 17 deletions
Original file line numberDiff line numberDiff line change
@@ -15,9 +15,8 @@
1515
#include <cmath>
1616
#include <iostream>
1717

18-
#include "linalg.hpp"
19-
2018
#include "io/HighsIO.h" // For pdlpLogging
19+
#include "linalg.hpp"
2120

2221
void Scaling::Initialize(const HighsLp& lp) {
2322
col_scale_.assign(lp.num_col_, 1.0);
@@ -31,26 +30,31 @@ void Scaling::Initialize(const HighsLp& lp) {
3130

3231
void Scaling::LogMatrixNorms(const std::string& stage) {
3332
const HighsLp& lp = *lp_;
34-
highsLogUser(params_->log_options_, HighsLogType::kInfo, "\n--- Matrix Norms %d ---\n", stage.c_str());
33+
highsLogUser(params_->log_options_, HighsLogType::kInfo,
34+
"\n--- Matrix Norms %d ---\n", stage.c_str());
3535

3636
if (lp.num_col_ == 0 || lp.num_row_ == 0) {
37-
highsLogUser(params_->log_options_, HighsLogType::kInfo, "Matrix is empty\n");
37+
highsLogUser(params_->log_options_, HighsLogType::kInfo,
38+
"Matrix is empty\n");
3839
return;
3940
}
4041

4142
// --- Calculate and Log Column Norms (Infinity Norm) ---
42-
highsLogUser(params_->log_options_, HighsLogType::kInfo, "Column Infinity Norms:\n");
43+
highsLogUser(params_->log_options_, HighsLogType::kInfo,
44+
"Column Infinity Norms:\n");
4345
for (HighsInt iCol = 0; iCol < lp.num_col_; ++iCol) {
4446
double max_abs_val = 0.0;
4547
for (HighsInt iEl = lp.a_matrix_.start_[iCol];
4648
iEl < lp.a_matrix_.start_[iCol + 1]; ++iEl) {
4749
max_abs_val = std::max(max_abs_val, std::abs(lp.a_matrix_.value_[iEl]));
4850
}
49-
highsLogUser(params_->log_options_, HighsLogType::kInfo, " Col %d: %g\n", iCol, max_abs_val);
51+
highsLogUser(params_->log_options_, HighsLogType::kInfo, " Col %d: %g\n",
52+
iCol, max_abs_val);
5053
}
5154

5255
// --- Calculate and Log Row Norms (Infinity Norm) ---
53-
highsLogUser(params_->log_options_, HighsLogType::kInfo, "Row Infinity Norms:\n");
56+
highsLogUser(params_->log_options_, HighsLogType::kInfo,
57+
"Row Infinity Norms:\n");
5458
std::vector<double> row_max_abs_vals(lp.num_row_, 0.0);
5559
for (HighsInt iCol = 0; iCol < lp.num_col_; ++iCol) {
5660
for (HighsInt iEl = lp.a_matrix_.start_[iCol];
@@ -62,30 +66,37 @@ void Scaling::LogMatrixNorms(const std::string& stage) {
6266
}
6367

6468
for (HighsInt iRow = 0; iRow < lp.num_row_; ++iRow) {
65-
highsLogUser(params_->log_options_, HighsLogType::kInfo, " Row %d: %g\n", iRow, row_max_abs_vals[iRow]);
69+
highsLogUser(params_->log_options_, HighsLogType::kInfo, " Row %d: %g\n",
70+
iRow, row_max_abs_vals[iRow]);
6671
}
67-
highsLogUser(params_->log_options_, HighsLogType::kInfo, "-------------------------\n");
72+
highsLogUser(params_->log_options_, HighsLogType::kInfo,
73+
"-------------------------\n");
6874
}
6975

7076
void Scaling::scaleProblem() {
7177
if (params_->scaling_method == ScalingMethod::NONE) {
72-
highsLogUser(params_->log_options_, HighsLogType::kInfo, "No scaling applied\n");
78+
highsLogUser(params_->log_options_, HighsLogType::kInfo,
79+
"No scaling applied\n");
7380
return;
7481
}
7582

76-
highsLogUser(params_->log_options_, HighsLogType::kInfo, "Applying scaling method: %d\n",
77-
static_cast<int>(params_->scaling_method));
83+
highsLogUser(params_->log_options_, HighsLogType::kInfo,
84+
"Applying scaling method: %d\n",
85+
static_cast<int>(params_->scaling_method));
7886
if (params_->use_pc_scaling) {
79-
highsLogUser(params_->log_options_, HighsLogType::kInfo, "Applying Pock-Chambolle scaling...\n");
87+
highsLogUser(params_->log_options_, HighsLogType::kInfo,
88+
"Applying Pock-Chambolle scaling...\n");
8089
ApplyPockChambolleScaling();
8190
}
8291
if (params_->use_ruiz_scaling) {
83-
highsLogUser(params_->log_options_, HighsLogType::kInfo, "Applying Ruiz scaling...\n");
92+
highsLogUser(params_->log_options_, HighsLogType::kInfo,
93+
"Applying Ruiz scaling...\n");
8494
ApplyRuizScaling();
8595
}
8696
if (params_->use_l2_scaling ||
8797
params_->scaling_method == ScalingMethod::L2_NORM) {
88-
highsLogUser(params_->log_options_, HighsLogType::kInfo, "Applying L2 norm scaling...\n");
98+
highsLogUser(params_->log_options_, HighsLogType::kInfo,
99+
"Applying L2 norm scaling...\n");
89100
ApplyL2Scaling();
90101
}
91102

@@ -140,7 +151,8 @@ void Scaling::ApplyRuizScaling() {
140151
}
141152
}
142153
} else {
143-
highsLogUser(params_->log_options_, HighsLogType::kError, "Currently only support infinity norm for Ruiz scaling\n");
154+
highsLogUser(params_->log_options_, HighsLogType::kError,
155+
"Currently only support infinity norm for Ruiz scaling\n");
144156
exit(1);
145157
}
146158

@@ -159,7 +171,8 @@ void Scaling::ApplyRuizScaling() {
159171

160172
void Scaling::ApplyPockChambolleScaling() {
161173
if (params_->pc_alpha < 0.0 || params_->pc_alpha > 2.0) {
162-
highsLogUser(params_->log_options_, HighsLogType::kError, "PC alpha should be in [0, 2]\n");
174+
highsLogUser(params_->log_options_, HighsLogType::kError,
175+
"PC alpha should be in [0, 2]\n");
163176
exit(1);
164177
}
165178

highs/pdlp/hipdlp/scaling.hpp

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -27,7 +27,7 @@ class Scaling {
2727
void unscaleSolution(std::vector<double>& x, std::vector<double>& y) const;
2828
void passLp(HighsLp* lp) { lp_ = lp; };
2929
void passParams(const PrimalDualParams* params) { params_ = params; };
30-
void LogMatrixNorms(const std::string& stage) ;
30+
void LogMatrixNorms(const std::string& stage);
3131
// Get scaling vectors (for unscaling solution later)
3232
bool IsScaled() const { return is_scaled_; }
3333
const std::vector<double>& GetColScaling() const { return col_scale_; }

0 commit comments

Comments (0)