
Commit 56141de

fix beta calcul
1 parent 44cf86a commit 56141de

3 files changed: 51 additions & 38 deletions

highs/pdlp/cupdlp/cupdlp_step.c

Lines changed: 1 addition & 0 deletions
@@ -164,6 +164,7 @@ void PDHG_Compute_Step_Size_Ratio(CUPDLPwork *pdhg) {
 
   cupdlp_float dDiffPrimal = 0.0;
   cupdlp_diffTwoNorm(pdhg, x->data, iterates->xLastRestart, problem->nCols, &dDiffPrimal);
+
   cupdlp_float dDiffDual = 0.0;
   cupdlp_diffTwoNorm(pdhg, y->data, iterates->yLastRestart, problem->nRows, &dDiffDual);
 
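For context (an interpretation, not part of the commit): judging from its name and from the analogous linalg::diffTwoNorm calls in pdhg.cc below, cupdlp_diffTwoNorm appears to write into its last argument the Euclidean distance between the current iterate and the iterate saved at the last restart, i.e.

\[
\Delta_{\text{primal}} = \lVert x - x^{\text{lastRestart}} \rVert_2,
\qquad
\Delta_{\text{dual}} = \lVert y - y^{\text{lastRestart}} \rVert_2,
\]

which are the movements that feed the step-size-ratio (primal weight) heuristic.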

highs/pdlp/hipdlp/pdhg.cc

Lines changed: 48 additions & 38 deletions
@@ -417,7 +417,21 @@ void PDLPSolver::solve(std::vector<double>& x, std::vector<double>& y) {
 
     // Reset the average iterate accumulation
     int inner_iter = iter - restart_scheme_.GetLastRestartIter();
-    UpdateAverageIterates(x_current_, y_current_, working_params, inner_iter);
+    //UpdateAverageIterates(x_current_, y_current_, working_params, inner_iter);
+
+    // Compute averages WITHOUT updating the accumulator
+    if (sum_weights_ > 0) {
+      for (size_t i = 0; i < x_avg_.size(); ++i) {
+        x_avg_[i] = x_sum_[i] / sum_weights_;
+      }
+      for (size_t i = 0; i < y_avg_.size(); ++i) {
+        y_avg_[i] = y_sum_[i] / sum_weights_;
+      }
+    } else {
+      // First iteration after restart or initial iteration
+      x_avg_ = x_current_;
+      y_avg_ = y_current_;
+    }
 
     linalg::Ax(lp, x_avg_, Ax_avg);
     linalg::ATy(lp, y_avg_, ATy_avg);
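In formulas, the added block reads out the weight-normalised running average (x_sum_, y_sum_ and sum_weights_ are maintained by UpdateAverageIterates further down in this diff):

\[
\bar{x} = \frac{\sum_k \eta_k\, x^{(k)}}{\sum_k \eta_k},
\qquad
\bar{y} = \frac{\sum_k \eta_k\, y^{(k)}}{\sum_k \eta_k},
\]

where \eta_k is the step size params.eta used as the averaging weight; while sum_weights_ is still zero (first iteration after a restart, or the very first iteration) the average simply falls back to the current iterate.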
@@ -485,6 +499,7 @@ void PDLPSolver::solve(std::vector<double>& x, std::vector<double>& y) {
     // Perform the primal weight update using z^{n,0} and z^{n-1,0}
     PDHG_Compute_Step_Size_Ratio(working_params, restart_x, restart_y,
                                  x_at_last_restart_, y_at_last_restart_);
+    current_eta_ = working_params.eta;
     restart_scheme_.passParams(&working_params);
     restart_scheme_.UpdateBeta(working_params.omega * working_params.omega);
 
@@ -495,9 +510,15 @@ void PDLPSolver::solve(std::vector<double>& x, std::vector<double>& y) {
     x_current_ = restart_x;
     y_current_ = restart_y;
 
-    UpdateAverageIterates(
-        x_current_, y_current_, working_params,
-        -1);
+    //UpdateAverageIterates(
+    //    x_current_, y_current_, working_params,
+    //    -1);
+    x_sum_.assign(x_current_.size(), 0.0);
+    y_sum_.assign(y_current_.size(), 0.0);
+    sum_weights_ = 0.0;
+
+    x_avg_ = x_current_;
+    y_avg_ = y_current_;
 
     // Recompute Ax and ATy for the restarted iterates
     linalg::Ax(lp, x_current_, Ax_cache_);
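Together with the read-out block above and the UpdateAverageIterates change below, this restart hunk implements an eta-weighted running average that is cleared on every restart. A minimal self-contained sketch of that pattern follows; the WeightedAverage struct and its member names are illustrative only, not part of this commit or of the solver's API.

#include <cassert>
#include <cstddef>
#include <vector>

// Illustrative sketch (not the solver's API): an eta-weighted running
// average of iterates that is cleared at every restart, accumulated once
// per inner iteration, and normalised only when it is read.
struct WeightedAverage {
  std::vector<double> sum;   // eta-weighted sum of iterates
  double sum_weights = 0.0;  // running sum of the weights

  // Called right after a restart: forget all accumulated history.
  void Reset(std::size_t n) {
    sum.assign(n, 0.0);
    sum_weights = 0.0;
  }

  // Called once per inner iteration with the current iterate and its weight.
  void Accumulate(const std::vector<double>& z, double eta) {
    assert(z.size() == sum.size());
    for (std::size_t i = 0; i < z.size(); ++i) sum[i] += eta * z[i];
    sum_weights += eta;
  }

  // Read the average without modifying the accumulator; fall back to the
  // current iterate if nothing has been accumulated since the last reset.
  std::vector<double> Average(const std::vector<double>& current) const {
    if (sum_weights <= 0.0) return current;
    std::vector<double> avg(sum.size());
    for (std::size_t i = 0; i < sum.size(); ++i) avg[i] = sum[i] / sum_weights;
    return avg;
  }
};

Keeping accumulation separate from the read-out means the average can be inspected at any iteration without perturbing the sums, which is what the "Compute averages WITHOUT updating the accumulator" block above relies on.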
@@ -609,22 +630,32 @@ void PDLPSolver::PDHG_Compute_Step_Size_Ratio(
     const std::vector<double>& y_n_minus_1_0) {
   // 1. Calculate the L2 norm of the difference between current and last-restart
   // iterates.
-  double primal_diff_norm = linalg::diffTwoNorm(x_n_0, x_n_minus_1_0);
-  double dual_diff_norm = linalg::diffTwoNorm(y_n_0, y_n_minus_1_0);
+  //double primal_diff_norm = linalg::diffTwoNorm(x_n_0, x_n_minus_1_0);
+  double primal_diff_norm = linalg::diffTwoNorm(x_at_last_restart_, x_current_);
+  double norm_xLastRestart = linalg::vector_norm(x_at_last_restart_);
+  double norm_xCurrent = linalg::vector_norm(x_current_);
+
+  //double dual_diff_norm = linalg::diffTwoNorm(y_n_0, y_n_minus_1_0);
+  double dual_diff_norm = linalg::diffTwoNorm(y_at_last_restart_, y_current_);
+
+  double dMeanStepSize = std::sqrt(stepsize_.primal_step *
+                                   stepsize_.dual_step);
+
 
   // 2. Update the primal weight (beta = omega^2) if movements are significant.
   if (std::min(primal_diff_norm, dual_diff_norm) > 1e-10) {
     double beta_update_ratio = dual_diff_norm / primal_diff_norm;
-    double current_beta = working_params.omega * working_params.omega;
+    double old_beta = working_params.omega * working_params.omega;
 
     double dLogBetaUpdate = 0.5 * std::log(beta_update_ratio) +
-                            0.5 * std::log(std::sqrt(current_beta));
-    double new_beta = std::exp(2.0 * dLogBetaUpdate);
-
-    working_params.omega = std::sqrt(new_beta);
-    std::cout << "new beta: " << new_beta << ", omega: " << working_params.omega
-              << std::endl;
+                            0.5 * std::log(std::sqrt(old_beta));
+    stepsize_.beta = std::exp(2.0 * dLogBetaUpdate);
   }
+
+  stepsize_.primal_step = dMeanStepSize / working_params.omega;
+  stepsize_.dual_step = stepsize_.primal_step * stepsize_.beta;
+  working_params.eta = std::sqrt(stepsize_.primal_step * stepsize_.dual_step);
+  working_params.omega = std::sqrt(stepsize_.beta);
 }
 
 void PDLPSolver::UpdateAverageIterates(const std::vector<double>& x,
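Spelled out as formulas (a restatement of the rewritten function above, writing \(\Delta x, \Delta y\) for the primal/dual diff norms since the last restart, \(\omega\) for the primal weight, \(\beta = \omega^2\), and \(\tau, \sigma\) for the primal/dual step sizes):

\[
\beta_{\text{new}}
  = \exp\!\Big(\log\frac{\lVert\Delta y\rVert_2}{\lVert\Delta x\rVert_2}
               + \log\sqrt{\beta_{\text{old}}}\Big)
  = \frac{\lVert\Delta y\rVert_2}{\lVert\Delta x\rVert_2}\,\omega_{\text{old}},
\]
\[
\bar\eta = \sqrt{\tau_{\text{old}}\,\sigma_{\text{old}}},\qquad
\tau_{\text{new}} = \frac{\bar\eta}{\omega_{\text{old}}},\qquad
\sigma_{\text{new}} = \tau_{\text{new}}\,\beta_{\text{new}},\qquad
\eta = \sqrt{\tau_{\text{new}}\,\sigma_{\text{new}}},\qquad
\omega_{\text{new}} = \sqrt{\beta_{\text{new}}}.
\]

The \(\beta\) update is skipped when \(\min(\lVert\Delta x\rVert_2, \lVert\Delta y\rVert_2) \le 10^{-10}\); the step sizes are still re-centred around the mean step size \(\bar\eta\) in that case, and the division by working_params.omega uses the value it holds at that point, since omega is only overwritten on the last line.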
@@ -639,36 +670,15 @@ void PDLPSolver::UpdateAverageIterates(const std::vector<double>& x,
     // Initialize averages to current point
     x_avg_ = x;
     y_avg_ = y;
-  } else if (inner_iter == 0) {
-    // First iteration after restart - already initialized, just set up sums
-    x_sum_ = x_avg_; // Start accumulating from current average
-    y_sum_ = y_avg_;
-    for (size_t i = 0; i < x_sum_.size(); ++i) {
-      x_sum_[i] *= params.eta;
-    }
-    for (size_t i = 0; i < y_sum_.size(); ++i) {
-      y_sum_[i] *= params.eta;
-    }
-    sum_weights_ = params.eta;
   } else {
-    // Normal update
+    double weight = params.eta; // this should be the mean step size
     for (size_t i = 0; i < x.size(); ++i) {
-      x_sum_[i] += x[i] * params.eta;
+      x_sum_[i] += x[i] * weight;
     }
     for (size_t i = 0; i < y.size(); ++i) {
-      y_sum_[i] += y[i] * params.eta;
-    }
-    sum_weights_ += params.eta;
-
-    // Recompute averages
-    if (sum_weights_ > 0) {
-      for (size_t i = 0; i < x.size(); ++i) {
-        x_avg_[i] = x_sum_[i] / sum_weights_;
-      }
-      for (size_t i = 0; i < y.size(); ++i) {
-        y_avg_[i] = y_sum_[i] / sum_weights_;
-      }
+      y_sum_[i] += y[i] * weight;
     }
+    sum_weights_ += weight;
   }
 }
 
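In terms of the hypothetical WeightedAverage sketch shown earlier, the call pattern these hunks spell out inline is roughly the following (names illustrative, not the solver's actual members):

WeightedAverage x_acc, y_acc;

// Right after a restart (and at start-up): clear the accumulated history.
x_acc.Reset(x_current.size());
y_acc.Reset(y_current.size());

// Once per inner iteration, after the PDHG step, with eta the mean step size:
x_acc.Accumulate(x_current, eta);
y_acc.Accumulate(y_current, eta);

// Whenever the averaged iterate is needed (e.g. before linalg::Ax / linalg::ATy):
std::vector<double> x_bar = x_acc.Average(x_current);
std::vector<double> y_bar = y_acc.Average(y_current);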

highs/pdlp/hipdlp/pdhg.hpp

Lines changed: 2 additions & 0 deletions
@@ -45,6 +45,8 @@ class PDLPSolver {
   // --- Getters ---
   TerminationStatus getTerminationCode() const { return results_.term_code; }
   int getIterationCount() const { return final_iter_count_; }
+  int getnCol() const { return lp_.num_col_; }
+  int getnRow() const { return lp_.num_row_; }
 
   // --- Debugging ---
   FILE* debug_pdlp_log_file_ = nullptr;
