Skip to content

Commit 3818397

Browse files
authored
Merge branch 'main' into main
2 parents e0c3887 + 9a5ac0b commit 3818397

File tree

4 files changed

+10
-7
lines changed

4 files changed

+10
-7
lines changed

RELEASENOTES.md

Lines changed: 3 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -6,6 +6,9 @@ Releases, starting with 9/2/2021, are listed with the most recent release at the
66
__Bug Fixes__:
77

88
#1426 Sequential.eval() does not put model into eval mode<br/>
9+
`torch.optim.lr_scheduler.LinearLR` `end_factor` default has been corrected; it is now 1.0.<br/>
10+
`torch.optim.lr_scheduler.PolynomialLR` `power` type has been corrected; it is now double.<br/>
11+
912
# NuGet Version 0.105.0
1013

1114
Move to libtorch 2.5.1. As with the 2.4.0 release, MacOS / Intel is no longer supported by libtorch, so TorchSharp doesn't, either.

src/TorchSharp/Optimizers/LRScheduler.cs

Lines changed: 4 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -325,7 +325,7 @@ public class PolynomialLR : LRScheduler
325325
/// <param name="last_epoch">The index of last epoch. Default: -1.</param>
326326
/// <param name="verbose"> If true, prints a message to stdout for each update. Default: false.</param>
327327
/// <returns>A scheduler</returns>
328-
public PolynomialLR(Optimizer optimizer, int total_iters = 5, int power = 1, int last_epoch = -1, bool verbose = false) : base(optimizer, last_epoch, verbose)
328+
public PolynomialLR(Optimizer optimizer, int total_iters = 5, double power = 1.0, int last_epoch = -1, bool verbose = false) : base(optimizer, last_epoch, verbose)
329329
{
330330
if (optimizer == null) throw new ArgumentNullException("optimizer");
331331
_power = power;
@@ -359,7 +359,7 @@ protected override IEnumerable<double> get_closed_form_lr()
359359
}
360360

361361
private double _total_iters;
362-
private int _power;
362+
private double _power;
363363
}
364364

365365
/// <summary>
@@ -1306,7 +1306,7 @@ public static LRScheduler MultiStepLR(Optimizer optimizer, IList<int> milestones
13061306
/// <param name="last_epoch">The index of last epoch. Default: -1.</param>
13071307
/// <param name="verbose"> If true, prints a message to stdout for each update. Default: false.</param>
13081308
/// <returns>A scheduler</returns>
1309-
public static LRScheduler PolynomialLR(Optimizer optimizer, int total_iters = 5, int power = 1, int last_epoch = -1, bool verbose = false)
1309+
public static LRScheduler PolynomialLR(Optimizer optimizer, int total_iters = 5, double power = 1, int last_epoch = -1, bool verbose = false)
13101310
{
13111311
return new impl.PolynomialLR(optimizer, total_iters, power, last_epoch, verbose);
13121312
}
@@ -1398,7 +1398,7 @@ public static LRScheduler SequentialLR(Optimizer optimizer, IEnumerable<LRSchedu
13981398
/// </param>
13991399
/// <param name="verbose">If true, prints a message to stdout for each update. Default: false.</param>
14001400
/// <returns>A scheduler</returns>
1401-
public static LRScheduler LinearLR(Optimizer optimizer, double start_factor = 1.0 / 3, double end_factor = 5, int total_iters = 5, int last_epoch = -1, bool verbose = false)
1401+
public static LRScheduler LinearLR(Optimizer optimizer, double start_factor = 1.0 / 3, double end_factor = 1.0, int total_iters = 5, int last_epoch = -1, bool verbose = false)
14021402
{
14031403
return new impl.LinearLR(optimizer, start_factor, end_factor, total_iters, last_epoch, verbose);
14041404
}

test/TorchSharpTest/TestTorchTensorBugs.cs

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -873,7 +873,7 @@ public void ValidatePolynomialLR()
873873

874874
double learning_rate = 0.1;
875875
var optimizer = torch.optim.SGD(seq.parameters(), learning_rate);
876-
var scheduler = torch.optim.lr_scheduler.PolynomialLR(optimizer, 10, 1);
876+
var scheduler = torch.optim.lr_scheduler.PolynomialLR(optimizer, 10, 1.0);
877877

878878
optimizer.zero_grad();
879879
optimizer.step();
@@ -907,7 +907,7 @@ public void ValidatePolynomialLR()
907907

908908
double learning_rate = 0.1;
909909
var optimizer = torch.optim.SGD(seq.parameters(), learning_rate);
910-
var scheduler = torch.optim.lr_scheduler.PolynomialLR(optimizer, 10, 2);
910+
var scheduler = torch.optim.lr_scheduler.PolynomialLR(optimizer, 10, 2.0);
911911

912912
optimizer.zero_grad();
913913
optimizer.step();

test/TorchSharpTest/TestTraining.cs

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1654,7 +1654,7 @@ public void TrainingSGDSequentialLRWithAllClosedFormSchedulers()
16541654
var scheduler2 = torch.optim.lr_scheduler.StepLR(optimizer, 2);
16551655
var scheduler3 = torch.optim.lr_scheduler.MultiStepLR(optimizer, new[] { 2, 4 });
16561656
var scheduler4 = torch.optim.lr_scheduler.ExponentialLR(optimizer);
1657-
var scheduler5 = torch.optim.lr_scheduler.PolynomialLR(optimizer, power: 2);
1657+
var scheduler5 = torch.optim.lr_scheduler.PolynomialLR(optimizer, power: 2.0);
16581658
var scheduler6 = torch.optim.lr_scheduler.CosineAnnealingLR(optimizer, 5, 0.1);
16591659
var scheduler7 = torch.optim.lr_scheduler.LinearLR(optimizer, end_factor: 0.75);
16601660
var scheduler = torch.optim.lr_scheduler.SequentialLR(optimizer, new[] { scheduler0, scheduler1, scheduler2, scheduler3, scheduler4, scheduler5, scheduler6, scheduler7}, new[] { 5, 5, 5, 5, 5, 5, 5 });

0 commit comments

Comments
 (0)