Fix PolynomialLR power type. #1440

Merged 2 commits on Mar 19, 2025
1 change: 1 addition & 0 deletions RELEASENOTES.md
@@ -7,6 +7,7 @@ __Bug Fixes__:

 #1426 Sequential.eval() does not put model into eval mode<br/>
 `torch.optim.lr_scheduler.LinearLR` `end_factor` default has been corrected, is now 1.0.<br/>
+`torch.optim.lr_scheduler.PolynomialLR` `power` type has been corrected, is now double.<br/>

 # NuGet Version 0.105.0
6 changes: 3 additions & 3 deletions src/TorchSharp/Optimizers/LRScheduler.cs
@@ -325,7 +325,7 @@ public class PolynomialLR : LRScheduler
 /// <param name="last_epoch">The index of last epoch. Default: -1.</param>
 /// <param name="verbose"> If true, prints a message to stdout for each update. Default: false.</param>
 /// <returns>A scheduler</returns>
-public PolynomialLR(Optimizer optimizer, int total_iters = 5, int power = 1, int last_epoch = -1, bool verbose = false) : base(optimizer, last_epoch, verbose)
+public PolynomialLR(Optimizer optimizer, int total_iters = 5, double power = 1.0, int last_epoch = -1, bool verbose = false) : base(optimizer, last_epoch, verbose)
 {
     if (optimizer == null) throw new ArgumentNullException("optimizer");
     _power = power;
@@ -359,7 +359,7 @@ protected override IEnumerable<double> get_closed_form_lr()
 }

 private double _total_iters;
-private int _power;
+private double _power;
 }

 /// <summary>
@@ -1306,7 +1306,7 @@ public static LRScheduler MultiStepLR(Optimizer optimizer, IList<int> milestones
 /// <param name="last_epoch">The index of last epoch. Default: -1.</param>
 /// <param name="verbose"> If true, prints a message to stdout for each update. Default: false.</param>
 /// <returns>A scheduler</returns>
-public static LRScheduler PolynomialLR(Optimizer optimizer, int total_iters = 5, int power = 1, int last_epoch = -1, bool verbose = false)
+public static LRScheduler PolynomialLR(Optimizer optimizer, int total_iters = 5, double power = 1, int last_epoch = -1, bool verbose = false)
 {
     return new impl.PolynomialLR(optimizer, total_iters, power, last_epoch, verbose);
 }
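With the signature fixed, a fractional exponent can now be passed directly, matching PyTorch, where `power` is a float. A minimal usage sketch under the new signature (illustrative only; the model and training loop are placeholders, not code from this PR):

```csharp
using TorchSharp;
using static TorchSharp.torch;

// Placeholder model; any module's parameters will do.
var seq = nn.Sequential(("lin", nn.Linear(10, 10)));
var optimizer = optim.SGD(seq.parameters(), 0.1);

// Before this PR, power was an int, so 0.5 would not compile.
var scheduler = optim.lr_scheduler.PolynomialLR(optimizer, total_iters: 10, power: 0.5);

for (int epoch = 0; epoch < 10; epoch++) {
    optimizer.zero_grad();
    // ... forward pass and loss.backward() ...
    optimizer.step();
    scheduler.step();
}
```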
4 changes: 2 additions & 2 deletions test/TorchSharpTest/TestTorchTensorBugs.cs
@@ -873,7 +873,7 @@ public void ValidatePolynomialLR()

 double learning_rate = 0.1;
 var optimizer = torch.optim.SGD(seq.parameters(), learning_rate);
-var scheduler = torch.optim.lr_scheduler.PolynomialLR(optimizer, 10, 1);
+var scheduler = torch.optim.lr_scheduler.PolynomialLR(optimizer, 10, 1.0);

 optimizer.zero_grad();
 optimizer.step();
@@ -907,7 +907,7 @@ public void ValidatePolynomialLR()

 double learning_rate = 0.1;
 var optimizer = torch.optim.SGD(seq.parameters(), learning_rate);
-var scheduler = torch.optim.lr_scheduler.PolynomialLR(optimizer, 10, 2);
+var scheduler = torch.optim.lr_scheduler.PolynomialLR(optimizer, 10, 2.0);

 optimizer.zero_grad();
 optimizer.step();
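For reference, PyTorch defines PolynomialLR's closed-form schedule as `base_lr * (1 - min(step, total_iters) / total_iters) ** power`, so with `power: 2.0` the rate decays quadratically toward zero. A small sketch of that factor (the formula is assumed from the PyTorch documentation; the helper below is hypothetical, not code from this PR):

```csharp
using System;

// Closed-form decay factor for PolynomialLR, per PyTorch's definition
// (assumption: TorchSharp's get_closed_form_lr mirrors it).
static double PolynomialFactor(int step, int totalIters, double power)
    => Math.Pow(1.0 - Math.Min(step, totalIters) / (double)totalIters, power);

// base lr 0.1, total_iters 10, power 2.0 -- the second test's setup:
Console.WriteLine(0.1 * PolynomialFactor(0, 10, 2.0));  // 0.1
Console.WriteLine(0.1 * PolynomialFactor(5, 10, 2.0));  // 0.025
Console.WriteLine(0.1 * PolynomialFactor(10, 10, 2.0)); // 0
```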
2 changes: 1 addition & 1 deletion test/TorchSharpTest/TestTraining.cs
@@ -1654,7 +1654,7 @@ public void TrainingSGDSequentialLRWithAllClosedFormSchedulers()
 var scheduler2 = torch.optim.lr_scheduler.StepLR(optimizer, 2);
 var scheduler3 = torch.optim.lr_scheduler.MultiStepLR(optimizer, new[] { 2, 4 });
 var scheduler4 = torch.optim.lr_scheduler.ExponentialLR(optimizer);
-var scheduler5 = torch.optim.lr_scheduler.PolynomialLR(optimizer, power: 2);
+var scheduler5 = torch.optim.lr_scheduler.PolynomialLR(optimizer, power: 2.0);
 var scheduler6 = torch.optim.lr_scheduler.CosineAnnealingLR(optimizer, 5, 0.1);
 var scheduler7 = torch.optim.lr_scheduler.LinearLR(optimizer, end_factor: 0.75);
 var scheduler = torch.optim.lr_scheduler.SequentialLR(optimizer, new[] { scheduler0, scheduler1, scheduler2, scheduler3, scheduler4, scheduler5, scheduler6, scheduler7}, new[] { 5, 5, 5, 5, 5, 5, 5 });