Commit f9866eb
1. Renamed ParamLessModule to ParameterLessModule.
2. Removed redundant '_to()' overrides in parameter-less modules.
3. Added ParameterLessModule<...> for all the type signatures that Module<...> supports.
1 parent 1736f0f commit f9866eb


69 files changed: +199 additions, -355 deletions
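Background for the per-file deletions below: point 2 of the commit message works because the short-circuited move can live once in the shared base class instead of in every sealed module. The hunks in this commit show only the subclasses, not the base class itself, so the following is a minimal sketch of what a centralized ParameterLessModule could look like. It assumes the three '_to' overloads seen in the deleted lines are overridable on nn.Module and that the full namespace is TorchSharp.Modules; the two-argument variant at the end illustrates point 3 (one such base per arity that Module<...> supports).

using TorchSharp;
using static TorchSharp.torch;

namespace TorchSharp.Modules
{
    // Sketch: a base class for modules that own no parameters or buffers.
    // Subclasses inherit the short-circuited moves instead of each
    // declaring the three '_to' overrides themselves.
    public abstract class ParameterLessModule<T1, TResult> : nn.Module<T1, TResult>
    {
        protected ParameterLessModule(string name) : base(name) { }

        // Rather than spending cycles only to discover that this module has
        // neither parameters nor buffers, just shortcut the move completely.
        protected internal override nn.Module _to(Device device, ScalarType dtype, bool non_blocking) => this;
        protected internal override nn.Module _to(DeviceType deviceType, int deviceIndex, bool non_blocking) => this;
        protected internal override nn.Module _to(ScalarType dtype, bool non_blocking) => this;
    }

    // Point 3 of the commit message implies one such base per Module<...>
    // arity, e.g. a hypothetical two-argument form:
    public abstract class ParameterLessModule<T1, T2, TResult> : nn.Module<T1, T2, TResult>
    {
        protected ParameterLessModule(string name) : base(name) { }

        protected internal override nn.Module _to(Device device, ScalarType dtype, bool non_blocking) => this;
        protected internal override nn.Module _to(DeviceType deviceType, int deviceIndex, bool non_blocking) => this;
        protected internal override nn.Module _to(ScalarType dtype, bool non_blocking) => this;
    }
}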

src/TorchSharp/NN/Activation/CELU.cs (1 addition & 7 deletions)

@@ -12,7 +12,7 @@ namespace Modules
     /// <summary>
     /// This class is used to represent a CELU module.
     /// </summary>
-    public sealed class CELU : ParamLessModule<Tensor, Tensor>
+    public sealed class CELU : ParameterLessModule<Tensor, Tensor>
     {
         internal CELU(double alpha, bool inplace) : base(nameof(CELU))
         {
@@ -25,12 +25,6 @@ public override Tensor forward(Tensor tensor)
             return torch.nn.functional.celu(tensor, alpha, inplace);
         }

-        // Rather than spending cycles only to discover that this module has neither
-        // parameters nor buffers, just shortcut the move completely.
-        protected internal override nn.Module _to(Device device, ScalarType dtype, bool non_blocking) => this;
-        protected internal override nn.Module _to(DeviceType deviceType, int deviceIndex, bool non_blocking) => this;
-        protected internal override nn.Module _to(ScalarType dtype, bool non_blocking) => this;
-
         public double alpha {get; set;}
         public bool inplace {get; set; }
     }

src/TorchSharp/NN/Activation/ELU.cs (2 additions & 8 deletions; the paired '{' lines below differ only in whitespace)

@@ -12,10 +12,10 @@ namespace Modules
     /// <summary>
     /// This class is used to represent a ELU module.
     /// </summary>
-    public sealed class ELU : ParamLessModule<Tensor, Tensor>
+    public sealed class ELU : ParameterLessModule<Tensor, Tensor>
     {
         internal ELU(double alpha, bool inplace) : base(nameof(ELU))
-        {
+        {
             this.alpha = alpha;
             this.inplace = inplace;
         }
@@ -25,12 +25,6 @@ public override Tensor forward(Tensor tensor)
             return torch.nn.functional.elu(tensor, alpha, inplace);
         }

-        // Rather than spending cycles only to discover that this module has neither
-        // parameters nor buffers, just shortcut the move completely.
-        protected internal override nn.Module _to(Device device, ScalarType dtype, bool non_blocking) => this;
-        protected internal override nn.Module _to(DeviceType deviceType, int deviceIndex, bool non_blocking) => this;
-        protected internal override nn.Module _to(ScalarType dtype, bool non_blocking) => this;
-
         public double alpha {get; set;}

         public bool inplace {get; set;}

src/TorchSharp/NN/Activation/GELU.cs (1 addition & 7 deletions)

@@ -12,7 +12,7 @@ namespace Modules
     /// <summary>
     /// This class is used to represent a GELU module.
     /// </summary>
-    public sealed class GELU : ParamLessModule<Tensor, Tensor>
+    public sealed class GELU : ParameterLessModule<Tensor, Tensor>
     {
         internal GELU(bool inplace) : base(nameof(GELU))
         {
@@ -24,12 +24,6 @@ public override Tensor forward(Tensor tensor)
             return torch.nn.functional.gelu(tensor, inplace);
         }

-        // Rather than spending cycles only to discover that this module has neither
-        // parameters nor buffers, just shortcut the move completely.
-        protected internal override nn.Module _to(Device device, ScalarType dtype, bool non_blocking) => this;
-        protected internal override nn.Module _to(DeviceType deviceType, int deviceIndex, bool non_blocking) => this;
-        protected internal override nn.Module _to(ScalarType dtype, bool non_blocking) => this;
-
         public bool inplace {get; set; }
     }
 }

src/TorchSharp/NN/Activation/GLU.cs (3 additions & 9 deletions; the paired constructor lines below differ only in whitespace)

@@ -12,10 +12,10 @@ namespace Modules
     /// <summary>
     /// This class is used to represent a GLU (gated linear unit) module.
     /// </summary>
-    public sealed class GLU : ParamLessModule<Tensor, Tensor>
+    public sealed class GLU : ParameterLessModule<Tensor, Tensor>
     {
-        internal GLU(long dim) : base(nameof(GLU))
-        {
+        internal GLU(long dim) : base(nameof(GLU))
+        {
             this.dim = dim;
         }

@@ -24,12 +24,6 @@ public override Tensor forward(Tensor tensor)
             return torch.nn.functional.glu(tensor, dim);
         }

-        // Rather than spending cycles only to discover that this module has neither
-        // parameters nor buffers, just shortcut the move completely.
-        protected internal override nn.Module _to(Device device, ScalarType dtype, bool non_blocking) => this;
-        protected internal override nn.Module _to(DeviceType deviceType, int deviceIndex, bool non_blocking) => this;
-        protected internal override nn.Module _to(ScalarType dtype, bool non_blocking) => this;
-
         public long dim {get; set;}
     }
 }

src/TorchSharp/NN/Activation/Hardshrink.cs (3 additions & 9 deletions; the paired constructor lines below differ only in whitespace)

@@ -12,10 +12,10 @@ namespace Modules
     /// <summary>
     /// This class is used to represent a Hardshrink module.
     /// </summary>
-    public sealed class Hardshrink : ParamLessModule<Tensor, Tensor>
+    public sealed class Hardshrink : ParameterLessModule<Tensor, Tensor>
     {
-        internal Hardshrink(double lambda = 0.5) : base(nameof(Hardshrink))
-        {
+        internal Hardshrink(double lambda = 0.5) : base(nameof(Hardshrink))
+        {
             this.lambda = lambda;
         }

@@ -24,12 +24,6 @@ public override Tensor forward(Tensor tensor)
             return torch.nn.functional.hardshrink(tensor, lambda);
         }

-        // Rather than spending cycles only to discover that this module has neither
-        // parameters nor buffers, just shortcut the move completely.
-        protected internal override nn.Module _to(Device device, ScalarType dtype, bool non_blocking) => this;
-        protected internal override nn.Module _to(DeviceType deviceType, int deviceIndex, bool non_blocking) => this;
-        protected internal override nn.Module _to(ScalarType dtype, bool non_blocking) => this;
-
         public double lambda {get; set; }
     }
 }

src/TorchSharp/NN/Activation/Hardsigmoid.cs (1 addition & 7 deletions)

@@ -11,7 +11,7 @@ namespace Modules
     /// <summary>
     /// This class is used to represent a Hardsigmoid module.
     /// </summary>
-    public sealed class Hardsigmoid : ParamLessModule<Tensor, Tensor>
+    public sealed class Hardsigmoid : ParameterLessModule<Tensor, Tensor>
     {
         internal Hardsigmoid(bool inplace) : base(nameof(Hardsigmoid))
         {
@@ -23,12 +23,6 @@ public override Tensor forward(Tensor tensor)
             return torch.nn.functional.hardsigmoid(tensor, inplace);
         }

-        // Rather than spending cycles only to discover that this module has neither
-        // parameters nor buffers, just shortcut the move completely.
-        protected internal override nn.Module _to(Device device, ScalarType dtype, bool non_blocking) => this;
-        protected internal override nn.Module _to(DeviceType deviceType, int deviceIndex, bool non_blocking) => this;
-        protected internal override nn.Module _to(ScalarType dtype, bool non_blocking) => this;
-
         public bool inplace {get; set; }
     }
 }

src/TorchSharp/NN/Activation/Hardswish.cs (1 addition & 7 deletions)

@@ -11,7 +11,7 @@ namespace Modules
     /// <summary>
     /// This class is used to represent a Hardswish module.
     /// </summary>
-    public sealed class Hardswish : ParamLessModule<Tensor, Tensor>
+    public sealed class Hardswish : ParameterLessModule<Tensor, Tensor>
     {
         public bool inplace { get; set;}

@@ -24,12 +24,6 @@ public override Tensor forward(Tensor tensor)
         {
             return torch.nn.functional.hardswish(tensor, this.inplace);
         }
-
-        // Rather than spending cycles only to discover that this module has neither
-        // parameters nor buffers, just shortcut the move completely.
-        protected internal override nn.Module _to(Device device, ScalarType dtype, bool non_blocking) => this;
-        protected internal override nn.Module _to(DeviceType deviceType, int deviceIndex, bool non_blocking) => this;
-        protected internal override nn.Module _to(ScalarType dtype, bool non_blocking) => this;
     }
 }

src/TorchSharp/NN/Activation/Hardtanh.cs (1 addition & 7 deletions)

@@ -12,7 +12,7 @@ namespace Modules
     /// <summary>
     /// This class is used to represent a Hardtanh module.
     /// </summary>
-    public sealed class Hardtanh : ParamLessModule<Tensor, Tensor>
+    public sealed class Hardtanh : ParameterLessModule<Tensor, Tensor>
     {
         internal Hardtanh(double min_val = -1.0, double max_val = 1.0, bool inplace = false) : base(nameof(Hardtanh))
         {
@@ -31,12 +31,6 @@ public override string GetName()
             return typeof(Hardtanh).Name;
         }

-        // Rather than spending cycles only to discover that this module has neither
-        // parameters nor buffers, just shortcut the move completely.
-        protected internal override nn.Module _to(Device device, ScalarType dtype, bool non_blocking) => this;
-        protected internal override nn.Module _to(DeviceType deviceType, int deviceIndex, bool non_blocking) => this;
-        protected internal override nn.Module _to(ScalarType dtype, bool non_blocking) => this;
-
         public double min_val { get; set; }
         public double max_val { get; set; }
         public bool inplace {get; set; }

src/TorchSharp/NN/Activation/LeakyReLU.cs (1 addition & 7 deletions)

@@ -12,7 +12,7 @@ namespace Modules
     /// <summary>
     /// This class is used to represent a LeakyReLU module.
     /// </summary>
-    public sealed class LeakyReLU : ParamLessModule<Tensor, Tensor>
+    public sealed class LeakyReLU : ParameterLessModule<Tensor, Tensor>
     {
         internal LeakyReLU(double negative_slope, bool inplace) : base(nameof(LeakyReLU))
         {
@@ -25,12 +25,6 @@ public override Tensor forward(Tensor tensor)
             return torch.nn.functional.leaky_relu(tensor, negative_slope, inplace);
         }

-        // Rather than spending cycles only to discover that this module has neither
-        // parameters nor buffers, just shortcut the move completely.
-        protected internal override nn.Module _to(Device device, ScalarType dtype, bool non_blocking) => this;
-        protected internal override nn.Module _to(DeviceType deviceType, int deviceIndex, bool non_blocking) => this;
-        protected internal override nn.Module _to(ScalarType dtype, bool non_blocking) => this;
-
         public bool inplace {get; set; }
         public double negative_slope {get; set;}
     }

src/TorchSharp/NN/Activation/LogSigmoid.cs (1 addition & 7 deletions)

@@ -12,7 +12,7 @@ namespace Modules
     /// <summary>
     /// This class is used to represent a LogSigmoid module.
     /// </summary>
-    public sealed class LogSigmoid : ParamLessModule<Tensor, Tensor>
+    public sealed class LogSigmoid : ParameterLessModule<Tensor, Tensor>
     {
         internal LogSigmoid() : base(nameof(LogSigmoid))
         {
@@ -22,12 +22,6 @@ public override Tensor forward(Tensor tensor)
         {
             return torch.nn.functional.logsigmoid(tensor);
         }
-
-        // Rather than spending cycles only to discover that this module has neither
-        // parameters nor buffers, just shortcut the move completely.
-        protected internal override nn.Module _to(Device device, ScalarType dtype, bool non_blocking) => this;
-        protected internal override nn.Module _to(DeviceType deviceType, int deviceIndex, bool non_blocking) => this;
-        protected internal override nn.Module _to(ScalarType dtype, bool non_blocking) => this;
     }
 }
 public static partial class torch
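With the overrides centralized, moving a parameter-less module across devices becomes a no-op that hands back the same instance. A small usage sketch, assuming the standard torch.nn.CELU factory and an available CUDA device; the expected identity follows from the '_to' bodies shown deleted above:

using TorchSharp;
using static TorchSharp.torch;

// Construct a parameter-less activation module and "move" it.
var celu = nn.CELU(alpha: 1.0, inplace: false);
var moved = celu.to(CUDA);                         // nothing to copy: the base-class shortcut applies
bool sameInstance = ReferenceEquals(celu, moved);  // expected: true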
