
Commit c32a467

Merge pull request #1408 from NiklasGustafsson/unit
Adapted to some API Compat complaints.
2 parents ffdcd89 + 3e77e9f commit c32a467

16 files changed: +164 -13 lines changed


src/TorchSharp/NN/Activation/GELU.cs

Lines changed: 20 additions & 2 deletions
@@ -32,11 +32,19 @@ public static partial class torch
     {
         public static partial class nn
         {
+            /// <summary>
+            /// Gaussian Error Linear Units
+            /// </summary>
+            public static GELU GELU()
+            {
+                return new GELU(false);
+            }
+
             /// <summary>
             /// Gaussian Error Linear Units
             /// </summary>
             /// <param name="inplace">Do the operation in-place. Default: False</param>
-            public static GELU GELU(bool inplace = false)
+            public static GELU GELU(bool inplace)
             {
                 return new GELU(inplace);
             }
@@ -48,10 +56,20 @@ public static partial class functional
             /// </summary>
             /// <param name="x">The input tensor</param>
             /// <param name="inplace">Do the operation in-place. Default: False</param>
-            public static Tensor gelu(Tensor x, bool inplace = false)
+            public static Tensor gelu(Tensor x, bool inplace)
             {
                 return inplace ? x.gelu_().alias() : x.gelu();
             }
+
+            /// <summary>
+            /// Gaussian Error Linear Units
+            /// </summary>
+            /// <param name="x">The input tensor</param>
+            /// <remarks>The defaulting of 'inplace' to 'false' is implemented as an overload to avoid a breaking change.</remarks>
+            public static Tensor gelu(Tensor x)
+            {
+                return gelu(x, false);
+            }
         }
     }
 }
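
For context, a minimal usage sketch (hypothetical, not part of this commit) of how the split GELU overloads are expected to be called, assuming the usual TorchSharp namespaces; existing call sites that relied on the old default simply bind to the new parameterless overloads:

// Hypothetical usage sketch based on the signatures shown above; not from the commit.
using TorchSharp;
using static TorchSharp.torch;

class GeluUsageSketch
{
    static void Main()
    {
        var x = rand(4, 4);

        // Module form: the parameterless factory replaces the old 'inplace = false' default.
        var gelu = nn.GELU();
        var y1 = gelu.forward(x);

        // Explicit in-place form binds to the (bool) overload.
        var geluInPlace = nn.GELU(true);
        var y2 = geluInPlace.forward(x);

        // Functional form: gelu(x) forwards to gelu(x, false).
        var y3 = nn.functional.gelu(x);
        var y4 = nn.functional.gelu(x, inplace: true);
    }
}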

src/TorchSharp/NN/Activation/Hardshrink.cs

Lines changed: 9 additions & 0 deletions
@@ -57,6 +57,15 @@ public static Tensor hardshrink(Tensor x, double lambda = 0.5)
                 if (result == IntPtr.Zero) { torch.CheckForErrors(); }
                 return new Tensor(result);
             }
+
+            /// <summary>
+            /// Hardshrink
+            /// </summary>
+            /// <param name="x">The input tensor</param>
+            /// <param name="lambda">The λ value for the Hardshrink formulation. Default: 0.5</param>
+            /// <remarks>Only here for backward compatibility.</remarks>
+            [Obsolete("Not using the PyTorch naming convention.", false)]
+            public static Tensor Hardshrink(Tensor x, double lambda = 0.5) => hardshrink(x, lambda);
         }
     }
 }
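
The Pascal-cased Hardshrink kept here is a thin forwarding alias marked [Obsolete]; a hypothetical sketch of what that means at a call site (names such as HardshrinkAliasSketch are illustrative, not from the commit):

// Hypothetical call-site sketch; the alias forwards to the PyTorch-style name.
using TorchSharp;
using static TorchSharp.torch;

class HardshrinkAliasSketch
{
    static void Main()
    {
        var x = randn(8);

        // Preferred, PyTorch-style lower-case name.
        var a = nn.functional.hardshrink(x, lambda: 0.25);

        // Deprecated alias kept for backward compatibility; identical result,
        // but the compiler reports an obsolescence (CS0618) warning here.
#pragma warning disable CS0618
        var b = nn.functional.Hardshrink(x, lambda: 0.25);
#pragma warning restore CS0618
    }
}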

src/TorchSharp/NN/Activation/Hardtanh.cs

Lines changed: 11 additions & 0 deletions
@@ -67,6 +67,17 @@ public static Tensor hardtanh(Tensor x, double min_val = -1.0, double max_val =
             {
                 return inplace ? x.hardtanh_(min_val, max_val).alias() : x.hardtanh(min_val, max_val);
             }
+
+            /// <summary>
+            /// Hardtanh
+            /// </summary>
+            /// <param name="x">The input tensor</param>
+            /// <param name="min_val">Minimum value of the linear region range.</param>
+            /// <param name="max_val">Maximum value of the linear region range.</param>
+            /// <param name="inplace">Do the operation in-place</param>
+            /// <remarks>Only here for backward compatibility.</remarks>
+            [Obsolete("Not using the PyTorch naming convention.", false)]
+            public static Tensor Hardtanh(Tensor x, double min_val = -1.0, double max_val = 1.0, bool inplace = false) => hardtanh(x, min_val, max_val, inplace);
         }
     }
 }

src/TorchSharp/NN/Activation/Mish.cs

Lines changed: 16 additions & 1 deletion
@@ -32,11 +32,19 @@ public static partial class torch
     {
         public static partial class nn
         {
+            /// <summary>
+            /// A Self Regularized Non-Monotonic Neural Activation Function.
+            /// </summary>
+            public static Mish Mish()
+            {
+                return new Mish(false);
+            }
+
             /// <summary>
             /// A Self Regularized Non-Monotonic Neural Activation Function.
             /// </summary>
             /// <param name="inplace">Do the operation in-place. Default: False</param>
-            public static Mish Mish(bool inplace = false)
+            public static Mish Mish(bool inplace)
             {
                 return new Mish(inplace);
             }
@@ -54,6 +62,13 @@ public static Tensor mish(Tensor x, bool inplace = false)
                 using var t2 = t1.tanh();
                 return inplace ? x.mul_(t2).alias() : x.mul(t2);
             }
+
+            /// <summary>
+            /// A Self Regularized Non-Monotonic Neural Activation Function.
+            /// </summary>
+            /// <param name="x">The input tensor</param>
+            [Obsolete("Not using the PyTorch naming convention.", false)]
+            public static Tensor Mish(Tensor x) => mish(x, false);
         }
     }
 }

src/TorchSharp/NN/Activation/SiLU.cs

Lines changed: 9 additions & 1 deletion
@@ -39,7 +39,15 @@ public static partial class nn
             /// <summary>
             /// Sigmoid-Weighted Linear Unit
             /// </summary>
-            public static SiLU SiLU(bool inplace = false)
+            public static SiLU SiLU()
+            {
+                return new SiLU(false);
+            }
+
+            /// <summary>
+            /// Sigmoid-Weighted Linear Unit
+            /// </summary>
+            public static SiLU SiLU(bool inplace)
             {
                 return new SiLU(inplace);
             }

src/TorchSharp/NN/Activation/Sigmoid.cs

Lines changed: 20 additions & 1 deletion
@@ -31,6 +31,15 @@ public static partial class torch
     {
         public static partial class nn
         {
+            /// <summary>
+            /// Sigmoid activation
+            /// </summary>
+            /// <returns></returns>
+            public static Sigmoid Sigmoid()
+            {
+                return new Sigmoid(false);
+            }
+
             /// <summary>
             /// Sigmoid activation
             /// </summary>
@@ -49,10 +58,20 @@ public static partial class functional
             /// <param name="x">The input tensor</param>
             /// <param name="inplace">Do the operation in-place. Default: False</param>
             /// <returns></returns>
-            public static Tensor sigmoid(Tensor x, bool inplace = false)
+            public static Tensor sigmoid(Tensor x, bool inplace)
             {
                 return inplace ? x.sigmoid_().alias() : x.sigmoid();
             }
+
+            /// <summary>
+            /// Sigmoid activation
+            /// </summary>
+            /// <param name="x">The input tensor</param>
+            /// <remarks>The defaulting of 'inplace' to 'false' is implemented as an overload to avoid a breaking change.</remarks>
+            public static Tensor sigmoid(Tensor x)
+            {
+                return sigmoid(x, false);
+            }
         }
     }
 }

src/TorchSharp/NN/Activation/Softplus.cs

Lines changed: 5 additions & 5 deletions
@@ -14,7 +14,7 @@ namespace Modules
         /// </summary>
         public sealed class Softplus : ParameterLessModule<Tensor, Tensor>
         {
-            internal Softplus(int beta = 1, int threshold = 20) : base(nameof(Softplus))
+            internal Softplus(double beta = 1, double threshold = 20) : base(nameof(Softplus))
             {
                 this.beta = beta;
                 this.threshold = threshold;
@@ -25,8 +25,8 @@ public override Tensor forward(Tensor tensor)
                 return torch.nn.functional.softplus(tensor, beta, threshold);
             }
 
-            public int beta {get; set;}
-            public int threshold {get; set;}
+            public double beta {get; set;}
+            public double threshold {get; set;}
         }
     }
 
@@ -40,7 +40,7 @@ public static partial class nn
             /// <param name="beta">The β value for the Softplus formulation.</param>
             /// <param name="threshold">Values above this revert to a linear function</param>
             /// <returns></returns>
-            public static Softplus Softplus(int beta = 1, int threshold = 20)
+            public static Softplus Softplus(double beta = 1, double threshold = 20)
             {
                 return new Softplus(beta, threshold);
             }
@@ -54,7 +54,7 @@ public static partial class functional
             /// <param name="beta">The β value for the Softplus formulation.</param>
             /// <param name="threshold">Values above this revert to a linear function</param>
             /// <returns></returns>
-            public static Tensor softplus(Tensor x, int beta = 1, int threshold = 20)
+            public static Tensor softplus(Tensor x, double beta = 1, double threshold = 20)
             {
                 return x.softplus(beta, threshold);
             }
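
Widening beta and threshold from int to double means fractional values are now expressible in both the module and functional forms; a hypothetical sketch (not part of the commit) under the usual TorchSharp namespaces:

// Hypothetical usage sketch for the double-typed Softplus parameters.
using TorchSharp;
using static TorchSharp.torch;

class SoftplusSketch
{
    static void Main()
    {
        var x = randn(10);

        // Module form: fractional beta/threshold could not be expressed with the old int signature.
        var softplus = nn.Softplus(beta: 0.5, threshold: 15.0);
        var y1 = softplus.forward(x);

        // Functional form with the same double-typed parameters.
        var y2 = nn.functional.softplus(x, beta: 0.5, threshold: 15.0);
    }
}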

src/TorchSharp/NN/Activation/Softshrink.cs

Lines changed: 8 additions & 0 deletions
@@ -57,6 +57,14 @@ public static Tensor softshrink(Tensor x, double lambda = 0.5)
                 if (result == IntPtr.Zero) { torch.CheckForErrors(); }
                 return new Tensor(result);
             }
+
+            /// <summary>
+            /// Softshrink
+            /// </summary>
+            /// <param name="x">The input tensor</param>
+            /// <param name="lambda">The λ value for the Softshrink formulation. Default: 0.5</param>
+            [Obsolete("Not using the PyTorch naming convention.", false)]
+            public static Tensor Softshrink(Tensor x, double lambda = 0.5) => softshrink(x, lambda);
         }
     }
 }

src/TorchSharp/NN/Activation/Softsign.cs

Lines changed: 7 additions & 0 deletions
@@ -54,6 +54,13 @@ public static Tensor softsign(Tensor x, bool inplace = false)
                 using var y = 1 + abs;
                 return inplace ? x.div_(y).alias() : x.div(y);
             }
+
+            /// <summary>
+            /// Softsign
+            /// </summary>
+            /// <param name="x">The input tensor</param>
+            [Obsolete("Not using the PyTorch naming convention.", false)]
+            public static Tensor Softsign(Tensor x) => softsign(x, false);
         }
     }
 }

src/TorchSharp/NN/Activation/Tanh.cs

Lines changed: 9 additions & 0 deletions
@@ -37,6 +37,15 @@ public static partial class torch
     {
         public static partial class nn
        {
+            /// <summary>
+            /// Tanh activation
+            /// </summary>
+            /// <returns></returns>
+            public static Tanh Tanh()
+            {
+                return new Tanh(false);
+            }
+
             /// <summary>
             /// Tanh activation
             /// </summary>
