Skip to content

Commit ffdcd89

Browse files
Merge pull request #1398 from NiklasGustafsson/unit
Replace native module implementation with managed code.
2 parents b2bb7e8 + 93d2bdc commit ffdcd89

File tree

157 files changed

+4810
-6343
lines changed

Some content is hidden

Large Commits have some content hidden by default. Use the searchbox below for content that may be hidden.

157 files changed

+4810
-6343
lines changed

RELEASENOTES.md

Lines changed: 13 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -4,12 +4,23 @@ Releases, starting with 9/2/2021, are listed with the most recent release at the
44

55
# NuGet Version 0.104.0
66

7+
This is a big change in implementation, but not as big in API surface area. Many of the built-in modules, but not all, were re-implemented in managed code calling into native code via the functional APIs. This has several advantages:
8+
9+
1. Align with the PyTorch implementations.<br/>
10+
2. More easily expose module attributes as properties, as PyTorch does.<br/>
11+
3. In some cases, avoid native code altogether.<br/>
12+
4. The built-in modules can serve as "best practice" examples for custom module authors.<br/>
13+
714
__Breaking Changes__:
815

9-
The argument defaults for `torch.diagonal()` and `Tensor.diagonal()` arguments have been changed.
16+
The names of several arguments have been changed to align better with PyTorch naming. This may break code that passes such arguments by name; any such breakage will be caught at compile time.
1017

11-
__Bug Fixes__:
18+
The argument defaults for `torch.diagonal()` and `Tensor.diagonal()` arguments have been corrected.
19+
20+
__Issues fixed__:
1221

22+
#1397 Look into whether parameter creation from a tensor leads to incorrect dispose scope statistics. This bug was discovered during testing of the PR.<br/>
23+
#1210 Attribute omissions.<br/>
1324
#1400 There may be an error in torchvision.transforms.GaussianBlur<br/>
1425
#1402 diagonal() has incorrect default<br/>
1526

TorchSharp.sln

Lines changed: 4 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -111,10 +111,10 @@ Global
111111
{CAD9DB7F-3223-3324-884D-FA2381593DA7}.Debug|x64.ActiveCfg = Debug|x64
112112
{CAD9DB7F-3223-3324-884D-FA2381593DA7}.Release|Any CPU.ActiveCfg = Release|x64
113113
{CAD9DB7F-3223-3324-884D-FA2381593DA7}.Release|x64.ActiveCfg = Release|x64
114-
{BB811429-0DF1-3D22-B664-09C2F5A9E0AB}.Debug|Any CPU.ActiveCfg = Debug|x64
115-
{BB811429-0DF1-3D22-B664-09C2F5A9E0AB}.Debug|x64.ActiveCfg = Debug|x64
116-
{BB811429-0DF1-3D22-B664-09C2F5A9E0AB}.Release|Any CPU.ActiveCfg = Release|x64
117-
{BB811429-0DF1-3D22-B664-09C2F5A9E0AB}.Release|x64.ActiveCfg = Release|x64
114+
{E4C0DBEE-0815-311B-9065-137BB50BD793}.Debug|Any CPU.ActiveCfg = Debug|x64
115+
{E4C0DBEE-0815-311B-9065-137BB50BD793}.Debug|x64.ActiveCfg = Debug|x64
116+
{E4C0DBEE-0815-311B-9065-137BB50BD793}.Release|Any CPU.ActiveCfg = Release|x64
117+
{E4C0DBEE-0815-311B-9065-137BB50BD793}.Release|x64.ActiveCfg = Release|x64
118118
{DD652544-711E-4029-83FF-DA4A9600E6E7}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
119119
{DD652544-711E-4029-83FF-DA4A9600E6E7}.Debug|Any CPU.Build.0 = Debug|Any CPU
120120
{DD652544-711E-4029-83FF-DA4A9600E6E7}.Debug|x64.ActiveCfg = Debug|Any CPU

build/BranchInfo.props

Lines changed: 3 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -1,8 +1,8 @@
11
<Project>
22
<PropertyGroup>
33
<MajorVersion>0</MajorVersion>
4-
<MinorVersion>103</MinorVersion>
5-
<PatchVersion>1</PatchVersion>
6-
<PreviousPackageVersion>0.103.0</PreviousPackageVersion>
4+
<MinorVersion>104</MinorVersion>
5+
<PatchVersion>0</PatchVersion>
6+
<PreviousPackageVersion>0.103.1</PreviousPackageVersion>
77
</PropertyGroup>
88
</Project>

docfx/articles/modules.md

Lines changed: 3 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -84,7 +84,7 @@ To illustrate, this is the code for MobileNet from the TorchSharp examples:
8484
var modules = new List<(string, Module)>();
8585

8686
modules.Add(("conv2d-first",
87-
Conv2d(3, 32, kernelSize: 3, stride: 1, padding: 1, bias: false)));
87+
Conv2d(3, 32, kernel_size: 3, stride: 1, padding: 1, bias: false)));
8888
modules.Add(("bnrm2d-first",
8989
BatchNorm2d(32)));
9090
modules.Add(("relu-first",
@@ -110,13 +110,13 @@ To illustrate, this is the code for MobileNet from the TorchSharp examples:
110110
var stride = strides[i];
111111

112112
modules.Add(($"conv2d-{i}a",
113-
Conv2d(in_planes, in_planes, kernelSize: 3, stride: stride, padding: 1, groups: in_planes, bias: false)));
113+
Conv2d(in_planes, in_planes, kernel_size: 3, stride: stride, padding: 1, groups: in_planes, bias: false)));
114114
modules.Add(($"bnrm2d-{i}a",
115115
BatchNorm2d(in_planes)));
116116
modules.Add(($"relu-{i}a",
117117
ReLU()));
118118
modules.Add(($"conv2d-{i}b",
119-
Conv2d(in_planes, out_planes, kernelSize: 1L, stride: 1L, padding: 0L, bias: false)));
119+
Conv2d(in_planes, out_planes, kernel_size: 1L, stride: 1L, padding: 0L, bias: false)));
120120
modules.Add(($"bnrm2d-{i}b",
121121
BatchNorm2d(out_planes)));
122122
modules.Add(($"relu-{i}b",

src/Examples/AlexNet.cs

Lines changed: 8 additions & 8 deletions
Original file line numberDiff line numberDiff line change
@@ -17,19 +17,19 @@ class AlexNet : Module<Tensor, Tensor>
1717
public AlexNet(string name, int numClasses, torch.Device device = null) : base(name)
1818
{
1919
features = Sequential(
20-
("c1", Conv2d(3, 64, kernelSize: 3, stride: 2, padding: 1)),
20+
("c1", Conv2d(3, 64, kernel_size: 3, stride: 2, padding: 1)),
2121
("r1", ReLU(inplace: true)),
22-
("mp1", MaxPool2d(kernelSize: new long[] { 2, 2 })),
23-
("c2", Conv2d(64, 192, kernelSize: 3, padding: 1)),
22+
("mp1", MaxPool2d(kernel_size: new long[] { 2, 2 })),
23+
("c2", Conv2d(64, 192, kernel_size: 3, padding: 1)),
2424
("r2", ReLU(inplace: true)),
25-
("mp2", MaxPool2d(kernelSize: new long[] { 2, 2 })),
26-
("c3", Conv2d(192, 384, kernelSize: 3, padding: 1)),
25+
("mp2", MaxPool2d(kernel_size: new long[] { 2, 2 })),
26+
("c3", Conv2d(192, 384, kernel_size: 3, padding: 1)),
2727
("r3", ReLU(inplace: true)),
28-
("c4", Conv2d(384, 256, kernelSize: 3, padding: 1)),
28+
("c4", Conv2d(384, 256, kernel_size: 3, padding: 1)),
2929
("r4", ReLU(inplace: true)),
30-
("c5", Conv2d(256, 256, kernelSize: 3, padding: 1)),
30+
("c5", Conv2d(256, 256, kernel_size: 3, padding: 1)),
3131
("r5", ReLU(inplace: true)),
32-
("mp3", MaxPool2d(kernelSize: new long[] { 2, 2 })));
32+
("mp3", MaxPool2d(kernel_size: new long[] { 2, 2 })));
3333

3434
avgPool = AdaptiveAvgPool2d(new long[] { 2, 2 });
3535

src/Examples/MNIST.cs

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -105,7 +105,7 @@ internal class Model : Module<Tensor, Tensor>
105105

106106
// These don't have any parameters, so the only reason to instantiate
107107
// them is performance, since they will be used over and over.
108-
private Module<Tensor, Tensor> pool1 = MaxPool2d(kernelSize: new long[] { 2, 2 });
108+
private Module<Tensor, Tensor> pool1 = MaxPool2d(kernel_size: new long[] { 2, 2 });
109109

110110
private Module<Tensor, Tensor> relu1 = ReLU();
111111
private Module<Tensor, Tensor> relu2 = ReLU();

src/Examples/MobileNet.cs

Lines changed: 3 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -30,7 +30,7 @@ public MobileNet(string name, int numClasses, Device device = null) : base(name)
3030

3131
var modules = new List<(string, Module<Tensor, Tensor>)>();
3232

33-
modules.Add(($"conv2d-first", Conv2d(3, 32, kernelSize: 3, stride: 1, padding: 1, bias: false)));
33+
modules.Add(($"conv2d-first", Conv2d(3, 32, kernel_size: 3, stride: 1, padding: 1, bias: false)));
3434
modules.Add(($"bnrm2d-first", BatchNorm2d(32)));
3535
modules.Add(($"relu-first", ReLU()));
3636
MakeLayers(modules, 32);
@@ -53,10 +53,10 @@ private void MakeLayers(List<(string, Module<Tensor, Tensor>)> modules, long in_
5353
var out_planes = planes[i];
5454
var stride = strides[i];
5555

56-
modules.Add(($"conv2d-{i}a", Conv2d(in_planes, in_planes, kernelSize: 3, stride: stride, padding: 1, groups: in_planes, bias: false)));
56+
modules.Add(($"conv2d-{i}a", Conv2d(in_planes, in_planes, kernel_size: 3, stride: stride, padding: 1, groups: in_planes, bias: false)));
5757
modules.Add(($"bnrm2d-{i}a", BatchNorm2d(in_planes)));
5858
modules.Add(($"relu-{i}a", ReLU()));
59-
modules.Add(($"conv2d-{i}b", Conv2d(in_planes, out_planes, kernelSize: 1L, stride: 1L, padding: 0L, bias: false)));
59+
modules.Add(($"conv2d-{i}b", Conv2d(in_planes, out_planes, kernel_size: 1L, stride: 1L, padding: 0L, bias: false)));
6060
modules.Add(($"bnrm2d-{i}b", BatchNorm2d(out_planes)));
6161
modules.Add(($"relu-{i}b", ReLU()));
6262

src/Examples/ResNet.cs

Lines changed: 8 additions & 8 deletions
Original file line numberDiff line numberDiff line change
@@ -72,7 +72,7 @@ public ResNet(string name, Func<string, int,int,int,Module<Tensor, Tensor>> bloc
7272
{
7373
var modules = new List<(string, Module<Tensor, Tensor>)>();
7474

75-
modules.Add(($"conv2d-first", Conv2d(3, 64, kernelSize: 3, stride: 1, padding: 1, bias: false)));
75+
modules.Add(($"conv2d-first", Conv2d(3, 64, kernel_size: 3, stride: 1, padding: 1, bias: false)));
7676
modules.Add(($"bnrm2d-first", BatchNorm2d(64)));
7777
modules.Add(($"relu-first", ReLU(inplace:true)));
7878
MakeLayer(modules, block, expansion, 64, num_blocks[0], 1);
@@ -124,17 +124,17 @@ public BasicBlock (string name, int in_planes, int planes, int stride) : base(na
124124
{
125125
var modules = new List<(string, Module<Tensor, Tensor>)>();
126126

127-
modules.Add(($"{name}-conv2d-1", Conv2d(in_planes, planes, kernelSize: 3, stride: stride, padding: 1, bias: false)));
127+
modules.Add(($"{name}-conv2d-1", Conv2d(in_planes, planes, kernel_size: 3, stride: stride, padding: 1, bias: false)));
128128
modules.Add(($"{name}-bnrm2d-1", BatchNorm2d(planes)));
129129
modules.Add(($"{name}-relu-1", ReLU(inplace: true)));
130-
modules.Add(($"{name}-conv2d-2", Conv2d(planes, planes, kernelSize: 3, stride: 1, padding: 1, bias: false)));
130+
modules.Add(($"{name}-conv2d-2", Conv2d(planes, planes, kernel_size: 3, stride: 1, padding: 1, bias: false)));
131131
modules.Add(($"{name}-bnrm2d-2", BatchNorm2d(planes)));
132132

133133
layers = Sequential(modules);
134134

135135
if (stride != 1 || in_planes != expansion*planes) {
136136
shortcut = Sequential(
137-
($"{name}-conv2d-3", Conv2d(in_planes, expansion * planes, kernelSize: 1, stride: stride, bias: false)),
137+
($"{name}-conv2d-3", Conv2d(in_planes, expansion * planes, kernel_size: 1, stride: stride, bias: false)),
138138
($"{name}-bnrm2d-3", BatchNorm2d(expansion * planes)));
139139
}
140140
else {
@@ -175,20 +175,20 @@ public Bottleneck(string name, int in_planes, int planes, int stride) : base(nam
175175
{
176176
var modules = new List<(string, Module<Tensor, Tensor>)>();
177177

178-
modules.Add(($"{name}-conv2d-1", Conv2d(in_planes, planes, kernelSize: 1, bias: false)));
178+
modules.Add(($"{name}-conv2d-1", Conv2d(in_planes, planes, kernel_size: 1, bias: false)));
179179
modules.Add(($"{name}-bnrm2d-1", BatchNorm2d(planes)));
180180
modules.Add(($"{name}relu-1", ReLU(inplace:true)));
181-
modules.Add(($"{name}-conv2d-2", Conv2d(planes, planes, kernelSize: 3, stride: stride, padding: 1, bias: false)));
181+
modules.Add(($"{name}-conv2d-2", Conv2d(planes, planes, kernel_size: 3, stride: stride, padding: 1, bias: false)));
182182
modules.Add(($"{name}-bnrm2d-2", BatchNorm2d(planes)));
183183
modules.Add(($"{name}relu-2", ReLU(inplace: true)));
184-
modules.Add(($"{name}-conv2d-3", Conv2d(planes, expansion * planes, kernelSize: 1, bias: false)));
184+
modules.Add(($"{name}-conv2d-3", Conv2d(planes, expansion * planes, kernel_size: 1, bias: false)));
185185
modules.Add(($"{name}-bnrm2d-3", BatchNorm2d(expansion * planes)));
186186

187187
layers = Sequential(modules);
188188

189189
if (stride != 1 || in_planes != expansion * planes) {
190190
shortcut = Sequential(
191-
($"{name}-conv2d-4", Conv2d(in_planes, expansion * planes, kernelSize: 1, stride: stride, bias: false)),
191+
($"{name}-conv2d-4", Conv2d(in_planes, expansion * planes, kernel_size: 1, stride: stride, bias: false)),
192192
($"{name}-bnrm2d-4", BatchNorm2d(expansion * planes)));
193193
} else {
194194
shortcut = Sequential();

src/Examples/SpeechCommands.cs

Lines changed: 4 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -235,16 +235,16 @@ internal class M5 : Module<Tensor, Tensor>
235235

236236
public M5(string name, int n_input = 1, int n_output = 35, int stride = 16, int n_channel = 32) : base(name)
237237
{
238-
conv1 = nn.Conv1d(n_input, n_channel, kernelSize: 80, stride: stride);
238+
conv1 = nn.Conv1d(n_input, n_channel, kernel_size: 80, stride: stride);
239239
bn1 = nn.BatchNorm1d(n_channel);
240240
pool1 = nn.MaxPool1d(4);
241-
conv2 = nn.Conv1d(n_channel, n_channel, kernelSize: 3);
241+
conv2 = nn.Conv1d(n_channel, n_channel, kernel_size: 3);
242242
bn2 = nn.BatchNorm1d(n_channel);
243243
pool2 = nn.MaxPool1d(4);
244-
conv3 = nn.Conv1d(n_channel, 2 * n_channel, kernelSize: 3);
244+
conv3 = nn.Conv1d(n_channel, 2 * n_channel, kernel_size: 3);
245245
bn3 = nn.BatchNorm1d(2 * n_channel);
246246
pool3 = nn.MaxPool1d(4);
247-
conv4 = nn.Conv1d(2 * n_channel, 2 * n_channel, kernelSize: 3);
247+
conv4 = nn.Conv1d(2 * n_channel, 2 * n_channel, kernel_size: 3);
248248
bn4 = nn.BatchNorm1d(2 * n_channel);
249249
pool4 = nn.MaxPool1d(4);
250250
fc1 = nn.Linear(2 * n_channel, n_output);

src/Examples/VGG.cs

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -38,9 +38,9 @@ public VGG(string name, int numClasses, Device device = null) : base(name)
3838
for (var i = 0; i < channels.Length; i++) {
3939

4040
if (channels[i] == 0) {
41-
modules.Add(($"MaxPool2d-{i}a", MaxPool2d(kernelSize: 2, stride: 2)));
41+
modules.Add(($"MaxPool2d-{i}a", MaxPool2d(kernel_size: 2, stride: 2)));
4242
} else {
43-
modules.Add(($"conv2d-{i}a", Conv2d(in_channels, channels[i], kernelSize: 3, padding: 1)));
43+
modules.Add(($"conv2d-{i}a", Conv2d(in_channels, channels[i], kernel_size: 3, padding: 1)));
4444
modules.Add(($"bnrm2d-{i}a", BatchNorm2d(channels[i])));
4545
modules.Add(($"relu-{i}b", ReLU(inplace: true)));
4646
in_channels = channels[i];

0 commit comments

Comments
 (0)