Skip to content

Commit fa9c07d

Browse files
committed
No longer call DetachFromDisposeScope in set_grad
1 parent 7ecf737 commit fa9c07d

File tree

3 files changed

+7
-6
lines changed

3 files changed

+7
-6
lines changed

RELEASENOTES.md

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -9,6 +9,7 @@ __Breaking Changes__:
99
__API Changes__:
1010

1111
- #1291 `Tensor.grad()` and `Tensor.set_grad()` have been replaced by a new property `Tensor.grad`.
12+
- A potential memory leak caused by `set_grad` has been resolved.
1213

1314
__Bug Fixes__:
1415

src/TorchSharp/NN/Module.cs

Lines changed: 6 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -254,14 +254,15 @@ private void _toEpilog(ScalarType? dtype, Device device)
254254
// disable grad we would need to call .detach() on the moved tensor.
255255
using (var d = torch.no_grad()) {
256256
p = new Parameter(
257-
param.to(paramType, device ?? param.device).DetachFromDisposeScope(), param.requires_grad)
258-
.DetachFromDisposeScope() as Parameter;
257+
data: param.to(paramType, device ?? param.device),
258+
requires_grad: param.requires_grad);
259+
_ = p.DetachFromDisposeScope();
259260

260261
// Copy the gradient over as well, if it exists
261262
if (grad is not null) {
262-
p.grad = grad.to(paramType, device ?? param.device)
263-
.with_requires_grad(grad.requires_grad)
264-
.MoveToOtherDisposeScope(p);
263+
using var newGrad = grad.to(paramType, device ?? param.device)
264+
.with_requires_grad(grad.requires_grad);
265+
p.grad = newGrad;
265266
}
266267

267268
// Dispose the param

src/TorchSharp/Tensor/Tensor.cs

Lines changed: 0 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1352,7 +1352,6 @@ public Tensor? grad {
13521352
return new Tensor(res);
13531353
}
13541354
set {
1355-
_ = value?.DetachFromDisposeScope();
13561355
NativeMethods.THSTensor_set_grad(Handle, value?.Handle ?? IntPtr.Zero);
13571356
CheckForErrors();
13581357
}

0 commit comments

Comments
 (0)