Skip to content

Commit 0fddb4a

Browse files
committed
Rework MPS randn fix, add randn_like fix
torch.manual_seed() already sets a CPU generator, so there is no reason to create a CPU generator manually. torch.randn_like also needs an MPS fix for k-diffusion, but since a torch hijack with randn_like already exists, it can also be used for that.
1 parent 4d5f169 commit 0fddb4a

File tree

2 files changed

+8
-15
lines changed

2 files changed

+8
-15
lines changed

modules/devices.py

Lines changed: 3 additions & 12 deletions
Original file line numberDiff line numberDiff line change
@@ -66,24 +66,15 @@ def enable_tf32():
6666

6767

6868
def randn(seed, shape):
69-
# Pytorch currently doesn't handle setting randomness correctly when the metal backend is used.
70-
if device.type == 'mps':
71-
generator = torch.Generator(device=cpu)
72-
generator.manual_seed(seed)
73-
noise = torch.randn(shape, generator=generator, device=cpu).to(device)
74-
return noise
75-
7669
torch.manual_seed(seed)
70+
if device.type == 'mps':
71+
return torch.randn(shape, device=cpu).to(device)
7772
return torch.randn(shape, device=device)
7873

7974

8075
def randn_without_seed(shape):
81-
# Pytorch currently doesn't handle setting randomness correctly when the metal backend is used.
8276
if device.type == 'mps':
83-
generator = torch.Generator(device=cpu)
84-
noise = torch.randn(shape, generator=generator, device=cpu).to(device)
85-
return noise
86-
77+
return torch.randn(shape, device=cpu).to(device)
8778
return torch.randn(shape, device=device)
8879

8980

modules/sd_samplers.py

Lines changed: 5 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -365,7 +365,10 @@ def randn_like(self, x):
365365
if noise.shape == x.shape:
366366
return noise
367367

368-
return torch.randn_like(x)
368+
if x.device.type == 'mps':
369+
return torch.randn_like(x, device=devices.cpu).to(x.device)
370+
else:
371+
return torch.randn_like(x)
369372

370373

371374
# MPS fix for randn in torchsde
@@ -429,8 +432,7 @@ def initialize(self, p):
429432
self.model_wrap.step = 0
430433
self.eta = p.eta or opts.eta_ancestral
431434

432-
if self.sampler_noises is not None:
433-
k_diffusion.sampling.torch = TorchHijack(self.sampler_noises)
435+
k_diffusion.sampling.torch = TorchHijack(self.sampler_noises if self.sampler_noises is not None else [])
434436

435437
extra_params_kwargs = {}
436438
for param_name in self.extra_params:

0 commit comments

Comments (0)