
Commit 006dd0a

Remove all unused variable assignment
1 parent 9e2fadd commit 006dd0a

28 files changed (+13 −64 lines)
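Every change in this commit follows the same pattern: a value is computed, or a return value is bound to a name, that is never read again, so the binding is dropped (and, where the right-hand side has no side effect, the whole line is removed). A minimal illustrative sketch of the pattern, not taken from the repository:

    def log_and_count(items):
        """Print each item and return how many were printed."""
        for item in items:
            print(item)
        return len(items)

    # Before: the return value is bound but never read (an unused variable).
    #     count = log_and_count(["a", "b"])
    # After: the call is kept only for its side effect; the dead binding is gone.
    log_and_count(["a", "b"])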

.ci/update_windows/update.py
Lines changed: 6 additions & 6 deletions

@@ -33,12 +33,12 @@ def pull(repo, remote_name='origin', branch='master'):
 
         user = repo.default_signature
         tree = repo.index.write_tree()
-        commit = repo.create_commit('HEAD',
-                                    user,
-                                    user,
-                                    'Merge!',
-                                    tree,
-                                    [repo.head.target, remote_master_id])
+        repo.create_commit('HEAD',
+                           user,
+                           user,
+                           'Merge!',
+                           tree,
+                           [repo.head.target, remote_master_id])
         # We need to do this or git CLI will think we are still merging.
         repo.state_cleanup()
     else:
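In pygit2, Repository.create_commit() writes the commit object and advances the reference named by its first argument ('HEAD' here), returning the new commit's Oid. The update script never reads that Oid, so binding it to commit was dead code; if the id were needed afterwards it would still be reachable through the moved ref. A hedged illustration, assuming the same pygit2 repo object as above:

    merge_oid = repo.head.target  # Oid of the merge commit created by create_commit() above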

comfy/cldm/cldm.py
Lines changed: 0 additions & 1 deletion

@@ -413,7 +413,6 @@ def forward(self, x, hint, timesteps, context, y=None, **kwargs):
         out_output = []
         out_middle = []
 
-        hs = []
         if self.num_classes is not None:
             assert y.shape[0] == x.shape[0]
             emb = emb + self.label_emb(y)

comfy/controlnet.py
Lines changed: 0 additions & 2 deletions

@@ -297,7 +297,6 @@ class ControlLoraOps:
     class Linear(torch.nn.Module, comfy.ops.CastWeightBiasOp):
         def __init__(self, in_features: int, out_features: int, bias: bool = True,
                      device=None, dtype=None) -> None:
-            factory_kwargs = {'device': device, 'dtype': dtype}
             super().__init__()
             self.in_features = in_features
             self.out_features = out_features
@@ -382,7 +381,6 @@ class control_lora_ops(ControlLoraOps, comfy.ops.manual_cast):
         self.control_model.to(comfy.model_management.get_torch_device())
         diffusion_model = model.diffusion_model
         sd = diffusion_model.state_dict()
-        cm = self.control_model.state_dict()
 
         for k in sd:
             weight = sd[k]

comfy/extra_samplers/uni_pc.py
Lines changed: 0 additions & 1 deletion

@@ -703,7 +703,6 @@ def sample(self, x, timesteps, t_start=None, t_end=None, order=3, skip_type='tim
     ):
         # t_0 = 1. / self.noise_schedule.total_N if t_end is None else t_end
         # t_T = self.noise_schedule.T if t_start is None else t_start
-        device = x.device
         steps = len(timesteps) - 1
         if method == 'multistep':
             assert steps >= order

comfy/hooks.py
Lines changed: 1 addition & 1 deletion

@@ -130,7 +130,7 @@ def add_hook_patches(self, model: 'ModelPatcher', model_options: dict, target: E
             weights = self.weights
         else:
             weights = self.weights_clip
-        k = model.add_hook_patches(hook=self, patches=weights, strength_patch=strength)
+        model.add_hook_patches(hook=self, patches=weights, strength_patch=strength)
         registered.append(self)
         return True
         # TODO: add logs about any keys that were not applied

comfy/k_diffusion/deis.py
Lines changed: 0 additions & 1 deletion

@@ -11,7 +11,6 @@
 # Transfer from the input time (sigma) used in EDM to that (t) used in DEIS.
 
 def edm2t(edm_steps, epsilon_s=1e-3, sigma_min=0.002, sigma_max=80):
-    vp_sigma = lambda beta_d, beta_min: lambda t: (np.e ** (0.5 * beta_d * (t ** 2) + beta_min * t) - 1) ** 0.5
     vp_sigma_inv = lambda beta_d, beta_min: lambda sigma: ((beta_min ** 2 + 2 * beta_d * (sigma ** 2 + 1).log()).sqrt() - beta_min) / beta_d
     vp_beta_d = 2 * (np.log(torch.tensor(sigma_min).cpu() ** 2 + 1) / epsilon_s - np.log(torch.tensor(sigma_max).cpu() ** 2 + 1)) / (epsilon_s - 1)
     vp_beta_min = np.log(torch.tensor(sigma_max).cpu() ** 2 + 1) - 0.5 * vp_beta_d

comfy/ldm/audio/dit.py
Lines changed: 1 addition & 7 deletions

@@ -158,7 +158,6 @@ def forward_from_seq_len(self, seq_len, device, dtype):
     def forward(self, t):
         # device = self.inv_freq.device
         device = t.device
-        dtype = t.dtype
 
         # t = t.to(torch.float32)
 
@@ -346,18 +345,13 @@ def forward(
 
         # determine masking
        masks = []
-        final_attn_mask = None # The mask that will be applied to the attention matrix, taking all masks into account
 
         if input_mask is not None:
             input_mask = rearrange(input_mask, 'b j -> b 1 1 j')
             masks.append(~input_mask)
 
         # Other masks will be added here later
-
-        if len(masks) > 0:
-            final_attn_mask = ~or_reduce(masks)
-
-        n, device = q.shape[-2], q.device
+        n = q.shape[-2]
 
         causal = self.causal if causal is None else causal
 

comfy/ldm/aura/mmdit.py
Lines changed: 0 additions & 1 deletion

@@ -147,7 +147,6 @@ def forward(self, c, x):
 
         bsz, seqlen1, _ = c.shape
         bsz, seqlen2, _ = x.shape
-        seqlen = seqlen1 + seqlen2
 
         cq, ck, cv = self.w1q(c), self.w1k(c), self.w1v(c)
         cq = cq.view(bsz, seqlen1, self.n_heads, self.head_dim)

comfy/ldm/genmo/joint_model/asymm_models_joint.py
Lines changed: 0 additions & 2 deletions

@@ -461,8 +461,6 @@ def prepare(
         pH, pW = H // self.patch_size, W // self.patch_size
         x = self.embed_x(x)  # (B, N, D), where N = T * H * W / patch_size ** 2
         assert x.ndim == 3
-        B = x.size(0)
-
 
         pH, pW = H // self.patch_size, W // self.patch_size
         N = T * pH * pW

comfy/ldm/hydit/controlnet.py
Lines changed: 0 additions & 3 deletions

@@ -164,9 +164,6 @@ def __init__(
             ),
         )
 
-        # Image embedding
-        num_patches = self.x_embedder.num_patches
-
        # HUnYuanDiT Blocks
        self.blocks = nn.ModuleList(
            [
