
Commit 5125872

cyyever authored and pytorchmergebot committed

Fix unused assignments (pytorch#166791)

This PR cleans up unused assignments.

Pull Request resolved: pytorch#166791
Approved by: https://github.com/xmfan

1 parent c10975d commit 5125872
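
For context, the fix applied throughout this PR is the standard Python cleanup for lint warnings such as pyflakes' F841 (local variable assigned but never used): either delete the dead assignment outright or rename the unpacking target with a leading underscore so linters skip it. A minimal sketch of both idioms (the helper here is made up for illustration, not taken from the diff):

```python
def stats(xs):
    """Toy helper returning more values than every caller needs."""
    return min(xs), max(xs), sum(xs) / len(xs)

# Before: `lo` and `hi` are assigned but never read (lint: F841).
lo, hi, mean = stats([1, 2, 3])

# After: underscore-prefix the unused targets (the arity check remains)...
_lo, _hi, mean = stats([1, 2, 3])
print(mean)

# ...or drop the assignment entirely when no result is inspected.
stats([1, 2, 3])
```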

File tree

3 files changed: +18 -22 lines

test/cpp_extensions/open_registration_extension/torch_openreg/tests/test_ops.py

Lines changed: 18 additions & 20 deletions
```diff
@@ -246,7 +246,7 @@ def test_scaled_dot_product_fused_attention_overrideable_backward(self):
             max_k,
             philox_seed,
             philox_offset,
-            debug_attn_mask,
+            _debug_attn_mask,
         ) = torch.ops.aten._scaled_dot_product_fused_attention_overrideable(
             q_privateuse1, k_privateuse1, v_privateuse1, attn_bias=attn_mask_privateuse1
         )
@@ -256,25 +256,23 @@ def test_scaled_dot_product_fused_attention_overrideable_backward(self):
         )
         rand_upward_privateuse1 = rand_upward.to("openreg")
         grad_input_mask = [True, True, True, True]
-        grad_q, grad_k, grad_v, grad_attn_mask = (
-            torch.ops.aten._scaled_dot_product_fused_attention_overrideable_backward(
-                rand_upward_privateuse1,
-                q_privateuse1,
-                k_privateuse1,
-                v_privateuse1,
-                attn_mask_privateuse1,
-                grad_input_mask,
-                output,
-                logsumexp,
-                cum_seq_q,
-                cum_seq_k,
-                max_q,
-                max_k,
-                dropout_p=0.0,
-                is_causal=False,
-                philox_seed=philox_seed,
-                philox_offset=philox_offset,
-            )
+        torch.ops.aten._scaled_dot_product_fused_attention_overrideable_backward(
+            rand_upward_privateuse1,
+            q_privateuse1,
+            k_privateuse1,
+            v_privateuse1,
+            attn_mask_privateuse1,
+            grad_input_mask,
+            output,
+            logsumexp,
+            cum_seq_q,
+            cum_seq_k,
+            max_q,
+            max_k,
+            dropout_p=0.0,
+            is_causal=False,
+            philox_seed=philox_seed,
+            philox_offset=philox_offset,
         )
```
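Both edits in this test follow one observation: the forward op's `debug_attn_mask` output and the backward op's gradients were never asserted on, so the names were dead. A hedged sketch of the two idioms, with a hypothetical stub standing in for the real aten op:

```python
import torch

def fused_forward(q, k, v):
    # Hypothetical stand-in for the overrideable fused-attention op;
    # the real op returns many more values (logsumexp, seeds, etc.).
    out = torch.softmax(q @ k.transpose(-1, -2), dim=-1) @ v
    return out, out.logsumexp(dim=-1), torch.zeros(1)

q = k = v = torch.randn(2, 4, 8)

# Idiom 1: keep the unpacking (it still checks the return arity), but
# underscore the slot the test never reads.
out, logsumexp, _debug_attn_mask = fused_forward(q, k, v)

# Idiom 2: when no output is inspected at all, drop the assignment; the
# call remains as a smoke test that the op dispatches and runs.
fused_forward(q, k, v)
```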
torch/_dynamo/compiled_autograd.py

Lines changed: 0 additions & 1 deletion
```diff
@@ -755,7 +755,6 @@ def proxy_call(
         self, fn: Callable[..., Any], args: Any, output_metadata: Sequence[Any]
     ) -> Sequence[torch.Tensor]:
         """Proxies a call to fn(*args) into the graph"""
-        flat_args, _ = pytree.tree_flatten(args)
         proxy_args = pytree.tree_map(lambda e: self.to_proxy(e), args)
         proxy_out = self.fx_tracer.create_proxy(
             "call_function", fn, args=proxy_args, kwargs={}
```

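The deleted `flat_args` line was dead because `pytree.tree_map` walks the nested `args` structure itself; the separately flattened leaf list was never read. The distinction, sketched with `torch.utils._pytree` (a private module, so details may shift between releases):

```python
import torch.utils._pytree as pytree

args = ((1, 2), {"x": 3})

# tree_map visits every leaf and rebuilds the same structure; no manual
# flatten/unflatten round-trip is needed.
doubled = pytree.tree_map(lambda e: e * 2, args)
print(doubled)   # ((2, 4), {'x': 6})

# tree_flatten is only useful when the leaf list itself is consumed.
leaves, spec = pytree.tree_flatten(args)
print(leaves)    # [1, 2, 3]
print(pytree.tree_unflatten([0, 0, 0], spec))  # ((0, 0), {'x': 0})
```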
torch/_dynamo/guards.py

Lines changed: 0 additions & 1 deletion
```diff
@@ -2120,7 +2120,6 @@ def hooks_ids_fn(
             if not are_inline_hooks(hooks):
                 return None

-            pack_hook, unpack_hook = hooks
             return tuple(map(id, hooks))

         guard_hooks_ids = hooks_ids_fn(get_hooks())
```
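
The unpacking into `pack_hook, unpack_hook` was dead because `tuple(map(id, hooks))` already consumes both elements of the pair. The guard only needs the identity of the hook functions, not the functions themselves; a minimal sketch, assuming `hooks` is the `(pack_hook, unpack_hook)` pair used by saved-tensor hooks:

```python
from typing import Callable, Optional

def hooks_ids(hooks: Optional[tuple[Callable, Callable]]) -> Optional[tuple[int, ...]]:
    # Guard on the identity of the pair: if either function object is
    # replaced, the id tuple changes and the guard fails.
    if hooks is None:
        return None
    return tuple(map(id, hooks))

def pack(t): return t
def unpack(t): return t

assert hooks_ids((pack, unpack)) == (id(pack), id(unpack))
assert hooks_ids(None) is None
```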

0 commit comments