Skip to content

Commit 5c57743

Browse files
fix mypy
1 parent 195bb88 commit 5c57743

File tree

1 file changed

+11
-10
lines changed

1 file changed

+11
-10
lines changed

Tools/cases_generator/partial_evaluator_generator.py

Lines changed: 11 additions & 10 deletions
Original file line numberDiff line numberDiff line change
@@ -18,6 +18,7 @@
1818
ROOT,
1919
write_header,
2020
Emitter,
21+
TokenIterator,
2122
)
2223
from cwriter import CWriter
2324
from typing import TextIO, Iterator
@@ -48,11 +49,11 @@ def type_name(var: StackItem) -> str:
4849

4950
def var_name(var: StackItem, unused_count: int) -> tuple[str, int]:
5051
if var.name == "unused":
51-
var = f"unused_{unused_count}"
52+
name = f"unused_{unused_count}"
5253
unused_count += 1
5354
else:
54-
var = var.name
55-
return var, unused_count
55+
name = var.name
56+
return name, unused_count
5657

5758

5859
def declare_variables(uop: Uop, out: CWriter) -> None:
@@ -123,14 +124,14 @@ def emit_default(out: CWriter, uop: Uop, stack: Stack) -> None:
123124
class Tier2PEEmitter(Emitter):
124125
def __init__(self, out: CWriter):
125126
super().__init__(out)
126-
self._replacers["MATERIALIZE_INPUTS"] = self.materialize_inputs
127+
self._replacers["MATERIALIZE_INPUTS"] = self.materialize_inputs # type: ignore[assignment]
127128

128129
def materialize_inputs(
129130
self,
130131
tkn: Token,
131-
tkn_iter: Iterator[Token],
132+
tkn_iter: TokenIterator,
132133
uop: Uop,
133-
stack: Stack,
134+
storage: Storage,
134135
inst: Instruction | None,
135136
) -> None:
136137
next(tkn_iter)
@@ -194,11 +195,11 @@ def write_uop(
194195
for var in storage.inputs: # type: ignore[possibly-undefined]
195196
var.defined = False
196197
base_offset = stack.base_offset.copy()
197-
for var in reversed(uop.stack.inputs):
198-
if var.is_array():
198+
for input in reversed(uop.stack.inputs):
199+
if input.is_array():
199200
c_offset = base_offset.to_c()
200-
out.emit(f"{var.name} = &stack_pointer[{c_offset}];\n")
201-
base_offset.push(var)
201+
out.emit(f"{input.name} = &stack_pointer[{c_offset}];\n")
202+
base_offset.push(input)
202203
storage = emitter.emit_tokens(override, storage, None)
203204
out.start_line()
204205
storage.flush(out, cast_type="", extract_bits=False)

0 commit comments

Comments (0)