|
18 | 18 | ROOT, |
19 | 19 | write_header, |
20 | 20 | Emitter, |
| 21 | + TokenIterator, |
21 | 22 | ) |
22 | 23 | from cwriter import CWriter |
23 | 24 | from typing import TextIO, Iterator |
@@ -48,11 +49,11 @@ def type_name(var: StackItem) -> str: |
48 | 49 |
|
def var_name(var: StackItem, unused_count: int) -> tuple[str, int]:
    """Return the C variable name to use for *var*.

    "unused" stack slots get a unique synthesized name
    (``unused_<counter>``); every other slot keeps its own name.
    The (possibly incremented) counter is returned alongside the name
    so the caller can thread it through successive calls.
    """
    if var.name != "unused":
        return var.name, unused_count
    return f"unused_{unused_count}", unused_count + 1
56 | 57 |
|
57 | 58 |
|
58 | 59 | def declare_variables(uop: Uop, out: CWriter) -> None: |
@@ -123,14 +124,14 @@ def emit_default(out: CWriter, uop: Uop, stack: Stack) -> None: |
123 | 124 | class Tier2PEEmitter(Emitter): |
    def __init__(self, out: CWriter):
        # Initialize the base Emitter with the output writer.
        super().__init__(out)
        # Route the MATERIALIZE_INPUTS pseudo-token to this emitter's own
        # handler; the base class's replacer table expects a different
        # callable signature, hence the suppressed mypy assignment error.
        self._replacers["MATERIALIZE_INPUTS"] = self.materialize_inputs  # type: ignore[assignment]
127 | 128 |
|
128 | 129 | def materialize_inputs( |
129 | 130 | self, |
130 | 131 | tkn: Token, |
131 | | - tkn_iter: Iterator[Token], |
| 132 | + tkn_iter: TokenIterator, |
132 | 133 | uop: Uop, |
133 | | - stack: Stack, |
| 134 | + storage: Storage, |
134 | 135 | inst: Instruction | None, |
135 | 136 | ) -> None: |
136 | 137 | next(tkn_iter) |
@@ -194,11 +195,11 @@ def write_uop( |
194 | 195 | for var in storage.inputs: # type: ignore[possibly-undefined] |
195 | 196 | var.defined = False |
196 | 197 | base_offset = stack.base_offset.copy() |
197 | | - for var in reversed(uop.stack.inputs): |
198 | | - if var.is_array(): |
| 198 | + for input in reversed(uop.stack.inputs): |
| 199 | + if input.is_array(): |
199 | 200 | c_offset = base_offset.to_c() |
200 | | - out.emit(f"{var.name} = &stack_pointer[{c_offset}];\n") |
201 | | - base_offset.push(var) |
| 201 | + out.emit(f"{input.name} = &stack_pointer[{c_offset}];\n") |
| 202 | + base_offset.push(input) |
202 | 203 | storage = emitter.emit_tokens(override, storage, None) |
203 | 204 | out.start_line() |
204 | 205 | storage.flush(out, cast_type="", extract_bits=False) |
|
0 commit comments