Skip to content

Commit 2e7befc

Browse files
Pyre Bot Jr. and facebook-github-bot
authored and committed
upgrade pyre version in fbcode/executorch - batch 1
Differential Revision: D64977682
1 parent e93ad5f commit 2e7befc

File tree

37 files changed

+76
-34
lines changed

37 files changed

+76
-34
lines changed

backends/cadence/aot/quantizer/fusion_pass.py

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -331,6 +331,7 @@ def __init__(self, patterns) -> None:
331331
# pyre-ignore[4]: Parameter `patterns` of class `QuantFusion` has no type specified
332332
self.patterns = patterns
333333

334+
# pyre-fixme[7]: Expected `PassResult` but got implicit return value of `None`.
334335
def call(self, graph_module: fx.GraphModule) -> PassResult: # noqa: C901
335336
for pattern in self.patterns:
336337
fused_partitions = find_sequential_partitions_aten(
@@ -453,7 +454,6 @@ def call(self, graph_module: fx.GraphModule) -> PassResult: # noqa: C901
453454

454455
legalize_graph(graph_module)
455456
graph_module.graph.eliminate_dead_code()
456-
# pyre-fixme[7]: Incompatible return type
457457
graph_module.recompile()
458458

459459
@classmethod
@@ -463,7 +463,7 @@ def is_fused(cls, nodes) -> bool:
463463

464464
@classmethod
465465
# pyre-ignore[2]: Parameter `nodes` has no type specified
466+
# pyre-fixme[7]: Expected `bool` but got implicit return value of `None`.
466467
def mark_fused(cls, nodes) -> bool:
467468
for n in nodes:
468-
# pyre-fixme[7]: Incompatible return type
469469
n.meta["QuantFusion"] = True

backends/vulkan/_passes/insert_prepack_nodes.py

Lines changed: 2 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -64,8 +64,9 @@ def is_non_weight_param_tensor(node: torch.fx.Node) -> bool:
6464

6565
for user in node.users:
6666
if user.op == "call_function" and (
67-
# pyre-ignore [16]
6867
user.target in USES_WEIGHTS
68+
# pyre-fixme[16]: Item `Callable` of `(...) -> Any | str` has no
69+
# attribute `name`.
6970
or user.target.name() in USES_WEIGHTS
7071
):
7172
return False

backends/vulkan/runtime/gen_vulkan_spv.py

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -520,6 +520,7 @@ def preprocess(
520520
last_indent = input_indent
521521

522522
while blank_lines != 0:
523+
# pyre-fixme[61]: `python_indent` is undefined, or not always defined.
523524
python_lines.append(python_indent + "print(file=OUT_STREAM)")
524525
blank_lines -= 1
525526

backends/vulkan/test/test_vulkan_delegate.py

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -33,6 +33,7 @@
3333

3434

3535
class TestBackends(unittest.TestCase):
36+
# pyre-fixme[11]: Annotation `EdgeCompileConfig` is not defined as a type.
3637
_edge_compile_config: EdgeCompileConfig = EdgeCompileConfig(
3738
_skip_dim_order=True, # TODO(T182928844): Delegate dim order op to backend.
3839
)

backends/xnnpack/operators/quant_params.py

Lines changed: 2 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -113,10 +113,12 @@ def quantize_tensor(self, tensor: torch.Tensor) -> torch.Tensor:
113113
), f"Not expecting per channel group quantization, got q dtype: {self.dtype}, tensor.dtype {tensor.dtype}"
114114
assert (
115115
tensor.shape[self.axis] == cast(torch.Tensor, self.scale).shape[0]
116+
# pyre-fixme[16]: Item `float` of `float | Tensor` has no attribute `shape`.
116117
), f"Invalid size of per channel quantization scales, axis: {self.axis}, scale size: {self.scale.shape}, tensor shape: {tensor.shape}"
117118

118119
assert (
119120
tensor.shape[self.axis] == cast(torch.Tensor, self.zp).shape[0]
121+
# pyre-fixme[16]: Item `float` of `float | Tensor` has no attribute `shape`.
120122
), f"Invalid size of per channel quantization zero-points, axis: {self.axis}, zp size: {self.zp.shape}, tensor shape: {tensor.shape}"
121123

122124
# Assuming folded quant weights

backends/xnnpack/test/models/mobilebert.py

Lines changed: 0 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -12,7 +12,6 @@
1212

1313

1414
class TestMobilebert(unittest.TestCase):
15-
# pyre-ignore
1615
mobilebert = MobileBertModel(MobileBertConfig()).eval()
1716
example_inputs = (torch.tensor([[101, 7592, 1010, 2026, 3899, 2003, 10140, 102]]),)
1817
supported_ops = {

backends/xnnpack/test/test_xnnpack_utils.py

Lines changed: 0 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -240,7 +240,6 @@ def forward(self, *args):
240240

241241
# Test the model with executor
242242
executorch_module = _load_for_executorch_from_buffer(executorch_program.buffer)
243-
# pyre-fixme[16]: Module `pytree` has no attribute `tree_flatten`.
244243
inputs_flattened, _ = tree_flatten(sample_inputs)
245244

246245
model_output = executorch_module.run_method("forward", tuple(inputs_flattened))
@@ -452,7 +451,6 @@ def forward(self, x):
452451
)
453452

454453
executorch_module = _load_for_executorch_from_buffer(executorch_program.buffer)
455-
# pyre-fixme[16]: Module `pytree` has no attribute `tree_flatten`.
456454
inputs_flattened, _ = tree_flatten(example_inputs)
457455

458456
model_output = executorch_module.run_method("forward", tuple(inputs_flattened))

backends/xnnpack/test/tester/tester.py

Lines changed: 3 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -208,6 +208,7 @@ def graph_module(self) -> str:
208208

209209
@register_stage
210210
class ToEdge(Stage):
211+
# pyre-fixme[11]: Annotation `EdgeCompileConfig` is not defined as a type.
211212
def __init__(self, edge_compile_config: Optional[EdgeCompileConfig] = None):
212213
self.edge_compile_conf = (
213214
edge_compile_config or get_xnnpack_edge_compile_config()
@@ -262,6 +263,7 @@ def run(
262263
if self.pass_list:
263264
assert isinstance(self.pass_list, list)
264265
for pass_ in self.pass_list:
266+
# pyre-fixme[45]: `Callable` cannot be instantiated.
265267
transformed_ep = _transform(transformed_ep, pass_())
266268

267269
if self.pass_functions:
@@ -337,6 +339,7 @@ def graph_module(self) -> str:
337339
class ToExecutorch(Stage):
338340
def __init__(
339341
self,
342+
# pyre-fixme[11]: Annotation `ExecutorchBackendConfig` is not defined as a type.
340343
config: Optional[ExecutorchBackendConfig] = None,
341344
):
342345
self.config = config or ExecutorchBackendConfig(

backends/xnnpack/utils/configs.py

Lines changed: 2 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -14,6 +14,7 @@
1414
### XNNPACK Configs ###
1515
def get_xnnpack_edge_compile_config(
1616
skip_dim_order: bool = False,
17+
# pyre-fixme[11]: Annotation `EdgeCompileConfig` is not defined as a type.
1718
) -> exir.EdgeCompileConfig:
1819
return exir.EdgeCompileConfig(
1920
_check_ir_validity=False, _skip_dim_order=skip_dim_order
@@ -27,6 +28,7 @@ def get_transform_passes(additional_passes=None) -> List[PassType]:
2728

2829
def get_xnnpack_executorch_backend_config(
2930
additional_passes=None,
31+
# pyre-fixme[11]: Annotation `ExecutorchBackendConfig` is not defined as a type.
3032
) -> exir.ExecutorchBackendConfig:
3133
additional_passes = additional_passes if additional_passes else []
3234
return exir.ExecutorchBackendConfig(

devtools/bundled_program/core.py

Lines changed: 4 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -323,6 +323,8 @@ def _assert_valid_bundle(
323323
), "The input tensor {} dtype shall be {}, but now is {}".format(
324324
cur_plan_test_inputs[j],
325325
self._get_input_dtype(program, program_plan_id, j),
326+
# pyre-fixme[16]: Item `bool` of `bool | float | int |
327+
# Tensor` has no attribute `dtype`.
326328
cur_plan_test_inputs[j].dtype,
327329
)
328330
elif type(cur_plan_test_inputs[j]) in (
@@ -354,6 +356,8 @@ def _assert_valid_bundle(
354356
), "The label tensor {} dtype shall be {}, but now is {}".format(
355357
cur_plan_test_expected_outputs[j],
356358
self._get_output_dtype(program, program_plan_id, j),
359+
# pyre-fixme[16]: Item `bool` of `bool | float | int |
360+
# Tensor` has no attribute `dtype`.
357361
cur_plan_test_expected_outputs[j].dtype,
358362
)
359363

0 commit comments

Comments (0)