46 changes: 43 additions & 3 deletions .lintrunner.toml
@@ -312,9 +312,9 @@ include_patterns = [
     # 'devtools/**/*.py',
     'devtools/visualization/**/*.py',
     'docs/**/*.py',
+    'exir/**/*.py',
     # 'examples/**/*.py',
     'examples/openvino/**/*.py',
-    # 'exir/**/*.py',
     # 'extension/**/*.py',
     'kernels/**/*.py',
     'profiler/**/*.py',
@@ -327,9 +327,49 @@ include_patterns = [
 exclude_patterns = [
     'third-party/**',
     '**/third-party/**',
-    'scripts/check_binary_dependencies.py',
-    'profiler/test/test_profiler_e2e.py',
     'backends/arm/test/**',
+    # exir exclusions (sorted alphabetically)
+    'exir/_serialize/test/**',
+    'exir/backend/test/**',
+    'exir/backend/utils.py',
+    'exir/dialects/backend/test/**',
+    'exir/dialects/edge/arg/model.py',
+    'exir/dialects/edge/op/test/**',
+    'exir/dialects/edge/spec/**',
+    'exir/dialects/edge/test/**',
+    'exir/dialects/test/**',
+    'exir/emit/_emitter.py',
+    'exir/emit/test/**',
+    'exir/lowered_backend_module.py',
+    'exir/memory_planning.py',
+    'exir/operator/test/**',
+    'exir/pass_base.py',
+    'exir/passes/__init__.py',
+    'exir/passes/_quant_patterns_and_replacements.py',
+    'exir/passes/const_prop_pass.py',
+    'exir/passes/constant_prop_pass.py',
+    'exir/passes/dynamic_shape_prop_pass.py',
+    'exir/passes/executorch_prim_ops_registry.py',
+    'exir/passes/memory_planning_pass.py',
+    'exir/passes/prune_empty_tensors_pass.py',
+    'exir/passes/quant_fusion_pass.py',
+    'exir/passes/quantize_io_pass.py',
+    'exir/passes/remove_mixed_type_operators.py',
+    'exir/passes/remove_noop_pass.py',
+    'exir/passes/replace_view_copy_with_view_pass.py',
+    'exir/passes/spec_prop_pass.py',
+    'exir/passes/sym_shape_eval_pass.py',
+    'exir/passes/sym_to_tensor_pass.py',
+    'exir/passes/weights_to_outputs_pass.py',
+    'exir/program/test/**',
+    'exir/serde/export_serialize.py',
+    'exir/serde/serialize.py',
+    'exir/tensor.py',
+    'exir/tests/**',
+    'exir/tracer.py',
+    'exir/verification/test/**',
+    'profiler/test/test_profiler_e2e.py',
+    'scripts/check_binary_dependencies.py',
 ]
 command = [
     'python3',
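
The include/exclude changes above turn the MYPY linter on for exir/ while carving out the files that do not pass yet (sorted so later cleanups can strike entries one by one). A hedged sketch of how to exercise the new coverage locally, assuming lintrunner is installed and initialized for this repo (the exact flags may differ by lintrunner version):

    # Run only the MYPY linter over one of the newly included files.
    lintrunner --take MYPY exir/_serialize/_dataclass.py
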
6 changes: 6 additions & 0 deletions .mypy.ini
@@ -100,3 +100,9 @@ ignore_missing_imports = True
 
 [mypy-torchao.*]
 follow_untyped_imports = True
+
+[mypy-sympy.*]
+ignore_missing_imports = True
+
+[mypy-executorch.exir.verification.bindings]
+ignore_missing_imports = True
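
These per-module overrides silence only import-resolution errors for the named modules; everything else in the importing file is still checked. A minimal sketch of the behavior being configured (hypothetical snippet, not from this PR):

    # Without the [mypy-sympy.*] override, mypy reports an error such as
    # "Cannot find implementation or library stub for module named 'sympy'"
    # (the exact message and error code depend on the mypy version).
    import sympy

    x = sympy.Symbol("x")  # with ignore_missing_imports = True, sympy is typed as Any
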
4 changes: 2 additions & 2 deletions exir/_serialize/_dataclass.py
@@ -141,5 +141,5 @@ class Example
         if isinstance(T, enum.EnumMeta):
             data[key] = T[value]
         else:
-            data[key] = T(value)
-    return cls(**data)
+            data[key] = T(value)  # type: ignore[operator]
+    return cls(**data)  # type: ignore[operator]
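
Note that the ignores throughout this diff carry explicit error codes (`# type: ignore[operator]` rather than bare `# type: ignore`), so only the named check is suppressed. The `[operator]` code fires when mypy cannot prove a value is callable; a self-contained illustration with a hypothetical `build` helper (not the `_dataclass.py` code):

    from typing import Union

    def build(T: Union[type, str], value: object) -> object:
        # Without the ignore, mypy reports: error: "str" not callable  [operator]
        # because the union admits a non-callable member, even if callers
        # only ever pass a type.
        return T(value)  # type: ignore[operator]
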
8 changes: 4 additions & 4 deletions exir/_serialize/_flatbuffer.py
@@ -193,10 +193,10 @@ def _run_flatc(args: Sequence[str]) -> None:
         subprocess.run([flatc_path] + list(args), check=True)
     else:
         # Expect the `flatc` tool to be on the system path or set as an env var.
-        flatc_path = os.getenv("FLATC_EXECUTABLE")
-        if not flatc_path:
-            flatc_path = "flatc"
-        subprocess.run([flatc_path] + list(args), check=True)
+        flatc_executable = os.getenv("FLATC_EXECUTABLE")
+        if not flatc_executable:
+            flatc_executable = "flatc"
+        subprocess.run([flatc_executable] + list(args), check=True)
 
 
 def _flatc_compile(output_dir: str, schema_path: str, json_path: str) -> None:
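
The rename is a typing fix, not a behavior change: `os.getenv` returns `Optional[str]`, and reassigning its result to a name mypy already binds to `str` is an `[assignment]` error. A minimal sketch of the pattern (hypothetical variable names):

    import os

    tool: str = "flatc"
    # tool = os.getenv("FLATC_EXECUTABLE")  # error: incompatible types in
    #                                       # assignment (Optional[str] vs str)

    tool_maybe = os.getenv("FLATC_EXECUTABLE")  # inferred as Optional[str]
    if not tool_maybe:
        tool_maybe = "flatc"
    # After the narrowing check, mypy treats tool_maybe as str.
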
4 changes: 2 additions & 2 deletions exir/_serialize/_named_data_store.py
@@ -121,8 +121,8 @@ def _add_named_data_to_map(
         if self.data_hash_to_buffer_idx.get(hashed, -1) != buffer_idx:
             raise ValueError(
                 f"Duplicate key {key} with different data. "
-                f"Existing data: {self.buffers[buffer_idx].buffer}. "
-                f"New data: {data}."
+                f"Existing data: {self.buffers[buffer_idx].buffer!r}. "
+                f"New data: {data!r}."  # type: ignore[str-bytes-safe]
             )
         self.buffers[buffer_idx].alignment = math.lcm(
             self.buffers[buffer_idx].alignment, alignment
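
The `!r` conversions address mypy's `str-bytes-safe` check, which warns when bytes are interpolated into a string implicitly. A short illustration (hypothetical values):

    payload = b"\x00\x01"
    # msg = f"data: {payload}"   # error: ... [str-bytes-safe], because this
    #                            # silently renders the repr "b'\x00\x01'"
    msg = f"data: {payload!r}"   # explicit repr, accepted by mypy
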
12 changes: 6 additions & 6 deletions exir/_serialize/_serialize.py
@@ -6,7 +6,7 @@
 
 # pyre-strict
 
-from typing import Dict, Optional, Set, Tuple
+from typing import Dict, List, Optional, Set, Tuple
 
 from executorch.exir._serialize import _serialize_pte_binary
 
@@ -102,10 +102,10 @@ def serialize_for_executorch(
     )
 
     for tag in all_external_tags:
-        buffers = []
+        buffers: List[bytes] = []
         fqn_to_tensor_entry: Dict[str, TensorEntry] = {}
         # pyre-ignore[16]: Undefined attribute: `Optional` has no attribute `get`.
-        fqn_to_index = emitter_output.external_constant_map.get(tag, {})
+        fqn_to_index = emitter_output.external_constant_map.get(tag, {})  # type: ignore[union-attr]
         # Create a TensorEntry for each external tensor.
         for fqn, index in fqn_to_index.items():
             assert fqn in fqn_to_tensor_layout
@@ -118,13 +118,13 @@
         # Extract external data.
         key_to_data: Dict[str, DataEntry] = {}
         # pyre-ignore[16]: Undefined attribute: `Optional` has no attribute `get`.
-        key_to_buffer_index = named_data.external_data.get(tag, {})
+        key_to_buffer_index = named_data.external_data.get(tag, {})  # type: ignore[union-attr]
         for key, index in key_to_buffer_index.items():
             # pyre-ignore[16]: Undefined attribute: `Optional` has no attribute `buffers`.
             key_to_data[key] = DataEntry(
-                len(buffers), named_data.buffers[index].alignment
+                len(buffers), named_data.buffers[index].alignment  # type: ignore[union-attr]
             )
-            buffers.append(named_data.buffers[index].buffer)
+            buffers.append(named_data.buffers[index].buffer)  # type: ignore[union-attr]
 
         # Serialize into PTD file.
         ptd_files[tag] = data_serializer.serialize(
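
Two recurring patterns appear in this hunk: annotating an empty container so later appends type-check, and `union-attr` ignores where an `Optional` attribute is accessed without narrowing. A compact sketch of both (hypothetical names, not the `_serialize.py` code):

    from typing import Dict, List, Optional

    buffers: List[bytes] = []   # bare `buffers = []` leaves the element type unknown
    buffers.append(b"\x00")     # checks against the declared element type

    external_data: Optional[Dict[str, int]] = None
    # external_data.get("tag")  # error: Item "None" of "Optional[Dict[str, int]]"
    #                           # has no attribute "get"  [union-attr]
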
16 changes: 9 additions & 7 deletions exir/backend/backend_api.py
@@ -123,7 +123,7 @@ def to_backend(
         compile_specs=compile_specs,
         named_data_store_output=preprocess_result.data_store_output,
     )
-    lowered_module.meta = {
+    lowered_module.meta = {  # type: ignore[assignment]
         "debug_handle_map": preprocess_result.debug_handle_map
     }
     return lowered_module
@@ -311,7 +311,7 @@ def _partition_and_lower_one_graph_module(
             is_submodule,
         )
 
-        lowered_submodule = to_backend(
+        lowered_submodule = to_backend(  # type: ignore[call-arg]
             delegation_spec.backend_id,
             submodule_program,
             delegation_spec.compile_specs,
@@ -449,7 +449,7 @@ def _create_partitions_in_graph_module(
     owning_program: ExportedProgram,
     is_submodule: bool,
 ) -> Dict[str, List[torch.fx.Node]]:
-    backend_id_to_submodule_name = {}
+    backend_id_to_submodule_name: Dict[str, List[str]] = {}
    for tag, delegation_spec in partition_result.partition_tags.items():
        # Create partition with nodes containing this tag. There should only be
        # one contained submodule per tag
@@ -517,10 +517,12 @@ def _create_partitions_in_graph_module(
         # in future edits to the graph. As a result, we just keep track of the node's name
         # and at the end we search for this node in our final graph module
         backend_id_to_submodule_name[delegation_spec.backend_id].append(
-            call_module_node.target
+            call_module_node.target  # type: ignore[arg-type]
         )
 
-    created_submodule_nodes = {key: [] for key in backend_id_to_submodule_name.keys()}
+    created_submodule_nodes: Dict[str, List[torch.fx.Node]] = {
+        key: [] for key in backend_id_to_submodule_name.keys()
+    }
     for backend_id, submodule_name in backend_id_to_submodule_name.items():
         for node in tagged_graph_module.graph.nodes:
             if node.op == "call_module" and node.target in submodule_name:
@@ -615,7 +617,7 @@ def lower_all_submodules_to_backend(
             compile_specs=compile_spec,
             named_data_store_output=preprocess_result.data_store_output,
         )
-        lowered_module.meta = {
+        lowered_module.meta = {  # type: ignore[assignment]
             "debug_handle_map": preprocess_result.debug_handle_map,
         }
         is_submodule = call_submodule_node.meta["is_submodule"]
@@ -698,7 +700,7 @@ def to_backend(
    method_to_partitioner = method_edge_program_partitioners.method_to_partitioner

    partitioned_and_lowered_exported_programs = {}
-   backend_id_to_method_submodules_map = {}
+   backend_id_to_method_submodules_map: Dict[str, Dict[str, List[torch.fx.Node]]] = {}
    method_to_tagged_exported_program = {}

    for method_name, partitioner_instance in method_to_partitioner.items():
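
The `arg-type` ignore on `call_module_node.target` exists because `torch.fx.Node.target` is typed as `Union[Callable[..., Any], str]`; an `isinstance` narrowing would be the ignore-free alternative. A sketch with a hypothetical helper:

    from typing import List

    import torch.fx

    def collect_name(node: torch.fx.Node, names: List[str]) -> None:
        # Narrowing node.target to str makes the append type-check
        # without any ignore comment.
        if isinstance(node.target, str):
            names.append(node.target)
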
6 changes: 3 additions & 3 deletions exir/backend/backend_details.py
@@ -7,16 +7,16 @@
 from abc import ABC, abstractmethod
 from dataclasses import dataclass
 
-from typing import Dict, List, Optional, Tuple, Union
+from typing import Any, Callable, Dict, List, Optional, Tuple, Union
 
 from executorch.exir._serialize._named_data_store import NamedDataStoreOutput
 
 from executorch.exir.backend.compile_spec_schema import CompileSpec
 from torch.export.exported_program import ExportedProgram
 
 
-def enforcedmethod(func):
-    func.__enforcedmethod__ = True
+def enforcedmethod(func: Callable[..., Any]) -> Callable[..., Any]:
+    func.__enforcedmethod__ = True  # type: ignore[attr-defined]
     return func
 
 
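
Assembled, the annotated decorator from this hunk reads as below; the `attr-defined` ignore is needed because function objects declare no `__enforcedmethod__` attribute, even though setting one at runtime is fine:

    from typing import Any, Callable

    def enforcedmethod(func: Callable[..., Any]) -> Callable[..., Any]:
        # Mark the function so abstract-method enforcement can find it later.
        func.__enforcedmethod__ = True  # type: ignore[attr-defined]
        return func
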
7 changes: 3 additions & 4 deletions exir/backend/canonical_partitioners/config_partitioner.py
@@ -52,10 +52,9 @@ class PartitionerConfig(ABC):
     the specified backend.
     """
 
-    @classmethod
-    @property
+    @property  # type: ignore[misc]
     @abstractmethod
-    def target_name(cls) -> str:
+    def target_name(self) -> str:
         """
         Target name for this partitioner config. When the Config-Based Partitioner
         encounters a node with a matching target name, it uses this config's methods to
@@ -138,7 +137,7 @@ def filter_fn(node: torch.fx.Node) -> bool:
         """
         if node.op != "call_function":
             return False
-        target_name = format_target_name(node.target.__name__)  # pyre-ignore
+        target_name = format_target_name(node.target.__name__)  # type: ignore[union-attr]
 
         if target_name in self.target_partitioner_configs:
             config = self.target_partitioner_configs[target_name]
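
Dropping `@classmethod` here matters beyond typing: chained classmethod/property descriptors were never understood by mypy, and the runtime combination was deprecated in Python 3.11. The remaining abstract property is implemented per-instance, as in this hypothetical config sketch:

    from abc import ABC, abstractmethod

    class PartitionerConfigSketch(ABC):
        @property
        @abstractmethod
        def target_name(self) -> str: ...

    class ConvConfigSketch(PartitionerConfigSketch):
        @property
        def target_name(self) -> str:
            return "aten.convolution.default"
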
@@ -28,9 +28,9 @@ def _get_attribute_or_constants(
     if maybe_param is not None:
         constant_or_attribute = maybe_param
     elif maybe_buffer is not None:
-        constant_or_attribute = maybe_buffer
+        constant_or_attribute = maybe_buffer  # type: ignore[assignment]
     elif maybe_lifted_tensor is not None:
-        constant_or_attribute = maybe_lifted_tensor
+        constant_or_attribute = maybe_lifted_tensor  # type: ignore[assignment]
     return constant_or_attribute
 
 
4 changes: 2 additions & 2 deletions exir/backend/partitioner.py
@@ -59,7 +59,7 @@ class Partitioner(ABC):
     def __init__(
         self,
         spec: Mapping[Union[str, int, float, bool], object] = MappingProxyType({}),
-    ):
+    ) -> None:
         self._spec = spec
 
     def __call__(self, exported_program: ExportedProgram) -> PartitionResult:
@@ -69,7 +69,7 @@ def __call__(self, exported_program: ExportedProgram) -> PartitionResult:
     def spec(self) -> Mapping[Union[str, int, float, bool], object]:
         return self._spec
 
-    @enforcedmethod
+    @enforcedmethod  # type: ignore[misc]
     @abstractmethod
     def partition(self, exported_program: ExportedProgram) -> PartitionResult:
         """
10 changes: 5 additions & 5 deletions exir/capture/_capture.py
@@ -122,10 +122,10 @@ def _capture_legacy_do_not_use(f, args) -> ExirExportedProgram:
                     outputs=[],
                     # pyre-fixme[6]: For 3rd argument expected `TreeSpec` but got
                     #  `Union[Tensor, Module]`.
-                    in_spec=in_spec,
+                    in_spec=in_spec,  # type: ignore[arg-type]
                     # pyre-fixme[6]: For 4th argument expected `TreeSpec` but got
                     #  `Union[Tensor, Module]`.
-                    out_spec=out_spec,
+                    out_spec=out_spec,  # type: ignore[arg-type]
                 ),
             )
         ],
@@ -207,7 +207,7 @@ def capture(  # noqa: C901
     if isinstance(f, MethodType) and isinstance(f.__self__, torch.nn.Module):
         with patch_forward(f.__self__, f):
             ep = export(
-                cast(torch.nn.Module, f.__self__),
+                f.__self__,  # type: ignore[redundant-cast]
                 args,
                 dynamic_shapes=dynamic_shapes,
                 strict=True,
@@ -272,7 +272,7 @@ def graph_with_interpreter(*args):
         graph_with_interpreter,
         remove="mutations_and_views",
     )
-    assert isinstance(functionalized_callable, Callable)
+    assert callable(functionalized_callable)  # type: ignore[arg-type]
 
     if config.enable_dynamic_shape:
         fake_tensor_mode = FakeTensorMode(
@@ -357,7 +357,7 @@ def convert_to_fake(x):
                     in_spec=in_spec,
                     # pyre-fixme[6]: For 4th argument expected `TreeSpec` but got
                     #  `Union[None, TreeSpec, Tensor, Module]`.
-                    out_spec=out_spec,
+                    out_spec=out_spec,  # type: ignore[arg-type]
                 ),
             )
         ],
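
The `isinstance(..., Callable)` to `callable(...)` swap is the idiomatic fix: the builtin performs the equivalent runtime check, and recent mypy versions narrow on it. A sketch with a hypothetical function:

    def run(fn: object) -> None:
        assert callable(fn)  # mypy narrows fn to a callable type here
        fn()
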
2 changes: 1 addition & 1 deletion exir/common.py
@@ -104,9 +104,9 @@ def override_logger(
     try:
         oldLevel = logging.root.level
         logging.root.setLevel(newLevel)
+        oldFormatters = []
         if fmtstr:
             newformatter = logging.Formatter(fmtstr, None, "%")
-            oldFormatters = []
             for handler in logging.root.handlers:
                 oldFormatters.append(handler.formatter)
                 handler.formatter = newformatter
6 changes: 3 additions & 3 deletions exir/control_flow.py
@@ -103,7 +103,7 @@ def _make_submodule(
         f"Expect function '{fn.__name__}' to be decorated with tracing_context.",
     )
     # pyre-ignore
-    args = fn.__tracing_inputs__
+    args = fn.__tracing_inputs__  # type: ignore[attr-defined]
     # TODO(yidi): we don't want to enable here because we are not gonna use this code path in the future anyways
     gm, _ = flattened_dispatch_trace(fn, args, set(), enable_functionalization=False)
     output = next(iter(reversed(gm.graph.nodes)))
@@ -122,7 +122,7 @@ def _make_submodule(
         output.args = tuple(output.args[0])
     gm.recompile()
     # pyre-fixme[16]: `GraphModule` has no attribute `__tracing_inputs__`.
-    gm.__tracing_inputs__ = args
+    gm.__tracing_inputs__ = args  # type: ignore[attr-defined]
     return gm
 
 
@@ -198,7 +198,7 @@ def wrapper(
 
             return f(*args)
 
-        wrapper.__tracing_inputs__ = inputs  # pyre-ignore
+        wrapper.__tracing_inputs__ = inputs  # type: ignore[attr-defined]
         return wrapper
 
     return decorator
8 changes: 5 additions & 3 deletions exir/delegate.py
@@ -42,9 +42,11 @@
 LOWERED_BACKEND_MODULE_TYPE = "LoweredBackendModule"
 
 # pyre-ignore
-def trace_call_delegate(proxy_mode, func_overload, lowered_module, *args):
+def trace_call_delegate(
+    proxy_mode: Any, func_overload: Any, lowered_module: Any, *args: Any
+) -> Any:
     # pyre-ignore
-    def _unwrap_proxy(e):
+    def _unwrap_proxy(e: Any) -> Any:
         if not isinstance(e, (torch.Tensor, torch.SymInt, torch.SymFloat)):
             return e
         return get_proxy_slot(
@@ -151,7 +153,7 @@ def is_lowered_module(obj: Any) -> bool:
 def get_lowered_module_name(
     root: torch.nn.Module,
     # pyre-ignore: Undefined or invalid type [11]: Annotation `LoweredBackendModule` is not defined as a type.
-    lowered_module: LOWERED_BACKEND_MODULE_TYPE,  # noqa
+    lowered_module: Any,  # noqa
 ) -> str:
     """
     Adds the given lowered_module into the given root module and returns the
2 changes: 1 addition & 1 deletion exir/delegate.pyi
@@ -17,5 +17,5 @@ def is_lowered_module(obj: Any) -> bool: ...
 def get_lowered_module_name(
     root: torch.nn.Module,
     # pyre-ignore: Undefined or invalid type [11]: Annotation `LoweredBackendModule` is not defined as a type.
-    lowered_module: LOWERED_BACKEND_MODULE_TYPE,  # noqa
+    lowered_module: Any,  # noqa
 ) -> str: ...
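
`Any` is the pragmatic choice in both files because `LOWERED_BACKEND_MODULE_TYPE` is a string constant, not a type, so neither pyre nor mypy could use it as an annotation. A string forward reference guarded by `TYPE_CHECKING` would be the precise alternative; a sketch, assuming the class lives in `exir/lowered_backend_module.py` as listed in the exclusions above:

    from typing import TYPE_CHECKING

    import torch

    if TYPE_CHECKING:
        # Imported only for type checking, avoiding a runtime import cycle.
        from executorch.exir.lowered_backend_module import LoweredBackendModule

    def get_lowered_module_name(
        root: torch.nn.Module,
        lowered_module: "LoweredBackendModule",
    ) -> str: ...
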