
Commit b6709c3

Update
[ghstack-poisoned]

2 parents b7d89af + f121a8d commit b6709c3

11 files changed: +329 additions, −11 deletions
Lines changed: 93 additions & 0 deletions
@@ -0,0 +1,93 @@
+name: Add Open External Contributor PRs and Issues to PyTorch Org Project 136
+
+on:
+  schedule:
+    - cron: '0 * * * *'
+  workflow_dispatch:
+
+jobs:
+  add_to_project:
+    runs-on: ubuntu-latest
+    steps:
+      - name: Add open issues and open, non-draft PRs to org project (excluding certain authors)
+        uses: actions/github-script@v7
+        with:
+          github-token: ${{ secrets.PYTORCH_PROJECT_PAT }}
+          script: |
+            const projectId = "PVT_kwDOAUB9vs4A_PUL"; // PyTorch org project 136
+            const owner = 'pytorch';
+            const repo = 'executorch';
+
+            // List of authors to exclude
+            const excludedAuthors = new Set([
+              "nil-is-all", "cbilgin", "KimishPatel", "psiddh", "digantdesai", "SS-JIA", "ahmtox", "mcr229", "shoumikhin",
+              "manuelcandales", "metascroy", "cccclai", "rohansjoshi", "kirklandsign", "abhinaykukkadapu", "JacobSzwejbka",
+              "Conarnar", "lucylq", "larryliu0820", "BujSet", "Gasoonjia", "Juntian777", "guangy10", "jackzhxng",
+              "GregoryComer", "leafs1", "swolchok", "mergennachin", "tarun292", "byjlw", "jathu", "Jack-Khuu", "georgehong",
+              "zhenyan-zhang-meta", "silverguo", "dbort", "jorgep31415", "huydhn", "mcremon-meta", "trivedivivek", "angelayi",
+              "helunwencser", "hsharma35", "zhxchen17", "iseeyuan", "svekars", "nathanaelsee", "dulinriley", "jerryzh168",
+              "cmodi-meta", "bigfootjon", "sxu", "ydwu4", "Riandy", "tugsbayasgalan", "bsoyluoglu", "yangw-dev", "YIWENX14",
+              "namanahuja", "yushangdi", "limintang", "pianpwk", "viveknayakatmeta", "andreanicastro", "JakeStevens",
+              "gmagogsfm", "zonglinpeng", "eigen-k", "derekxu", "salilsdesai", "skrtskrtfb", "pssrawat", "r-barnes", "pytorchbot",
+              "pytorchmergebot", "pytorchupdatebot", "facebook-github-bot", "Erik-Lundell", "zingo", "AdrianLundell",
+              "oscarandersson8218", "per", "Sebastian-Larsson", "SaoirseARM", "robell", "mansnils", "martinlsm", "freddan80",
+              "YufengShi-dudu", "tom-arm", "perheld", "Jerry-Ge", "gggekov", "fumchin", "wwwind", "haowhsu-quic", "shewu-quic",
+              "winskuo-quic", "chunit-quic", "DannyYuyang-quic", "chuntl", "cymbalrush", "DenisVieriu97", "billmguo",
+              "StrycekSimon", "jirioc", "robert-kalmar", "skywall", "neuropilot-captain"
+            ]);
+
+            async function addItem(contentId, type, number) {
+              try {
+                await github.graphql(`
+                  mutation {
+                    addProjectV2ItemById(input: {projectId: "${projectId}", contentId: "${contentId}"}) {
+                      item { id }
+                    }
+                  }
+                `);
+                console.log(`Added ${type} #${number} to project`);
+              } catch (error) {
+                if (error.message && error.message.includes("A project item already exists for this content")) {
+                  // Ignore if already exists
+                  console.log(`${type} #${number} already in project`);
+                } else {
+                  console.log(`Error adding ${type} #${number}: ${error.message}`);
+                }
+              }
+            }
+
+            try {
+              // Add open issues (not PRs) and exclude by author
+              const issues = await github.paginate(
+                github.rest.issues.listForRepo,
+                {
+                  owner,
+                  repo,
+                  state: 'open',
+                  filter: 'all'
+                }
+              );
+              for (const issue of issues) {
+                if (!issue.pull_request && !excludedAuthors.has(issue.user.login)) {
+                  await addItem(issue.node_id, 'issue', issue.number);
+                }
+              }
+
+              // Add open, non-draft PRs (regardless of review state), exclude by author
+              const prs = await github.paginate(
+                github.rest.pulls.list,
+                {
+                  owner,
+                  repo,
+                  state: 'open',
+                  draft: false,
+                }
+              );
+              for (const pr of prs) {
+                if (!excludedAuthors.has(pr.user.login)) {
+                  await addItem(pr.node_id, 'pr', pr.number);
+                }
+              }
+            } catch (error) {
+              core.setFailed(`Workflow failed: ${error.message}`);
+            }

backends/arm/TARGETS

Lines changed: 11 additions & 0 deletions
@@ -12,6 +12,15 @@ python_library(
         ":arm_partitioner",
     ]
 )
+python_library(
+    name = "constants",
+    srcs = [
+        "constants.py",
+    ],
+    deps = [
+        "//executorch/exir/dialects:lib",
+    ],
+)
 python_library(
     name = "arm_partitioner",
     srcs = [
@@ -22,6 +31,7 @@ python_library(
     ],
     deps = [
         ":arm_backend",
+        ":constants",
         "//executorch/backends/arm/operator_support:operator_support",
         "//executorch/backends/arm/_passes:passes",
         "//executorch/exir:lib",
@@ -90,6 +100,7 @@ python_library(
         "fbsource//third-party/tosa_tools/v1.00/serialization_lib/python/serializer:serializer",
         "fbsource//third-party/tosa_tools/v0.80/serialization_lib/python/tosa:tosa",
         "fbsource//third-party/tosa_tools/v1.00/serialization_lib/python/tosa:tosa",
+        ":constants",
         ":tosa_mapping",
         "//executorch/exir/dialects:lib",
     ],

backends/arm/_passes/TARGETS

Lines changed: 1 addition & 0 deletions
@@ -4,6 +4,7 @@ python_library(
     name = "passes",
     srcs = glob(["*.py"]),
     deps = [
+        "//executorch/backends/arm:constants",
         "//executorch/backends/arm:tosa_quant_utils",
         "//executorch/backends/arm:tosa_utils",
         "//executorch/backends/arm/tosa/dialect:lib",

backends/arm/operator_support/TARGETS

Lines changed: 1 addition & 0 deletions
@@ -4,6 +4,7 @@ python_library(
     name = "operator_support",
     srcs = glob(["*.py"]),
    deps = [
+        "//executorch/backends/arm:constants",
        "//executorch/backends/arm/_passes:passes",
        "//executorch/backends/arm:tosa_specification",
        "//executorch/backends/transforms:remove_getitem_op",

backends/xnnpack/partition/xnnpack_partitioner.py

Lines changed: 32 additions & 1 deletion
@@ -4,8 +4,8 @@
 # This source code is licensed under the BSD-style license found in the
 # LICENSE file in the root directory of this source tree.
 
+import inspect
 import itertools
-
 import logging
 from typing import List, Optional, Type, Union
 
@@ -65,6 +65,37 @@ def __init__(
         self.per_op_mode = per_op_mode
         super().__init__(delegation_spec, initialized_configs)
 
+    def _check_if_called_from_to_backend(self) -> bool:
+        """
+        Check if the partition method is being called from the deprecated to_backend workflow.
+        Returns True if called from deprecated direct to_backend, False if called from to_edge_transform_and_lower.
+        """
+        stack = inspect.stack()
+
+        for frame_info in stack:
+            if frame_info.function == "to_edge_transform_and_lower":
+                return False
+
+        for frame_info in stack:
+            if frame_info.function == "to_backend":
+                filename = frame_info.filename
+                if "program/_program.py" in filename:
+                    return True
+        return False
+
+    def partition(self, exported_program):
+        """
+        Override partition to add deprecation warning when called from to_backend.
+        """
+        # Check if we're being called from the deprecated to_backend workflow
+        if self._check_if_called_from_to_backend():
+            logger.warning(
+                "\nDEPRECATION WARNING: You are using the deprecated 'to_edge() + to_backend()' workflow. "
+                "Please consider migrating to 'to_edge_transform_and_lower()' for better error handling and optimization. "
+            )
+
+        return super().partition(exported_program)
+
     def generate_partitions(self, ep: ExportedProgram) -> List[Partition]:
         """
         generate_partitions is different if partitioner is set to per_op_mode
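
For context, a minimal sketch of the two lowering workflows this override distinguishes (the model and input shape are illustrative and mirror the new test added below; XnnpackPartitioner and the exir entry points are used as imported there):

    import torch
    from executorch.backends.xnnpack.partition.xnnpack_partitioner import XnnpackPartitioner
    from executorch.exir import to_edge, to_edge_transform_and_lower
    from torch.export import export

    class SimpleModel(torch.nn.Module):
        def __init__(self):
            super().__init__()
            self.linear = torch.nn.Linear(10, 5)

        def forward(self, x):
            return self.linear(x)

    exported = export(SimpleModel(), (torch.randn(1, 10),))

    # Deprecated path: partition() is reached through to_backend() in exir's
    # program/_program.py, so the override above logs the DEPRECATION WARNING.
    to_edge(exported).to_backend(XnnpackPartitioner())

    # Recommended path: to_edge_transform_and_lower appears on the call stack,
    # so _check_if_called_from_to_backend() returns False and nothing is logged.
    to_edge_transform_and_lower(exported, partitioner=[XnnpackPartitioner()])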

backends/xnnpack/runtime/XNNCompiler.cpp

Lines changed: 4 additions & 1 deletion
@@ -120,7 +120,10 @@ xnn_datatype getDataType(const DataType& data_type) {
       return xnn_datatype::xnn_datatype_qcint4;
     case DataType::xnn_datatype_qbint4:
       return xnn_datatype::xnn_datatype_qbint4;
-    case DataType::xnn_datatype_qdint8: // always try to us kleidi
+    case DataType::xnn_datatype_qdint8:
+#if !defined(ENABLE_XNNPACK_KLEIDI) || ENABLE_XNNPACK_KLEIDI == 0
+      return xnn_datatype::xnn_datatype_qdint8;
+#endif
     case DataType::xnn_datatype_qpint8:
       return xnn_datatype::xnn_datatype_qpint8;
     case DataType::xnn_datatype_int32:
Lines changed: 84 additions & 0 deletions
@@ -0,0 +1,84 @@
+# Copyright (c) Meta Platforms, Inc. and affiliates.
+# All rights reserved.
+#
+# This source code is licensed under the BSD-style license found in the
+# LICENSE file in the root directory of this source tree.
+
+import io
+import logging
+import unittest
+
+import torch
+from executorch.backends.xnnpack.partition.xnnpack_partitioner import XnnpackPartitioner
+from executorch.exir import to_edge, to_edge_transform_and_lower
+from torch.export import export
+
+
+class TestXnnpackPartitioner(unittest.TestCase):
+    """Test cases for XnnpackPartitioner functionality and deprecation warnings."""
+
+    class SimpleModel(torch.nn.Module):
+        def __init__(self):
+            super().__init__()
+            self.linear = torch.nn.Linear(10, 5)
+
+        def forward(self, x):
+            return self.linear(x)
+
+    def test_deprecation_warning_for_to_backend_workflow(self):
+        """
+        Test that the deprecated to_edge + to_backend workflow shows a deprecation warning.
+        """
+        model = self.SimpleModel()
+        x = torch.randn(1, 10)
+
+        exported_model = export(model, (x,))
+
+        # Capture log output to check for deprecation warning
+        log_capture_string = io.StringIO()
+        ch = logging.StreamHandler(log_capture_string)
+        ch.setLevel(logging.WARNING)
+
+        logger = logging.getLogger(
+            "executorch.backends.xnnpack.partition.xnnpack_partitioner"
+        )
+        logger.addHandler(ch)
+        logger.setLevel(logging.WARNING)
+
+        edge = to_edge(exported_model)
+        partitioner = XnnpackPartitioner()
+
+        edge.to_backend(partitioner)
+
+        log_contents = log_capture_string.getvalue()
+        self.assertIn("DEPRECATION WARNING", log_contents)
+        self.assertIn("to_edge() + to_backend()", log_contents)
+        self.assertIn("to_edge_transform_and_lower()", log_contents)
+
+    def test_no_warning_for_to_edge_transform_and_lower_workflow(self):
+        """
+        Test that the recommended to_edge_transform_and_lower workflow does NOT show a deprecation warning.
+        """
+
+        model = self.SimpleModel()
+        x = torch.randn(1, 10)
+
+        exported_model = export(model, (x,))
+
+        # Capture log output to check for deprecation warning
+        log_capture_string = io.StringIO()
+        ch = logging.StreamHandler(log_capture_string)
+        ch.setLevel(logging.WARNING)
+
+        logger = logging.getLogger(
+            "executorch.backends.xnnpack.partition.xnnpack_partitioner"
+        )
+        logger.addHandler(ch)
+        logger.setLevel(logging.WARNING)
+
+        partitioner = XnnpackPartitioner()
+
+        to_edge_transform_and_lower(exported_model, partitioner=[partitioner])
+
+        log_contents = log_capture_string.getvalue()
+        self.assertNotIn("DEPRECATION WARNING", log_contents)

devtools/etrecord/_etrecord.py

Lines changed: 27 additions & 1 deletion
@@ -70,6 +70,22 @@ def __init__(
         _reference_outputs: Optional[Dict[str, List[ProgramOutput]]] = None,
         _representative_inputs: Optional[List[ProgramInput]] = None,
     ):
+        """
+        Please do not construct an ETRecord object directly.
+
+        If you want to create an ETRecord for logging AOT information for further analysis, please set `generate_etrecord`
+        to True in your export API, and get the ETRecord object from the `ExecutorchProgramManager`.
+        For example:
+        ```python
+        exported_program = torch.export.export(model, inputs)
+        edge_program = to_edge_transform_and_lower(exported_program, generate_etrecord=True)
+        executorch_program = edge_program.to_executorch()
+        etrecord = executorch_program.get_etrecord()
+        ```
+
+        If you need to create an ETRecord manually, please use the `create_etrecord` function.
+        """
+
         self.exported_program = exported_program
         self.export_graph_id = export_graph_id
         self.edge_dialect_program = edge_dialect_program
@@ -81,15 +97,25 @@ def __init__(
 
     def save(self, path: Union[str, os.PathLike, BinaryIO, IO[bytes]]) -> None:
         """
-        Serialize and save the ETRecord to the specified path.
+        Serialize and save the ETRecord to the specified path for use in Inspector. The ETRecord
+        must contain at least the edge dialect program and ExecuTorch program information for further
+        analysis, otherwise an exception is raised.
 
         Args:
             path: Path where the ETRecord file will be saved to.
+
+        Raises:
+            RuntimeError: If the ETRecord does not contain essential information for Inspector.
         """
         if isinstance(path, (str, os.PathLike)):
             # pyre-ignore[6]: In call `os.fspath`, for 1st positional argument, expected `str` but got `Union[PathLike[typing.Any], str]`
             path = os.fspath(path)
 
+        if not (self.edge_dialect_program and self._debug_handle_map):
+            raise RuntimeError(
+                "ETRecord must contain edge dialect program and executorch program to be saved"
+            )
+
         etrecord_zip = ZipFile(path, "w")
 
         try:
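
For reference, a minimal sketch of the recommended flow described in the new docstring (the model and output path are illustrative; `generate_etrecord=True` populates the edge dialect and ExecuTorch program information that `save()` now requires):

    import torch
    from executorch.exir import to_edge_transform_and_lower

    class TinyModel(torch.nn.Module):  # illustrative model, not from the commit
        def forward(self, x):
            return x + 1

    exported_program = torch.export.export(TinyModel(), (torch.randn(2),))
    edge_program = to_edge_transform_and_lower(exported_program, generate_etrecord=True)
    executorch_program = edge_program.to_executorch()

    etrecord = executorch_program.get_etrecord()
    etrecord.save("etrecord.bin")  # raises RuntimeError if the required programs are missing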

devtools/etrecord/tests/TARGETS

Lines changed: 2 additions & 6 deletions
@@ -7,12 +7,7 @@ python_unittest(
     name = "etrecord_test",
     srcs = ["etrecord_test.py"],
     deps = [
-        "//caffe2:torch",
-        "//executorch/devtools/bundled_program:config",
-        "//executorch/devtools/bundled_program:core",
-        "//executorch/devtools/etrecord:etrecord",
-        "//executorch/exir:lib",
-        "//executorch/exir/tests:models",
+        ":etrecord_test_library"
     ],
 )
 
@@ -26,5 +21,6 @@ python_library(
         "//executorch/devtools/etrecord:etrecord",
         "//executorch/exir:lib",
         "//executorch/exir/tests:models",
+        "//executorch/backends/xnnpack/partition:xnnpack_partitioner",
     ],
 )
