Skip to content

Commit 8d0cb06

Browse files
authored
Merge branch 'main' into add-dim-order-clone-aot
2 parents f9f9515 + 49bc664 commit 8d0cb06

File tree

340 files changed

+13594
-6814
lines changed

Some content is hidden

Large Commits have some content hidden by default. Use the searchbox below for content that may be hidden.

340 files changed

+13594
-6814
lines changed
Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1 +1 @@
1-
6fc0ad22f0a07b6f38d138861c56a765d5a9bb02
1+
e7152ff8a6a929a0db7f3f4a72a5b6d471769cd3

.ci/scripts/setup-linux.sh

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -11,6 +11,7 @@ set -exu
1111
source "$(dirname "${BASH_SOURCE[0]}")/utils.sh"
1212

1313
read -r BUILD_TOOL BUILD_MODE EDITABLE < <(parse_args "$@")
14+
echo "Build tool: $BUILD_TOOL, Mode: $BUILD_MODE"
1415

1516
# As Linux job is running inside a Docker container, all of its dependencies
1617
# have already been installed, so we use PyTorch build from source here instead

.ci/scripts/test_backend_linux.sh

Lines changed: 57 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,57 @@
1+
#!/usr/bin/env bash
2+
# Copyright (c) Meta Platforms, Inc. and affiliates.
3+
# All rights reserved.
4+
#
5+
# This source code is licensed under the BSD-style license found in the
6+
# LICENSE file in the root directory of this source tree.
7+
set -eux
8+
9+
SUITE=$1
10+
FLOW=$2
11+
ARTIFACT_DIR=$3
12+
13+
REPORT_FILE="$ARTIFACT_DIR/test-report-$FLOW-$SUITE.csv"
14+
15+
echo "Running backend test job for suite $SUITE, flow $FLOW."
16+
echo "Saving job artifacts to $ARTIFACT_DIR."
17+
18+
# The generic Linux job chooses to use base env, not the one setup by the image
19+
eval "$(conda shell.bash hook)"
20+
CONDA_ENV=$(conda env list --json | jq -r ".envs | .[-1]")
21+
conda activate "${CONDA_ENV}"
22+
23+
export PYTHON_EXECUTABLE=python
24+
25+
# CMake options to use, in addition to the defaults.
26+
EXTRA_BUILD_ARGS=""
27+
28+
if [[ "$FLOW" == *qnn* ]]; then
29+
# Setup QNN sdk and deps - note that this is a bit hacky due to the nature of the
30+
# Qualcomm build. TODO (gjcomer) Clean this up once the QNN pybinding integration is
31+
# cleaned up.
32+
PYTHON_EXECUTABLE=python bash .ci/scripts/setup-linux.sh --build-tool cmake
33+
PYTHON_EXECUTABLE=python bash .ci/scripts/setup-qnn-deps.sh
34+
PYTHON_EXECUTABLE=python bash .ci/scripts/build-qnn-sdk.sh
35+
QNN_X86_LIB_DIR=`realpath build-x86/lib/`
36+
QNN_SDK_ROOT="/tmp/qnn/2.28.0.241029"
37+
export LD_LIBRARY_PATH"=$QNN_X86_LIB_DIR:$QNN_SDK_ROOT/lib/x86_64-linux-clang/:${LD_LIBRARY_PATH:-}"
38+
39+
# TODO Get SDK root from install scripts
40+
EXTRA_BUILD_ARGS+=" -DEXECUTORCH_BUILD_QNN=ON -DQNN_SDK_ROOT=$QNN_SDK_ROOT"
41+
fi
42+
43+
if [[ "$FLOW" == *vulkan* ]]; then
44+
# Setup swiftshader and Vulkan SDK which are required to build the Vulkan delegate
45+
source .ci/scripts/setup-vulkan-linux-deps.sh
46+
47+
EXTRA_BUILD_ARGS+=" -DEXECUTORCH_BUILD_VULKAN=ON"
48+
fi
49+
50+
# We need the runner to test the built library.
51+
PYTHON_EXECUTABLE=python CMAKE_ARGS="$EXTRA_BUILD_ARGS" .ci/scripts/setup-linux.sh --build-tool cmake --build-mode Release --editable true
52+
53+
EXIT_CODE=0
54+
python -m executorch.backends.test.suite.runner $SUITE --flow $FLOW --report "$REPORT_FILE" || EXIT_CODE=$?
55+
56+
# Generate markdown summary.
57+
python -m executorch.backends.test.suite.generate_markdown_summary "$REPORT_FILE" > ${GITHUB_STEP_SUMMARY:-"step_summary.md"} --exit-code $EXIT_CODE

.ci/scripts/test_backend_macos.sh

Lines changed: 30 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,30 @@
1+
#!/usr/bin/env bash
2+
# Copyright (c) Meta Platforms, Inc. and affiliates.
3+
# All rights reserved.
4+
#
5+
# This source code is licensed under the BSD-style license found in the
6+
# LICENSE file in the root directory of this source tree.
7+
set -eux
8+
9+
SUITE=$1
10+
FLOW=$2
11+
ARTIFACT_DIR=$3
12+
13+
REPORT_FILE="$ARTIFACT_DIR/test-report-$FLOW-$SUITE.csv"
14+
15+
echo "Running backend test job for suite $SUITE, flow $FLOW."
16+
echo "Saving job artifacts to $ARTIFACT_DIR."
17+
18+
${CONDA_RUN} --no-capture-output pip install awscli==1.37.21
19+
20+
bash .ci/scripts/setup-conda.sh
21+
eval "$(conda shell.bash hook)"
22+
23+
PYTHON_EXECUTABLE=python
24+
${CONDA_RUN} --no-capture-output .ci/scripts/setup-macos.sh --build-tool cmake --build-mode Release
25+
26+
EXIT_CODE=0
27+
${CONDA_RUN} --no-capture-output python -m executorch.backends.test.suite.runner $SUITE --flow $FLOW --report "$REPORT_FILE" || EXIT_CODE=$?
28+
29+
# Generate markdown summary.
30+
${CONDA_RUN} --no-capture-output python -m executorch.backends.test.suite.generate_markdown_summary "$REPORT_FILE" > ${GITHUB_STEP_SUMMARY:-"step_summary.md"} --exit-code $EXIT_CODE

.ci/scripts/test_huggingface_optimum_model.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -369,7 +369,7 @@ def test_vit(model_id, model_dir, recipe, *, quantize=False, run_only=False):
369369
), # fails to lower for CoreML
370370
"smollm2-135m": ("HuggingFaceTB/SmolLM2-135M", test_text_generation),
371371
"smollm3-3b": ("HuggingFaceTB/SmolLM3-3B", test_text_generation),
372-
"olmo": ("allenai/OLMo-1B-hf", test_text_generation),
372+
"olmo-1b": ("allenai/OLMo-1B-hf", test_text_generation),
373373
}
374374

375375
_mask_fill_mapping = {

.ci/scripts/unittest-buck2.sh

Lines changed: 4 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -11,9 +11,10 @@ set -eux
1111
# TODO: can't query //kernels/prim_ops because of non-buckified stuff in OSS.
1212
buck2 query "//backends/apple/... + //backends/example/... + \
1313
//backends/mediatek/... + //backends/transforms/... + \
14-
//backends/xnnpack/... + //configurations/... + //kernels/aten/... + \
15-
//kernels/optimized/... + //kernels/portable/... + //kernels/quantized/... + \
16-
//kernels/test/... + //runtime/... + //schema/... + //test/... + //util/..."
14+
//backends/xnnpack/... + //configurations/... + //extension/flat_tensor: + \
15+
//extension/llm/runner: + //kernels/aten/... + //kernels/optimized/... + \
16+
//kernels/portable/... + //kernels/quantized/... + //kernels/test/... + \
17+
//runtime/... + //schema/... + //test/... + //util/..."
1718

1819
# TODO: optimized ops are unbuildable because they now use ATen; put
1920
# them back after we can use PyTorch in OSS buck.

.ci/scripts/utils.sh

Lines changed: 0 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -131,8 +131,6 @@ build_executorch_runner_cmake() {
131131
else
132132
CXXFLAGS=""
133133
fi
134-
# This command uses buck2 to gather source files and buck2 could crash flakily
135-
# on MacOS
136134
CXXFLAGS="$CXXFLAGS" retry cmake -DPYTHON_EXECUTABLE="${PYTHON_EXECUTABLE}" -DCMAKE_BUILD_TYPE="${1:-Release}" ..
137135
popd || return
138136

.github/workflows/add-unanswered-to-project.yml

Lines changed: 3 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -1,10 +1,10 @@
11
name: Add Open External Contributor PRs and Issues to PyTorch Org Project 136
22

33
on:
4-
# schedule:
5-
# - cron: '0 * * * *'
64
workflow_dispatch:
7-
5+
pull_request:
6+
paths:
7+
.github/workflows/add-unanswered-to-project.yml
88
jobs:
99
add_to_project:
1010
runs-on: ubuntu-latest

.github/workflows/nightly.yml

Lines changed: 48 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -36,3 +36,51 @@ jobs:
3636
uses: ./.github/workflows/_link_check.yml
3737
with:
3838
ref: ${{ github.sha }}
39+
40+
backend-test-linux:
41+
uses: pytorch/test-infra/.github/workflows/linux_job_v2.yml@main
42+
strategy:
43+
fail-fast: false
44+
matrix:
45+
flow: [
46+
qnn, qnn_16a16w, qnn_16a8w, qnn_16a4w, qnn_16a4w_block, qnn_8a8w,
47+
vulkan, vulkan_static_int8_per_channel,
48+
xnnpack, xnnpack_dynamic_int8_per_channel, xnnpack_static_int8_per_channel, xnnpack_static_int8_per_tensor
49+
]
50+
suite: [models, operators]
51+
with:
52+
ref: ${{ github.event_name == 'pull_request' && github.event.pull_request.head.sha || github.sha }}
53+
runner: linux.4xlarge.memory
54+
docker-image: ci-image:executorch-ubuntu-22.04-clang12
55+
submodules: recursive
56+
timeout: 120
57+
upload-artifact: test-report-${{ matrix.flow }}-${{ matrix.suite }}
58+
script: |
59+
set -eux
60+
61+
source .ci/scripts/test_backend_linux.sh "${{ matrix.suite }}" "${{ matrix.flow }}" "${RUNNER_ARTIFACT_DIR}"
62+
63+
backend-test-macos:
64+
uses: pytorch/test-infra/.github/workflows/macos_job.yml@main
65+
permissions:
66+
id-token: write
67+
contents: read
68+
strategy:
69+
fail-fast: false
70+
matrix:
71+
flow: [coreml, coreml_static_int8]
72+
suite: [models, operators]
73+
with:
74+
ref: ${{ github.event_name == 'pull_request' && github.event.pull_request.head.sha || github.sha }}
75+
runner: macos-m1-stable
76+
python-version: 3.12
77+
submodules: recursive
78+
timeout: 120
79+
upload-artifact: test-report-${{ matrix.flow }}-${{ matrix.suite }}
80+
script: |
81+
set -eux
82+
83+
# This is needed to get the prebuilt PyTorch wheel from S3
84+
${CONDA_RUN} --no-capture-output pip install awscli==1.37.21
85+
86+
source .ci/scripts/test_backend_macos.sh "${{ matrix.suite }}" "${{ matrix.flow }}" "${RUNNER_ARTIFACT_DIR}"

.github/workflows/stale.yml

Lines changed: 149 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,149 @@
1+
# The behavior is:
2+
# - If a PR is not labeled stale, after 60 days of inactivity label the PR as stale and comment about it.
3+
# - If a PR is labeled stale, after 30 days of inactivity close the PR.
4+
# - `high priority` and `no-stale` PRs are exempt.
5+
6+
name: Close stale pull requests
7+
8+
on:
9+
schedule:
10+
# Run daily at 00:30 UTC.
11+
- cron: '30 0 * * *'
12+
workflow_dispatch:
13+
14+
jobs:
15+
stale:
16+
if: ${{ github.repository == 'pytorch/executorch' }}
17+
runs-on: linux.large
18+
permissions:
19+
contents: read
20+
pull-requests: write
21+
22+
steps:
23+
- uses: actions/github-script@60a0d83039c74a4aee543508d2ffcb1c3799cdea # v7.0.1
24+
with:
25+
script: |
26+
// Do some dumb retries on requests.
27+
const retries = 7;
28+
const baseBackoff = 100;
29+
const sleep = timeout => new Promise(resolve => setTimeout(resolve, timeout));
30+
github.hook.wrap('request', async (request, options) => {
31+
for (let attempt = 1; attempt <= retries; attempt++) {
32+
try {
33+
return await request(options);
34+
} catch (err) {
35+
if (attempt < retries) {
36+
core.warning(`Request getting retried. Attempt: ${attempt}`);
37+
await sleep(baseBackoff * Math.pow(2, attempt));
38+
continue;
39+
}
40+
throw err;
41+
}
42+
}
43+
});
44+
45+
const MAX_API_REQUESTS = 100;
46+
47+
// If a PR is not labeled stale, label it stale after no update for 60 days.
48+
const STALE_LABEL_THRESHOLD_MS = 1000 * 60 * 60 * 24 * 60;
49+
// For PRs already labeled stale, close after no update for 30 days.
50+
const STALE_CLOSE_THRESHOLD_MS = 1000 * 60 * 60 * 24 * 30;
51+
52+
const STALE_MESSAGE =
53+
"Looks like this PR hasn't been updated in a while so we're going to go ahead and mark this as `Stale`. <br>" +
54+
"Feel free to remove the `Stale` label if you feel this was a mistake. <br>" +
55+
"If you are unable to remove the `Stale` label please contact a maintainer in order to do so. <br>" +
56+
"If you want the bot to never mark this PR stale again, add the `no-stale` label.<br>" +
57+
"`Stale` pull requests will automatically be closed after 30 days of inactivity.<br>";
58+
59+
let numAPIRequests = 0;
60+
let numProcessed = 0;
61+
62+
async function processPull(pull) {
63+
core.info(`[${pull.number}] URL: ${pull.html_url}`);
64+
numProcessed += 1;
65+
const labels = pull.labels.map((label) => label.name);
66+
67+
// Skip if certain labels are present.
68+
if (labels.includes("no-stale") || labels.includes("high priority")) {
69+
core.info(`[${pull.number}] Skipping because PR has an exempting label.`);
70+
return false;
71+
}
72+
73+
// Check if the PR is stale, according to our configured thresholds.
74+
let staleThresholdMillis;
75+
if (labels.includes("Stale")) {
76+
core.info(`[${pull.number}] PR is labeled stale, checking whether we should close it.`);
77+
staleThresholdMillis = STALE_CLOSE_THRESHOLD_MS;
78+
} else {
79+
core.info(`[${pull.number}] Checking whether to label PR as stale.`);
80+
staleThresholdMillis = STALE_LABEL_THRESHOLD_MS;
81+
}
82+
83+
const millisSinceLastUpdated =
84+
new Date().getTime() - new Date(pull.updated_at).getTime();
85+
86+
if (millisSinceLastUpdated < staleThresholdMillis) {
87+
core.info(`[${pull.number}] Skipping because PR was updated recently`);
88+
return false;
89+
}
90+
91+
// At this point, we know we should do something.
92+
// For PRs already labeled stale, close them.
93+
if (labels.includes("Stale")) {
94+
core.info(`[${pull.number}] Closing PR.`);
95+
numAPIRequests += 1;
96+
//await github.rest.issues.update({
97+
//owner: "pytorch",
98+
//repo: "executorch",
99+
//issue_number: pull.number,
100+
//state: "closed",
101+
//});
102+
} else {
103+
// For PRs not labeled stale, label them stale.
104+
core.info(`[${pull.number}] Labeling PR as stale.`);
105+
106+
numAPIRequests += 1;
107+
//await github.rest.issues.createComment({
108+
//owner: "pytorch",
109+
//repo: "executorch",
110+
//issue_number: pull.number,
111+
//body: STALE_MESSAGE,
112+
//});
113+
114+
numAPIRequests += 1;
115+
//await github.rest.issues.addLabels({
116+
//owner: "pytorch",
117+
//repo: "executorch",
118+
//issue_number: pull.number,
119+
//labels: ["Stale"],
120+
//});
121+
}
122+
}
123+
124+
for await (const response of github.paginate.iterator(
125+
github.rest.pulls.list,
126+
{
127+
owner: "pytorch",
128+
repo: "executorch",
129+
state: "open",
130+
sort: "created",
131+
direction: "asc",
132+
per_page: 100,
133+
}
134+
)) {
135+
numAPIRequests += 1;
136+
const pulls = response.data;
137+
// Awaiting in a loop is intentional here. We want to serialize execution so
138+
// that log groups are printed correctly.
139+
for (const pull of pulls) {
140+
if (numAPIRequests > MAX_API_REQUESTS) {
141+
core.warning("Max API requests exceeded, exiting.");
142+
process.exit(0);
143+
}
144+
await core.group(`Processing PR #${pull.number}`, async () => {
145+
await processPull(pull);
146+
});
147+
}
148+
}
149+
core.info(`Processed ${numProcessed} PRs total.`);

0 commit comments

Comments
 (0)