Skip to content

Commit aea11e8

Browse files
Update
[ghstack-poisoned]
2 parents d075361 + c837491 commit aea11e8

File tree

152 files changed

+7885
-3296
lines changed

Some content is hidden

Large commits have some content hidden by default. Use the search box below to find content that may be hidden.

152 files changed

+7885
-3296
lines changed
Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1 +1 @@
1-
cf9d09490c7f6685ec68d5db3acf2e0d73c54d00
1+
53a2908a10f414a2f85caa06703a26a40e873869

.ci/scripts/test-cuda-build.sh

Lines changed: 0 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -27,9 +27,6 @@ test_executorch_cuda_build() {
2727
nvcc --version || echo "nvcc not found"
2828
nvidia-smi || echo "nvidia-smi not found"
2929

30-
# Set CMAKE_ARGS to enable CUDA build - ExecuTorch will handle PyTorch installation automatically
31-
export CMAKE_ARGS="-DEXECUTORCH_BUILD_CUDA=ON"
32-
3330
echo "=== Starting ExecuTorch Installation ==="
3431
# Install ExecuTorch with CUDA support with timeout and error handling
3532
timeout 5400 ./install_executorch.sh || {

.ci/scripts/test_llama_lora.sh

Lines changed: 45 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -94,7 +94,7 @@ else
9494
exit 1
9595
fi
9696

97-
# Export LoRA PTE, PTD file.
97+
# Export LoRA PTE, foundation PTD file.
9898
MODEL_SEPARATE="${MODEL_NAME}_separate"
9999
$PYTHON_EXECUTABLE -m extension.llm.export.export_llm \
100100
base.checkpoint="${DOWNLOADED_PATH}/consolidated.00.pth" \
@@ -114,20 +114,62 @@ $PYTHON_EXECUTABLE -m extension.llm.export.export_llm \
114114
NOW=$(date +"%H:%M:%S")
115115
echo "Starting to run llama runner at ${NOW}"
116116
# shellcheck source=/dev/null
117-
cmake-out/examples/models/llama/llama_main --model_path=${MODEL_SEPARATE}.pte --data_path=${MODEL_SEPARATE}.ptd --prompt="${PROMPT}" ${RUNTIME_ARGS} > result2.txt
117+
cmake-out/examples/models/llama/llama_main --model_path=${MODEL_SEPARATE}.pte --data_paths=${MODEL_SEPARATE}.ptd --prompt="${PROMPT}" ${RUNTIME_ARGS} > result2.txt
118118
NOW=$(date +"%H:%M:%S")
119119
echo "Finished at ${NOW}"
120120

121121
RESULT2=$(cat result2.txt)
122122
if [[ "${RESULT2}" == "${EXPECTED_PREFIX}"* ]]; then
123123
echo "Expected result prefix: ${EXPECTED_PREFIX}"
124124
echo "Actual result: ${RESULT2}"
125+
# Do not clean up files if test passes, as they're re-used in the next test.
125126
echo "Success"
126-
cleanup_files
127127
else
128128
echo "Expected result prefix: ${EXPECTED_PREFIX}"
129129
echo "Actual result: ${RESULT2}"
130130
echo "Failure; results not the same"
131131
cleanup_files
132132
exit 1
133133
fi
134+
135+
# Export LoRA PTE, LoRA PTD, foundation PTD file.
136+
MODEL_PROGRAM_ONLY="${MODEL_NAME}_program"
137+
MODEL_LORA_WEIGHTS="lora_weights"
138+
MODEL_FOUNDATION_WEIGHTS="foundation_weights"
139+
$PYTHON_EXECUTABLE -m extension.llm.export.export_llm \
140+
base.checkpoint="${DOWNLOADED_PATH}/consolidated.00.pth" \
141+
base.params="${DOWNLOADED_PATH}/params.json" \
142+
base.adapter_checkpoint="${DOWNLOADED_PATH}/adapter_model.pt" \
143+
base.adapter_config="${DOWNLOADED_PATH}/adapter_config.json" \
144+
base.tokenizer_path="${DOWNLOADED_PATH}/tokenizer.model" \
145+
model.use_kv_cache=true \
146+
model.use_sdpa_with_kv_cache=true \
147+
model.dtype_override="fp32" \
148+
backend.xnnpack.enabled=true \
149+
backend.xnnpack.extended_ops=true \
150+
export.output_name="${MODEL_PROGRAM_ONLY}.pte" \
151+
export.foundation_weights_file="${MODEL_FOUNDATION_WEIGHTS}.ptd" \
152+
export.lora_weights_file="${MODEL_LORA_WEIGHTS}.ptd"
153+
154+
# Run llama runner.
155+
NOW=$(date +"%H:%M:%S")
156+
echo "Starting to run llama runner at ${NOW}"
157+
# shellcheck source=/dev/null
158+
cmake-out/examples/models/llama/llama_main --model_path=${MODEL_PROGRAM_ONLY}.pte --data_paths="${MODEL_FOUNDATION_WEIGHTS}.ptd,${MODEL_LORA_WEIGHTS}.ptd" --prompt="${PROMPT}" ${RUNTIME_ARGS} > result3.txt
159+
NOW=$(date +"%H:%M:%S")
160+
echo "Finished at ${NOW}"
161+
162+
RESULT3=$(cat result3.txt)
163+
if [[ "${RESULT3}" == "${EXPECTED_PREFIX}"* ]]; then
164+
echo "Expected result prefix: ${EXPECTED_PREFIX}"
165+
echo "Actual result: ${RESULT3}"
166+
echo "Success"
167+
else
168+
echo "Expected result prefix: ${EXPECTED_PREFIX}"
169+
echo "Actual result: ${RESULT3}"
170+
echo "Failure; results not the same"
171+
cleanup_files
172+
exit 1
173+
fi
174+
175+
cleanup_files

.ci/scripts/test_wheel_package_qnn.sh

Lines changed: 3 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -158,10 +158,12 @@ PY
158158

159159
# Install torchao based on the pinned PyTorch version
160160
"$PIPBIN" install torch=="${TORCH_VERSION}.${NIGHTLY_VERSION}" --index-url "https://download.pytorch.org/whl/nightly/cpu"
161+
"$PIPBIN" install wheel
161162

162163
# Install torchao based on the pinned commit from third-party/ao submodule
163164
pushd "$REPO_ROOT/third-party/ao" > /dev/null
164-
USE_CPP=0 "$PYBIN" setup.py develop
165+
export USE_CPP=0
166+
"$PIPBIN" install . --no-build-isolation
165167
popd > /dev/null
166168

167169
echo "=== [$LABEL] Import smoke tests ==="

.githooks/README.md

Lines changed: 57 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,57 @@
1+
# Git Hooks
2+
3+
This directory contains Git hooks for the ExecuTorch repository.
4+
5+
## Pre-commit Hook
6+
7+
The pre-commit hook automatically updates the PyTorch commit pin in `.ci/docker/ci_commit_pins/pytorch.txt` whenever `torch_pin.py` is modified.
8+
9+
### How It Works
10+
11+
1. When you commit changes to `torch_pin.py`, the hook detects the change
12+
2. It parses the `NIGHTLY_VERSION` field (e.g., `dev20251004`)
13+
3. Converts it to a date string (e.g., `2025-10-04`)
14+
4. Fetches the corresponding commit hash from the PyTorch nightly branch at https://github.com/pytorch/pytorch/tree/nightly
15+
5. Updates `.ci/docker/ci_commit_pins/pytorch.txt` with the new commit hash
16+
6. Automatically stages the updated file for commit
17+
18+
### Installation
19+
20+
To install the Git hooks, run:
21+
22+
```bash
23+
.githooks/install.sh
24+
```
25+
26+
This will copy the pre-commit hook to `.git/hooks/` and make it executable.
27+
28+
### Manual Usage
29+
30+
You can also run the update script manually at any time:
31+
32+
```bash
33+
python .github/scripts/update_pytorch_pin.py
34+
```
35+
36+
### Uninstalling
37+
38+
To remove the pre-commit hook:
39+
40+
```bash
41+
rm .git/hooks/pre-commit
42+
```
43+
44+
## Troubleshooting
45+
46+
If the hook fails during a commit:
47+
48+
1. Check that Python 3 is available in your PATH
49+
2. Ensure you have internet connectivity to fetch commits from GitHub
50+
3. Verify that the `NIGHTLY_VERSION` in `torch_pin.py` is in the correct format (`devYYYYMMDD`)
51+
4. Make sure the corresponding nightly release exists in the PyTorch nightly branch
52+
53+
You can run the script manually to see detailed error messages:
54+
55+
```bash
56+
python .github/scripts/update_pytorch_pin.py
57+
```

.githooks/install.sh

Lines changed: 23 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,23 @@
1+
#!/usr/bin/env bash
2+
3+
# Script to install Git hooks from .githooks directory
4+
5+
set -e
6+
7+
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
8+
GIT_DIR="$(git rev-parse --git-dir)"
9+
HOOKS_DIR="${GIT_DIR}/hooks"
10+
11+
echo "Installing Git hooks..."
12+
13+
# Install pre-commit hook
14+
echo "📦 Installing pre-commit hook..."
15+
cp "${SCRIPT_DIR}/pre-commit" "${HOOKS_DIR}/pre-commit"
16+
chmod +x "${HOOKS_DIR}/pre-commit"
17+
echo "✅ pre-commit hook installed"
18+
19+
echo ""
20+
echo "🎉 Git hooks installed successfully!"
21+
echo ""
22+
echo "The pre-commit hook will automatically update .ci/docker/ci_commit_pins/pytorch.txt"
23+
echo "whenever you commit changes to torch_pin.py"

.githooks/pre-commit

Lines changed: 28 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,28 @@
1+
#!/usr/bin/env bash
2+
3+
# Pre-commit hook to automatically update PyTorch commit pin when torch_pin.py changes
4+
5+
# Check if torch_pin.py is being committed
6+
if git diff --cached --name-only | grep -q "^torch_pin.py$"; then
7+
echo "🔍 Detected changes to torch_pin.py"
8+
echo "📝 Updating PyTorch commit pin..."
9+
10+
# Run the update script
11+
if python .github/scripts/update_pytorch_pin.py; then
12+
# Check if pytorch.txt was modified
13+
if ! git diff --quiet .ci/docker/ci_commit_pins/pytorch.txt; then
14+
echo "✅ PyTorch commit pin updated successfully"
15+
# Stage the updated file
16+
git add .ci/docker/ci_commit_pins/pytorch.txt
17+
echo "📌 Staged .ci/docker/ci_commit_pins/pytorch.txt"
18+
else
19+
echo "ℹ️ PyTorch commit pin unchanged"
20+
fi
21+
else
22+
echo "❌ Failed to update PyTorch commit pin"
23+
echo "Please run: python .github/scripts/update_pytorch_pin.py"
24+
exit 1
25+
fi
26+
fi
27+
28+
exit 0
Lines changed: 123 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,123 @@
1+
#!/usr/bin/env python3
2+
3+
import json
4+
import re
5+
import sys
6+
import urllib.request
7+
from datetime import datetime
8+
9+
10+
def parse_nightly_version(nightly_version):
11+
"""
12+
Parse NIGHTLY_VERSION (e.g., 'dev20251004') to date string (e.g., '2025-10-04').
13+
14+
Args:
15+
nightly_version: String in format 'devYYYYMMDD'
16+
17+
Returns:
18+
Date string in format 'YYYY-MM-DD'
19+
"""
20+
match = re.match(r"dev(\d{4})(\d{2})(\d{2})", nightly_version)
21+
if not match:
22+
raise ValueError(f"Invalid NIGHTLY_VERSION format: {nightly_version}")
23+
24+
year, month, day = match.groups()
25+
return f"{year}-{month}-{day}"
26+
27+
28+
def get_torch_nightly_version():
29+
"""
30+
Read NIGHTLY_VERSION from torch_pin.py.
31+
32+
Returns:
33+
NIGHTLY_VERSION string
34+
"""
35+
with open("torch_pin.py", "r") as f:
36+
content = f.read()
37+
38+
match = re.search(r'NIGHTLY_VERSION\s*=\s*["\']([^"\']+)["\']', content)
39+
if not match:
40+
raise ValueError("Could not find NIGHTLY_VERSION in torch_pin.py")
41+
42+
return match.group(1)
43+
44+
45+
def get_commit_hash_for_nightly(date_str):
46+
"""
47+
Fetch commit hash from PyTorch nightly branch for a given date.
48+
49+
Args:
50+
date_str: Date string in format 'YYYY-MM-DD'
51+
52+
Returns:
53+
Commit hash string
54+
"""
55+
api_url = "https://api.github.com/repos/pytorch/pytorch/commits"
56+
params = f"?sha=nightly&per_page=100"
57+
url = api_url + params
58+
59+
req = urllib.request.Request(url)
60+
req.add_header("Accept", "application/vnd.github.v3+json")
61+
req.add_header("User-Agent", "ExecuTorch-Bot")
62+
63+
try:
64+
with urllib.request.urlopen(req) as response:
65+
commits = json.loads(response.read().decode())
66+
except Exception as e:
67+
print(f"Error fetching commits: {e}", file=sys.stderr)
68+
sys.exit(1)
69+
70+
# Look for commit with title matching "{date_str} nightly release"
71+
target_title = f"{date_str} nightly release"
72+
73+
for commit in commits:
74+
commit_msg = commit.get("commit", {}).get("message", "")
75+
# Check if the first line of commit message matches
76+
first_line = commit_msg.split("\n")[0].strip()
77+
if first_line == target_title or first_line.startswith(f"{date_str} nightly"):
78+
return commit["sha"]
79+
80+
raise ValueError(
81+
f"Could not find commit with title matching '{target_title}' in nightly branch"
82+
)
83+
84+
85+
def update_pytorch_pin(commit_hash):
86+
"""
87+
Update .ci/docker/ci_commit_pins/pytorch.txt with the new commit hash.
88+
89+
Args:
90+
commit_hash: Commit hash to write
91+
"""
92+
pin_file = ".ci/docker/ci_commit_pins/pytorch.txt"
93+
with open(pin_file, "w") as f:
94+
f.write(f"{commit_hash}\n")
95+
print(f"Updated {pin_file} with commit hash: {commit_hash}")
96+
97+
98+
def main():
99+
try:
100+
# Read NIGHTLY_VERSION from torch_pin.py
101+
nightly_version = get_torch_nightly_version()
102+
print(f"Found NIGHTLY_VERSION: {nightly_version}")
103+
104+
# Parse to date string
105+
date_str = parse_nightly_version(nightly_version)
106+
print(f"Parsed date: {date_str}")
107+
108+
# Fetch commit hash from PyTorch nightly branch
109+
commit_hash = get_commit_hash_for_nightly(date_str)
110+
print(f"Found commit hash: {commit_hash}")
111+
112+
# Update the pin file
113+
update_pytorch_pin(commit_hash)
114+
115+
print("Successfully updated PyTorch commit pin!")
116+
117+
except Exception as e:
118+
print(f"Error: {e}", file=sys.stderr)
119+
sys.exit(1)
120+
121+
122+
if __name__ == "__main__":
123+
main()

.github/workflows/cuda.yml

Lines changed: 5 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -1,7 +1,7 @@
11
# Test ExecuTorch CUDA Build Compatibility
22
# This workflow tests whether ExecuTorch can be successfully built with CUDA support
33
# across different CUDA versions (12.6, 12.8, 12.9) using the command:
4-
# CMAKE_ARGS="-DEXECUTORCH_BUILD_CUDA=ON" ./install_executorch.sh
4+
# ./install_executorch.sh
55
#
66
# Note: ExecuTorch automatically detects the system CUDA version using nvcc and
77
# installs the appropriate PyTorch wheel. No manual CUDA/PyTorch installation needed.
@@ -43,7 +43,7 @@ jobs:
4343
set -eux
4444
4545
# Test ExecuTorch CUDA build - ExecuTorch will automatically detect CUDA version
46-
# and install the appropriate PyTorch wheel when CMAKE_ARGS="-DEXECUTORCH_BUILD_CUDA=ON"
46+
# and install the appropriate PyTorch wheel
4747
source .ci/scripts/test-cuda-build.sh "${{ matrix.cuda-version }}"
4848
4949
# This job will fail if any of the CUDA versions fail
@@ -71,7 +71,7 @@ jobs:
7171
strategy:
7272
fail-fast: false
7373
matrix:
74-
model: [linear, add, add_mul, resnet18]
74+
model: [linear, add, add_mul, resnet18, conv1d]
7575
with:
7676
timeout: 90
7777
runner: linux.g5.4xlarge.nvidia.gpu
@@ -83,7 +83,7 @@ jobs:
8383
script: |
8484
set -eux
8585
86-
PYTHON_EXECUTABLE=python CMAKE_ARGS="-DEXECUTORCH_BUILD_CUDA=ON" ./install_executorch.sh
86+
PYTHON_EXECUTABLE=python ./install_executorch.sh
8787
export LD_LIBRARY_PATH=/opt/conda/lib:$LD_LIBRARY_PATH
8888
PYTHON_EXECUTABLE=python source .ci/scripts/test_model.sh "${{ matrix.model }}" cmake cuda
8989
@@ -110,7 +110,7 @@ jobs:
110110
set -eux
111111
112112
echo "::group::Setup ExecuTorch"
113-
CMAKE_ARGS="-DEXECUTORCH_BUILD_CUDA=ON" ./install_executorch.sh
113+
./install_executorch.sh
114114
echo "::endgroup::"
115115
116116
echo "::group::Setup Huggingface"

0 commit comments

Comments (0)