
Commit 52eeb46

Merge branch 'pytorch:main' into main
2 parents 2dec913 + cd306d3


628 files changed: +878345 / -5547 lines changed

Lines changed: 1 addition & 1 deletion
@@ -1 +1 @@
-c8a648d4dffb9f0133ff4a2ea0e660b42105d3ad
+19eff28ff3f19b50da46f5a9ff5f4d4d213806fe

.ci/docker/common/install_cache.sh

Lines changed: 45 additions & 7 deletions
@@ -12,6 +12,26 @@ set -ex
 # shellcheck source=/dev/null
 source "$(dirname "${BASH_SOURCE[0]}")/utils.sh"
 
+install_ubuntu() {
+  echo "Preparing to build sccache from source"
+  apt-get update
+  # libssl-dev will not work as it is upgraded to libssl3 in Ubuntu-22.04.
+  # Instead use lib and headers from OpenSSL1.1 installed in `install_openssl.sh`
+  apt-get install -y cargo
+  echo "Checking out sccache repo"
+  git clone https://github.com/mozilla/sccache -b v0.8.2
+
+  cd sccache
+  echo "Building sccache"
+  cargo build --release
+  cp target/release/sccache /opt/cache/bin
+  echo "Cleaning up"
+  cd ..
+  rm -rf sccache
+  apt-get remove -y cargo rustc
+  apt-get autoclean && apt-get clean
+}
+
 install_binary() {
   echo "Downloading sccache binary from S3 repo"
   curl --retry 3 https://s3.amazonaws.com/ossci-linux/sccache -o /opt/cache/bin/sccache
@@ -22,15 +42,33 @@ mkdir -p /opt/cache/bin
 sed -e 's|PATH="\(.*\)"|PATH="/opt/cache/bin:\1"|g' -i /etc/environment
 export PATH="/opt/cache/bin:$PATH"
 
-# NB: Install the pre-built binary from S3 as building from source
-# https://github.com/pytorch/sccache has started failing mysteriously
-# in which sccache server couldn't start with the following error:
-#   sccache: error: Invalid argument (os error 22)
-install_binary
+install_ubuntu
 
 function write_sccache_stub() {
   BINARY=$1
-  printf "#!/bin/sh\nif [ \$(env -u LD_PRELOAD ps -p \$PPID -o comm=) != sccache ]; then\n exec sccache %s \"\$@\"\nelse\n exec %s \"\$@\"\nfi" "$(which "${BINARY}")" "$(which "${BINARY}")" > "/opt/cache/bin/${BINARY}"
+  if [ $1 == "gcc" ]; then
+    # Do not call sccache recursively when dumping preprocessor argument
+    # For some reason it's very important for the first cached nvcc invocation
+    cat >"/opt/cache/bin/$1" <<EOF
+#!/bin/sh
+if [ "\$1" = "-E" ] || [ "\$2" = "-E" ]; then
+  exec $(which $1) "\$@"
+elif [ \$(env -u LD_PRELOAD ps -p \$PPID -o comm=) != sccache ]; then
+  exec sccache $(which $1) "\$@"
+else
+  exec $(which $1) "\$@"
+fi
+EOF
+  else
+    cat >"/opt/cache/bin/$1" <<EOF
+#!/bin/sh
+if [ \$(env -u LD_PRELOAD ps -p \$PPID -o comm=) != sccache ]; then
+  exec sccache $(which $1) "\$@"
+else
+  exec $(which $1) "\$@"
+fi
+EOF
+  fi
   chmod a+x "/opt/cache/bin/${BINARY}"
 }
 
@@ -44,7 +82,7 @@ init_sccache() {
 
   # NB: This function is adopted from PyTorch core at
   # https://github.com/pytorch/pytorch/blob/main/.ci/pytorch/common-build.sh
-  as_ci_user sccache --stop-server > /dev/null 2>&1 || true
+  as_ci_user sccache --stop-server >/dev/null 2>&1 || true
   rm -f "${SCCACHE_ERROR_LOG}" || true
 
   # Clear sccache stats before using it
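For reference, this is roughly what the generated /opt/cache/bin/gcc stub contains once the heredoc above has been expanded; the /usr/bin/gcc path is illustrative and stands in for whatever `which gcc` returns at image build time:

    #!/bin/sh
    # Bypass sccache when gcc is only asked to run the preprocessor (-E),
    # and avoid wrapping recursively when the parent process is sccache itself.
    if [ "$1" = "-E" ] || [ "$2" = "-E" ]; then
      exec /usr/bin/gcc "$@"
    elif [ $(env -u LD_PRELOAD ps -p $PPID -o comm=) != sccache ]; then
      exec sccache /usr/bin/gcc "$@"
    else
      exec /usr/bin/gcc "$@"
    fi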

.ci/docker/ubuntu/Dockerfile

Lines changed: 1 addition & 0 deletions
@@ -57,6 +57,7 @@ COPY ./common/utils.sh utils.sh
 RUN bash ./install_cache.sh && rm install_cache.sh utils.sh
 ENV SCCACHE_BUCKET ossci-compiler-cache-circleci-v2
 ENV SCCACHE_S3_KEY_PREFIX executorch
+ENV SCCACHE_REGION us-east-1
 
 ARG TORCH_VERSION
 COPY ./common/install_pytorch.sh install_pytorch.sh
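sccache picks these settings up from the environment at runtime. A quick sanity check one might run inside the built image (the exact output format varies by sccache version):

    # Show the cache backend resolved from SCCACHE_BUCKET / SCCACHE_REGION /
    # SCCACHE_S3_KEY_PREFIX, plus compile hit/miss counters.
    sccache --show-stats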

.ci/scripts/gather_test_models.py

Lines changed: 11 additions & 7 deletions
@@ -20,14 +20,16 @@
 CUSTOM_RUNNERS = {
     "linux": {
         # This one runs OOM on smaller runner, the root cause is unclear (T163016365)
-        "w2l": "linux.12xlarge",
-        "ic4": "linux.12xlarge",
-        "resnet50": "linux.12xlarge",
-        "llava": "linux.12xlarge",
+        "w2l": "linux.4xlarge.memory",
+        "ic4": "linux.4xlarge.memory",
+        "resnet50": "linux.4xlarge.memory",
+        "llava": "linux.4xlarge.memory",
+        "llama3_2_vision_encoder": "linux.4xlarge.memory",
+        "llama3_2_text_decoder": "linux.4xlarge.memory",
         # This one causes timeout on smaller runner, the root cause is unclear (T161064121)
-        "dl3": "linux.12xlarge",
-        "emformer_join": "linux.12xlarge",
-        "emformer_predict": "linux.12xlarge",
+        "dl3": "linux.4xlarge.memory",
+        "emformer_join": "linux.4xlarge.memory",
+        "emformer_predict": "linux.4xlarge.memory",
     }
 }
 
@@ -37,10 +39,12 @@
     "linux": {
         "mobilebert": 90,
         "emformer_predict": 360,
+        "llama3_2_text_decoder": 360,
     },
     "macos": {
         "mobilebert": 90,
         "emformer_predict": 360,
+        "llama3_2_text_decoder": 360,
     },
 }

.ci/scripts/setup-macos.sh

Lines changed: 3 additions & 0 deletions
@@ -49,6 +49,9 @@ install_buck() {
 
   rm "${BUCK2}"
   popd
+
+  # Kill all running buck2 daemon for a fresh start
+  buck2 killall || true
 }
 
 function write_sccache_stub() {

.ci/scripts/test_llama.sh

Lines changed: 54 additions & 10 deletions
@@ -9,11 +9,51 @@ set -exu
 # shellcheck source=/dev/null
 source "$(dirname "${BASH_SOURCE[0]}")/utils.sh"
 
-MODEL_NAME=$1 # stories110M
-BUILD_TOOL=$2 # buck2 or cmake
-DTYPE=$3 # fp16, bf16, or fp32
-MODE=${4:-"xnnpack+custom"} # portable or xnnpack+custom or xnnpack+custom+qe
-UPLOAD_DIR=${5:-}
+while [[ $# -gt 0 ]]; do
+  case "$1" in
+    -model)
+      MODEL_NAME="$2" # stories110M
+      shift 2
+      ;;
+    -build_tool)
+      BUILD_TOOL="$2" # buck2 or cmake
+      shift 2
+      ;;
+    -dtype)
+      DTYPE="$2" # fp16, bf16, or fp32
+      shift 2
+      ;;
+    -mode)
+      MODE="$2" # portable or xnnpack+custom or xnnpack+custom+qe
+      shift 2
+      ;;
+    -pt2e_quantize)
+      PT2E_QUANTIZE="$2"
+      shift 2
+      ;;
+    -upload)
+      UPLOAD_DIR="$2"
+      shift 2
+      ;;
+    *)
+      echo "Unknown option: $1"
+      usage
+      ;;
+  esac
+done
+
+# Default mode to xnnpack+custom if not set
+MODE=${MODE:-"xnnpack+custom"}
+
+# Default UPLOAD_DIR to empty string if not set
+UPLOAD_DIR="${UPLOAD_DIR:-}"
+
+# Default PT2E_QUANTIZE to empty string if not set
+PT2E_QUANTIZE="${PT2E_QUANTIZE:-}"
+
+# Default CMake Build Type to release mode
+CMAKE_BUILD_TYPE=${CMAKE_BUILD_TYPE:-Release}
 
 if [[ $# -lt 4 ]]; then # Assuming 4 mandatory args
   echo "Expecting atleast 4 positional arguments"
   echo "Usage: [...]"
@@ -106,7 +146,7 @@ cmake_install_executorch_libraries() {
     rm -rf cmake-out
     retry cmake \
         -DCMAKE_INSTALL_PREFIX=cmake-out \
-        -DCMAKE_BUILD_TYPE=Debug \
+        -DCMAKE_BUILD_TYPE="$CMAKE_BUILD_TYPE" \
         -DEXECUTORCH_BUILD_EXTENSION_DATA_LOADER=ON \
         -DEXECUTORCH_BUILD_EXTENSION_MODULE=ON \
         -DEXECUTORCH_BUILD_EXTENSION_TENSOR=ON \
@@ -120,22 +160,22 @@ cmake_install_executorch_libraries() {
         -DQNN_SDK_ROOT="$QNN_SDK_ROOT" \
        -DPYTHON_EXECUTABLE="$PYTHON_EXECUTABLE" \
        -Bcmake-out .
-    cmake --build cmake-out -j9 --target install --config Debug
+    cmake --build cmake-out -j9 --target install --config "$CMAKE_BUILD_TYPE"
 }
 
 cmake_build_llama_runner() {
     echo "Building llama runner"
     dir="examples/models/llama"
     retry cmake \
        -DCMAKE_INSTALL_PREFIX=cmake-out \
-       -DCMAKE_BUILD_TYPE=Debug \
+       -DCMAKE_BUILD_TYPE="$CMAKE_BUILD_TYPE" \
        -DEXECUTORCH_BUILD_KERNELS_CUSTOM="$CUSTOM" \
        -DEXECUTORCH_BUILD_KERNELS_OPTIMIZED=ON \
        -DEXECUTORCH_BUILD_XNNPACK="$XNNPACK" \
        -DPYTHON_EXECUTABLE="$PYTHON_EXECUTABLE" \
        -Bcmake-out/${dir} \
        ${dir}
-    cmake --build cmake-out/${dir} -j9 --config Debug
+    cmake --build cmake-out/${dir} -j9 --config "$CMAKE_BUILD_TYPE"
 
 }
 
@@ -150,7 +190,7 @@ cleanup_files() {
 }
 
 prepare_artifacts_upload() {
-  if [ -n "$UPLOAD_DIR" ]; then
+  if [ -n "${UPLOAD_DIR}" ]; then
     echo "Preparing for uploading generated artifacs"
     zip -j model.zip "${EXPORTED_MODEL_NAME}" tokenizer.bin
     mkdir -p "${UPLOAD_DIR}"
@@ -204,6 +244,10 @@ if [[ "${COREML}" == "ON" ]]; then
 fi
 if [[ "${QNN}" == "ON" ]]; then
     EXPORT_ARGS="${EXPORT_ARGS} -kv -v --qnn --disable_dynamic_shape"
+    echo "PT2E_QUANTIZE is ${PT2E_QUANTIZE}"
+    if [[ "${PT2E_QUANTIZE}" == "qnn_16a16w" ]]; then
+        EXPORT_ARGS+=" --tokenizer_path tokenizer.model --pt2e_quantize qnn_16a16w --calibration_tasks wikitext --calibration_limit 1 --calibration_seq_length 128 --calibration_data Once "
+    fi
 fi
 # Add dynamically linked library location
 $PYTHON_EXECUTABLE -m examples.models.llama.export_llama ${EXPORT_ARGS}
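With the positional interface replaced by flags, a hypothetical invocation of the updated script could look like this (model, build tool, dtype, mode, and quantization values are taken from the comments in the diff above; CMAKE_BUILD_TYPE now defaults to Release unless overridden in the environment):

    # Illustrative only; values shown are examples from the option comments above.
    CMAKE_BUILD_TYPE=Debug bash .ci/scripts/test_llama.sh \
      -model stories110M \
      -build_tool cmake \
      -dtype fp32 \
      -mode xnnpack+custom \
      -pt2e_quantize qnn_16a16w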

.ci/scripts/test_llava.sh

Lines changed: 8 additions & 8 deletions
@@ -8,11 +8,11 @@
 set -exu
 # shellcheck source=/dev/null
 
-BUILD_TYPE=${1:-Debug}
 TARGET_OS=${2:-Native}
 BUILD_DIR=${3:-cmake-out}
+CMAKE_BUILD_TYPE=${CMAKE_BUILD_TYPE:-Release}
 
-echo "Building with BUILD_TYPE: $BUILD_TYPE, TARGET_OS: $TARGET_OS, BUILD_DIR: $BUILD_DIR"
+echo "Building with CMAKE_BUILD_TYPE: $CMAKE_BUILD_TYPE, TARGET_OS: $TARGET_OS, BUILD_DIR: $BUILD_DIR"
 
 if [[ -z "${PYTHON_EXECUTABLE:-}" ]]; then
   PYTHON_EXECUTABLE=python3
@@ -32,7 +32,7 @@ if hash nproc &> /dev/null; then NPROC=$(nproc); fi
 
 EXECUTORCH_COMMON_CMAKE_ARGS=" \
        -DCMAKE_INSTALL_PREFIX=${BUILD_DIR} \
-       -DCMAKE_BUILD_TYPE=${BUILD_TYPE} \
+       -DCMAKE_BUILD_TYPE=${CMAKE_BUILD_TYPE} \
        -DEXECUTORCH_ENABLE_LOGGING=ON \
        -DEXECUTORCH_BUILD_EXTENSION_MODULE=ON \
        -DEXECUTORCH_BUILD_EXTENSION_DATA_LOADER=ON \
@@ -49,7 +49,7 @@ cmake_install_executorch_libraries() {
        ${EXECUTORCH_COMMON_CMAKE_ARGS} \
        -B${BUILD_DIR} .
 
-    cmake --build ${BUILD_DIR} -j${NPROC} --target install --config ${BUILD_TYPE}
+    cmake --build ${BUILD_DIR} -j${NPROC} --target install --config ${CMAKE_BUILD_TYPE}
 }
 
 cmake_install_executorch_libraries_for_android() {
@@ -59,14 +59,14 @@ cmake_install_executorch_libraries_for_android() {
        ${EXECUTORCH_COMMON_CMAKE_ARGS} \
        -B${BUILD_DIR} .
 
-    cmake --build ${BUILD_DIR} -j${NPROC} --target install --config ${BUILD_TYPE}
+    cmake --build ${BUILD_DIR} -j${NPROC} --target install --config ${CMAKE_BUILD_TYPE}
 }
 
 
 LLAVA_COMMON_CMAKE_ARGS=" \
        -DPYTHON_EXECUTABLE="$PYTHON_EXECUTABLE" \
        -DCMAKE_INSTALL_PREFIX=${BUILD_DIR} \
-       -DCMAKE_BUILD_TYPE=${BUILD_TYPE} \
+       -DCMAKE_BUILD_TYPE=${CMAKE_BUILD_TYPE} \
        -DEXECUTORCH_BUILD_KERNELS_CUSTOM=ON \
        -DEXECUTORCH_BUILD_KERNELS_OPTIMIZED=ON \
        -DEXECUTORCH_BUILD_XNNPACK=ON"
@@ -81,7 +81,7 @@ cmake_build_llava_runner() {
        -B${BUILD_DIR}/${dir} \
        ${dir}
 
-    cmake --build ${BUILD_DIR}/${dir} -j${NPROC} --config ${BUILD_TYPE}
+    cmake --build ${BUILD_DIR}/${dir} -j${NPROC} --config ${CMAKE_BUILD_TYPE}
 }
 
 
@@ -98,7 +98,7 @@ cmake_build_llava_runner_for_android() {
        -B${BUILD_DIR}/${dir} \
        ${dir}
 
-    cmake --build ${BUILD_DIR}/${dir} -j${NPROC} --config ${BUILD_TYPE}
+    cmake --build ${BUILD_DIR}/${dir} -j${NPROC} --config ${CMAKE_BUILD_TYPE}
 }
 
 # only export the one without custom op for now since it's
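The build type is now taken from the environment rather than from the first positional argument; a minimal sketch of calling the updated script with an explicit debug build, relying on the remaining defaults for target OS and build directory:

    # Illustrative; TARGET_OS and BUILD_DIR fall back to "Native" and "cmake-out".
    CMAKE_BUILD_TYPE=Debug bash .ci/scripts/test_llava.sh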

.ci/scripts/test_model.sh

Lines changed: 5 additions & 1 deletion
@@ -77,7 +77,7 @@ test_model() {
     # Install requirements for export_llama
     bash examples/models/llama/install_requirements.sh
     # Test export_llama script: python3 -m examples.models.llama.export_llama
-    "${PYTHON_EXECUTABLE}" -m examples.models.llama.export_llama -c examples/models/llama/params/demo_rand_params.pth -p examples/models/llama/params/demo_config.json
+    "${PYTHON_EXECUTABLE}" -m examples.models.llama.export_llama --model "${MODEL_NAME}" -c examples/models/llama/params/demo_rand_params.pth -p examples/models/llama/params/demo_config.json
     run_portable_executor_runner
     rm "./${MODEL_NAME}.pte"
   fi
@@ -87,6 +87,10 @@ test_model() {
     bash examples/models/llava/install_requirements.sh
     STRICT="--no-strict"
   fi
+  if [[ "$MODEL_NAME" == "llama3_2_vision_encoder" || "$MODEL_NAME" == "llama3_2_text_decoder" ]]; then
+    # Install requirements for llama vision.
+    bash examples/models/llama3_2_vision/install_requirements.sh
+  fi
   # python3 -m examples.portable.scripts.export --model_name="llama2" should works too
   "${PYTHON_EXECUTABLE}" -m examples.portable.scripts.export --model_name="${MODEL_NAME}" "${STRICT}"
   run_portable_executor_runner

.github/scripts/check_labels.py

Lines changed: 62 additions & 0 deletions
@@ -0,0 +1,62 @@
+#!/usr/bin/env python3
+"""Check whether a PR has required labels."""
+
+import sys
+from typing import Any
+
+from github_utils import gh_delete_comment, gh_post_pr_comment
+from gitutils import get_git_remote_name, get_git_repo_dir, GitRepo
+from label_utils import has_required_labels, is_label_err_comment, LABEL_ERR_MSG
+from trymerge import GitHubPR
+
+
+def delete_all_label_err_comments(pr: "GitHubPR") -> None:
+    for comment in pr.get_comments():
+        if is_label_err_comment(comment):
+            gh_delete_comment(pr.org, pr.project, comment.database_id)
+
+
+def add_label_err_comment(pr: "GitHubPR") -> None:
+    # Only make a comment if one doesn't exist already
+    if not any(is_label_err_comment(comment) for comment in pr.get_comments()):
+        gh_post_pr_comment(pr.org, pr.project, pr.pr_num, LABEL_ERR_MSG)
+
+
+def parse_args() -> Any:
+    from argparse import ArgumentParser
+
+    parser = ArgumentParser("Check PR labels")
+    parser.add_argument("pr_num", type=int)
+    # add a flag to return a non-zero exit code if the PR does not have the required labels
+    parser.add_argument(
+        "--exit-non-zero",
+        action="store_true",
+        help="Return a non-zero exit code if the PR does not have the required labels",
+    )
+
+    return parser.parse_args()
+
+
+def main() -> None:
+    args = parse_args()
+    repo = GitRepo(get_git_repo_dir(), get_git_remote_name())
+    org, project = repo.gh_owner_and_name()
+    pr = GitHubPR(org, project, args.pr_num)
+
+    try:
+        if not has_required_labels(pr):
+            print(LABEL_ERR_MSG, flush=True)
+            add_label_err_comment(pr)
+            if args.exit_non_zero:
+                raise RuntimeError("PR does not have required labels")
+        else:
+            delete_all_label_err_comments(pr)
+    except Exception as e:
+        if args.exit_non_zero:
+            raise RuntimeError(f"Error checking labels: {e}") from e
+
+    sys.exit(0)
+
+
+if __name__ == "__main__":
+    main()
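Based on the argument parser above, a typical invocation from a workflow step might look like the following; PR_NUMBER is an assumed variable holding the pull request number, and --exit-non-zero makes a missing label fail the job:

    # PR_NUMBER is a placeholder supplied by the calling workflow.
    python3 .github/scripts/check_labels.py "${PR_NUMBER}" --exit-non-zero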
