Commit d83a541

Merge branch 'main' into manual-registration-lib-name-11221
2 parents 2635d1e + ef48cc2 commit d83a541

File tree: 600 files changed (+32496, -6427 lines)

File renamed without changes.

Lines changed: 1 addition & 1 deletion
@@ -1 +1 @@
-9b498d3bb28b8e3411ce464dd2755c5b96d92c8f
+90f1e7bed15ca5e48c61c5b6dc5ad4810524f82f

.ci/scripts/check_c10_sync.sh

Lines changed: 1 addition & 1 deletion
@@ -12,4 +12,4 @@ pushd pytorch
 git checkout "$pytorch_pin"
 popd
 "$(dirname "${BASH_SOURCE[0]}")"/compare_dirs.sh runtime/core/portable_type/c10/c10 pytorch/c10
-"$(dirname "${BASH_SOURCE[0]}")"/compare_dirs.sh runtime/core/portable_type/c10/torch/standalone pytorch/torch/standalone
+"$(dirname "${BASH_SOURCE[0]}")"/compare_dirs.sh runtime/core/portable_type/c10/torch/headeronly pytorch/torch/headeronly

.ci/scripts/setup-arm-baremetal-tools.sh

Lines changed: 1 addition & 1 deletion
@@ -8,4 +8,4 @@
 # Setup arm example environment (including TOSA tools)
 git config --global user.email "[email protected]"
 git config --global user.name "Github Executorch"
-bash examples/arm/setup.sh --i-agree-to-the-contained-eula
+bash examples/arm/setup.sh --i-agree-to-the-contained-eula ${@:-}
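
The only functional change is the trailing ${@:-}, which forwards any arguments given to this CI wrapper on to examples/arm/setup.sh. A minimal sketch of the effect; the extra flag below is a placeholder used only to illustrate the forwarding, not a documented setup.sh option:

# Arguments passed to the wrapper now reach examples/arm/setup.sh unchanged.
# "--example-extra-flag" is hypothetical, shown only to demonstrate forwarding.
bash .ci/scripts/setup-arm-baremetal-tools.sh --example-extra-flag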

.ci/scripts/setup-emscripten.sh

Lines changed: 13 additions & 0 deletions
@@ -0,0 +1,13 @@
+
+set -ex
+
+install_emscripten() {
+  git clone https://github.com/emscripten-core/emsdk.git
+  pushd emsdk || return
+  ./emsdk install 4.0.10
+  ./emsdk activate 4.0.10
+  source ./emsdk_env.sh
+  popd || return
+}
+
+install_emscripten
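
The new script clones emsdk, installs and activates the pinned 4.0.10 toolchain, and sources emsdk_env.sh. A minimal usage sketch, assuming it is run from the repository root: because emsdk_env.sh is sourced inside the script, the toolchain only stays on the caller's PATH if the caller sources the script rather than running it in a subshell.

# Sketch only: source the script so the emsdk environment persists in this shell.
source .ci/scripts/setup-emscripten.sh
emcc --version   # expected to report the pinned Emscripten 4.0.10 toolchain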

.ci/scripts/test_llama_torchao_lowbit.sh

Lines changed: 2 additions & 2 deletions
@@ -35,15 +35,15 @@ cmake -DPYTHON_EXECUTABLE=python \
   -DEXECUTORCH_BUILD_XNNPACK=OFF \
   -DEXECUTORCH_BUILD_KERNELS_QUANTIZED=ON \
   -DEXECUTORCH_BUILD_KERNELS_OPTIMIZED=ON \
-  -DEXECUTORCH_BUILD_KERNELS_CUSTOM=ON \
+  -DEXECUTORCH_BUILD_KERNELS_LLM=ON \
   -Bcmake-out .
 cmake --build cmake-out -j16 --target install --config Release

 # Install llama runner with torchao
 cmake -DPYTHON_EXECUTABLE=python \
   -DBUILD_TESTING=OFF \
   -DCMAKE_BUILD_TYPE=Release \
-  -DEXECUTORCH_BUILD_KERNELS_CUSTOM=ON \
+  -DEXECUTORCH_BUILD_KERNELS_LLM=ON \
   -DEXECUTORCH_BUILD_KERNELS_OPTIMIZED=ON \
   -DEXECUTORCH_BUILD_XNNPACK=OFF \
   -DEXECUTORCH_BUILD_KERNELS_QUANTIZED=ON \
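
Both hunks make the same mechanical change: the CMake option EXECUTORCH_BUILD_KERNELS_CUSTOM is renamed to EXECUTORCH_BUILD_KERNELS_LLM, and the same rename appears in the test_llava.sh hunks below. A hedged configure sketch using only the options visible in this diff, not a complete supported configuration:

# Sketch: configure and install ExecuTorch with the renamed option.
cmake -DPYTHON_EXECUTABLE=python \
  -DCMAKE_BUILD_TYPE=Release \
  -DEXECUTORCH_BUILD_KERNELS_LLM=ON \
  -DEXECUTORCH_BUILD_KERNELS_OPTIMIZED=ON \
  -DEXECUTORCH_BUILD_KERNELS_QUANTIZED=ON \
  -Bcmake-out .
cmake --build cmake-out -j16 --target install --config Release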

.ci/scripts/test_llava.sh

Lines changed: 2 additions & 2 deletions
@@ -39,7 +39,7 @@ EXECUTORCH_COMMON_CMAKE_ARGS=" \
   -DEXECUTORCH_BUILD_EXTENSION_DATA_LOADER=ON \
   -DEXECUTORCH_BUILD_EXTENSION_FLAT_TENSOR=ON \
   -DEXECUTORCH_BUILD_EXTENSION_TENSOR=ON \
-  -DEXECUTORCH_BUILD_KERNELS_CUSTOM=ON \
+  -DEXECUTORCH_BUILD_KERNELS_LLM=ON \
   -DEXECUTORCH_BUILD_KERNELS_OPTIMIZED=ON \
   -DEXECUTORCH_BUILD_KERNELS_QUANTIZED=ON \
   -DEXECUTORCH_BUILD_XNNPACK=ON \
@@ -69,7 +69,7 @@ LLAVA_COMMON_CMAKE_ARGS=" \
   -DPYTHON_EXECUTABLE="$PYTHON_EXECUTABLE" \
   -DCMAKE_INSTALL_PREFIX=${BUILD_DIR} \
   -DCMAKE_BUILD_TYPE=${CMAKE_BUILD_TYPE} \
-  -DEXECUTORCH_BUILD_KERNELS_CUSTOM=ON \
+  -DEXECUTORCH_BUILD_KERNELS_LLM=ON \
   -DEXECUTORCH_BUILD_KERNELS_OPTIMIZED=ON \
   -DEXECUTORCH_BUILD_XNNPACK=ON"

.ci/scripts/test_phi_3_mini.sh

Lines changed: 5 additions & 22 deletions
@@ -22,31 +22,14 @@ NPROC=8
 if hash nproc &> /dev/null; then NPROC=$(nproc); fi

 cmake_install_executorch_libraries() {
-  cmake -DPYTHON_EXECUTABLE=python \
-    -DCMAKE_INSTALL_PREFIX=${BUILD_DIR} \
-    -DEXECUTORCH_ENABLE_LOGGING=1 \
-    -DCMAKE_BUILD_TYPE=${BUILD_TYPE} \
-    -DEXECUTORCH_BUILD_EXTENSION_DATA_LOADER=ON \
-    -DEXECUTORCH_BUILD_EXTENSION_FLAT_TENSOR=ON \
-    -DEXECUTORCH_BUILD_EXTENSION_MODULE=ON \
-    -DEXECUTORCH_BUILD_EXTENSION_TENSOR=ON \
-    -DEXECUTORCH_BUILD_XNNPACK=ON \
-    -DEXECUTORCH_BUILD_KERNELS_QUANTIZED=ON \
-    -DEXECUTORCH_BUILD_KERNELS_OPTIMIZED=ON \
-    -DEXECUTORCH_BUILD_KERNELS_CUSTOM=ON \
-    -B${BUILD_DIR} .
-
-  cmake --build ${BUILD_DIR} -j${NPROC} --target install --config ${BUILD_TYPE}
+  rm -rf cmake-out
+  cmake --preset llm -DCMAKE_INSTALL_PREFIX=cmake-out -DCMAKE_BUILD_TYPE=${BUILD_TYPE}
+  cmake --build cmake-out -j16 --target install --config ${BUILD_TYPE}
 }

 cmake_build_phi_3_mini() {
-  cmake -DPYTHON_EXECUTABLE=$PYTHON_EXECUTABLE \
-    -DCMAKE_INSTALL_PREFIX=${BUILD_DIR} \
+  cmake -DCMAKE_PREFIX_PATH=${BUILD_DIR} \
     -DCMAKE_BUILD_TYPE=${BUILD_TYPE} \
-    -DEXECUTORCH_BUILD_KERNELS_CUSTOM=ON \
-    -DEXECUTORCH_BUILD_KERNELS_OPTIMIZED=ON \
-    -DEXECUTORCH_BUILD_XNNPACK=ON \
-    -DEXECUTORCH_BUILD_KERNELS_QUANTIZED=ON \
     -B${BUILD_DIR}/${MODEL_DIR} \
     ${MODEL_DIR}

@@ -81,7 +64,7 @@ run_and_verify() {
   ${BUILD_DIR}/${MODEL_DIR}/phi_3_mini_runner \
     --model_path=phi-3-mini.pte \
     --tokenizer_path=tokenizer.bin \
-    --seq_len=128 \
+    --seq_len=60 \
     --temperature=0 \
     --prompt="<|system|>
 You are a helpful assistant.<|end|>
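
The library build collapses to the llm CMake preset, and the model runner now finds the installed ExecuTorch package through CMAKE_PREFIX_PATH instead of re-listing backend options. A sketch of the resulting flow, reusing the BUILD_DIR, BUILD_TYPE, and MODEL_DIR variables the script defines earlier (their values are not shown in these hunks):

# Step 1: build and install the ExecuTorch core libraries via the llm preset.
rm -rf cmake-out
cmake --preset llm -DCMAKE_INSTALL_PREFIX=cmake-out -DCMAKE_BUILD_TYPE=${BUILD_TYPE}
cmake --build cmake-out -j16 --target install --config ${BUILD_TYPE}

# Step 2: configure the phi_3_mini runner against the installed package;
# backend selection now comes from the preset rather than per-flag options.
cmake -DCMAKE_PREFIX_PATH=${BUILD_DIR} \
  -DCMAKE_BUILD_TYPE=${BUILD_TYPE} \
  -B${BUILD_DIR}/${MODEL_DIR} \
  ${MODEL_DIR}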

.ci/scripts/test_yolo12.sh

Lines changed: 197 additions & 0 deletions
@@ -0,0 +1,197 @@
+#!/bin/bash
+# Copyright (c) Meta Platforms, Inc. and affiliates.
+# All rights reserved.
+#
+# This source code is licensed under the BSD-style license found in the
+# LICENSE file in the root directory of this source tree.
+
+set -ex
+# shellcheck source=/dev/null
+source "$(dirname "${BASH_SOURCE[0]}")/utils.sh"
+
+while [[ $# -gt 0 ]]; do
+  case "$1" in
+    -model)
+      MODEL_NAME="$2" # stories110M
+      shift 2
+      ;;
+    -mode)
+      MODE="$2" # portable or xnnpack+custom or xnnpack+custom+qe
+      shift 2
+      ;;
+    -pt2e_quantize)
+      PT2E_QUANTIZE="$2"
+      shift 2
+      ;;
+    -upload)
+      UPLOAD_DIR="$2"
+      shift 2
+      ;;
+    -video_path)
+      VIDEO_PATH="$2" # portable or xnnpack+custom or xnnpack+custom+qe
+      shift 2
+      ;;
+    *)
+      echo "Unknown option: $1"
+      usage
+      ;;
+  esac
+done
+
+# Default mode to xnnpack+custom if not set
+MODE=${MODE:-"openvino"}
+
+# Default UPLOAD_DIR to empty string if not set
+UPLOAD_DIR="${UPLOAD_DIR:-}"
+
+# Default PT2E_QUANTIZE to empty string if not set
+PT2E_QUANTIZE="${PT2E_QUANTIZE:-}"
+
+# Default CMake Build Type to release mode
+CMAKE_BUILD_TYPE=${CMAKE_BUILD_TYPE:-Release}
+
+if [[ $# -lt 5 ]]; then # Assuming 4 mandatory args
+  echo "Expecting atleast 5 positional arguments"
+  echo "Usage: [...]"
+fi
+if [[ -z "${MODEL_NAME:-}" ]]; then
+  echo "Missing model name, exiting..."
+  exit 1
+fi
+
+
+if [[ -z "${MODE:-}" ]]; then
+  echo "Missing mode, choose openvino or xnnpack, exiting..."
+  exit 1
+fi
+
+if [[ -z "${PYTHON_EXECUTABLE:-}" ]]; then
+  PYTHON_EXECUTABLE=python3
+fi
+
+TARGET_LIBS=""
+
+if [[ "${MODE}" =~ .*openvino.* ]]; then
+  OPENVINO=ON
+  TARGET_LIBS="$TARGET_LIBS openvino_backend "
+
+  git clone https://github.com/openvinotoolkit/openvino.git
+  cd openvino && git checkout b16b776ac119dafda51f69a80f1e6b7376d02c3b
+  git submodule update --init --recursive
+  sudo ./install_build_dependencies.sh
+  mkdir build && cd build
+  cmake .. -DCMAKE_BUILD_TYPE=Release -DENABLE_PYTHON=ON
+  make -j$(nproc)
+
+  cd ..
+  cmake --install build --prefix dist
+
+  source dist/setupvars.sh
+  cd ../backends/openvino
+  pip install -r requirements.txt
+  cd ../../
+else
+  OPENVINO=OFF
+fi
+
+if [[ "${MODE}" =~ .*xnnpack.* ]]; then
+  XNNPACK=ON
+  TARGET_LIBS="$TARGET_LIBS xnnpack_backend "
+else
+  XNNPACK=OFF
+fi
+
+which "${PYTHON_EXECUTABLE}"
+
+
+DIR="examples/models/yolo12"
+$PYTHON_EXECUTABLE -m pip install -r ${DIR}/requirements.txt
+
+cmake_install_executorch_libraries() {
+  rm -rf cmake-out
+  build_dir=cmake-out
+  mkdir $build_dir
+
+
+  retry cmake -DCMAKE_INSTALL_PREFIX="${build_dir}" \
+    -DCMAKE_BUILD_TYPE="${CMAKE_BUILD_TYPE}" \
+    -DEXECUTORCH_BUILD_OPENVINO="$OPENVINO" \
+    -DEXECUTORCH_BUILD_XNNPACK="$XNNPACK" \
+    -DEXECUTORCH_BUILD_EXTENSION_DATA_LOADER=ON \
+    -DEXECUTORCH_BUILD_EXTENSION_MODULE=ON \
+    -DEXECUTORCH_BUILD_EXTENSION_RUNNER_UTIL=ON \
+    -DEXECUTORCH_BUILD_EXTENSION_TENSOR=ON \
+    -B"${build_dir}"
+
+  # Build the project
+  cmake --build ${build_dir} --target install --config ${CMAKE_BUILD_TYPE} -j$(nproc)
+
+  export CMAKE_ARGS="
+    -DEXECUTORCH_BUILD_OPENVINO="$OPENVINO" \
+    -DEXECUTORCH_BUILD_XNNPACK="$XNNPACK" \
+    -DEXECUTORCH_BUILD_EXTENSION_DATA_LOADER=ON \
+    -DEXECUTORCH_BUILD_EXTENSION_MODULE=ON \
+    -DEXECUTORCH_BUILD_EXTENSION_RUNNER_UTIL=ON \
+    -DEXECUTORCH_ENABLE_LOGGING=ON \
+    -DEXECUTORCH_BUILD_EXTENSION_TENSOR=ON \
+    -DEXECUTORCH_BUILD_PYBIND=ON"
+
+  echo $TARGET_LIBS
+  export CMAKE_BUILD_ARGS="--target $TARGET_LIBS"
+  pip install . --no-build-isolation
+}
+
+cmake_build_demo() {
+  echo "Building yolo12 runner"
+  retry cmake \
+    -DCMAKE_BUILD_TYPE="$CMAKE_BUILD_TYPE" \
+    -DUSE_OPENVINO_BACKEND="$OPENVINO" \
+    -DUSE_XNNPACK_BACKEND="$XNNPACK" \
+    -Bcmake-out/${DIR} \
+    ${DIR}
+  cmake --build cmake-out/${DIR} -j9 --config "$CMAKE_BUILD_TYPE"

+}
+
+cleanup_files() {
+  rm $EXPORTED_MODEL_NAME
+}
+
+prepare_artifacts_upload() {
+  if [ -n "${UPLOAD_DIR}" ]; then
+    echo "Preparing for uploading generated artifacs"
+    zip -j model.zip "${EXPORTED_MODEL_NAME}"
+    mkdir -p "${UPLOAD_DIR}"
+    mv model.zip "${UPLOAD_DIR}"
+    mv result.txt "${UPLOAD_DIR}"
+
+  fi
+}
+
+
+# Export model.
+EXPORTED_MODEL_NAME="${MODEL_NAME}_fp32_${MODE}.pte"
+echo "Exporting ${EXPORTED_MODEL_NAME}"
+EXPORT_ARGS="--model_name=${MODEL_NAME} --backend=${MODE}"
+
+# Add dynamically linked library location
+cmake_install_executorch_libraries
+
+$PYTHON_EXECUTABLE -m examples.models.yolo12.export_and_validate ${EXPORT_ARGS}
+
+
+RUNTIME_ARGS="--model_path=${EXPORTED_MODEL_NAME} --input_path=${VIDEO_PATH}"
+# Check build tool.
+cmake_build_demo
+# Run yolo12 runner
+NOW=$(date +"%H:%M:%S")
+echo "Starting to run yolo12 runner at ${NOW}"
+# shellcheck source=/dev/null
+cmake-out/examples/models/yolo12/Yolo12DetectionDemo ${RUNTIME_ARGS} > result.txt
+NOW=$(date +"%H:%M:%S")
+echo "Finished at ${NOW}"
+
+RESULT=$(cat result.txt)
+
+prepare_artifacts_upload
+cleanup_files
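
For reference, a hypothetical invocation of the new CI script: the flag names come from the argument parser above, while the model name and video path below are placeholders, not values taken from this commit.

# Export the model with the OpenVINO backend, build the demo runner, and run it on a video.
# "yolo12s" and the video path are illustrative placeholders.
bash .ci/scripts/test_yolo12.sh \
  -model yolo12s \
  -mode openvino \
  -video_path /path/to/input_video.mp4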
