@@ -30,19 +30,19 @@ export TMP_DIR=$(mktemp -d)
 export PATH="${TMP_DIR}:$PATH"
 trap 'rm -rfv ${TMP_DIR}' EXIT
 
-# Setup MacOS dependencies as there is no Docker support on MacOS atm
-PYTHON_EXECUTABLE=python \
-EXECUTORCH_BUILD_PYBIND=ON \
-CMAKE_ARGS="-DEXECUTORCH_BUILD_COREML=ON -DEXECUTORCH_BUILD_MPS=ON -DEXECUTORCH_BUILD_XNNPACK=ON -DEXECUTORCH_BUILD_KERNELS_QUANTIZED=ON" \
-${CONDA_RUN} --no-capture-output \
-.ci/scripts/setup-macos.sh "${BUILD_TOOL}" "${BUILD_MODE}"
+if [[ "$BUILD_TOOL" == "cmake" ]]; then
+  # Setup MacOS dependencies as there is no Docker support on MacOS atm
+  PYTHON_EXECUTABLE=python \
+  EXECUTORCH_BUILD_PYBIND=ON \
+  CMAKE_ARGS="-DEXECUTORCH_BUILD_COREML=ON -DEXECUTORCH_BUILD_MPS=ON -DEXECUTORCH_BUILD_XNNPACK=ON -DEXECUTORCH_BUILD_KERNELS_QUANTIZED=ON" \
+  ${CONDA_RUN} --no-capture-output \
+  .ci/scripts/setup-macos.sh "${BUILD_TOOL}" "${BUILD_MODE}"
 
-# Install llama3_2_vision dependencies.
-PYTHON_EXECUTABLE=python \
-${CONDA_RUN} --no-capture-output \
-./examples/models/llama3_2_vision/install_requirements.sh
+  # Install llama3_2_vision dependencies.
+  PYTHON_EXECUTABLE=python \
+  ${CONDA_RUN} --no-capture-output \
+  ./examples/models/llama3_2_vision/install_requirements.sh
 
-if [[ "$BUILD_TOOL" == "cmake" ]]; then
   .ci/scripts/unittest-macos-cmake.sh
 elif [[ "$BUILD_TOOL" == "buck2" ]]; then
   .ci/scripts/unittest-buck2.sh