@@ -75,9 +75,9 @@ run_portable_executor_runner() {
 test_model() {
   if [[ "${MODEL_NAME}" == "llama2" ]]; then
     # Install requirements for export_llama
-    bash examples/models/llama2/install_requirements.sh
-    # Test export_llama script: python3 -m examples.models.llama2.export_llama
-    "${PYTHON_EXECUTABLE}" -m examples.models.llama2.export_llama -c examples/models/llama2/params/demo_rand_params.pth -p examples/models/llama2/params/demo_config.json
+    bash examples/models/llama/install_requirements.sh
+    # Test export_llama script: python3 -m examples.models.llama.export_llama
+    "${PYTHON_EXECUTABLE}" -m examples.models.llama.export_llama -c examples/models/llama/params/demo_rand_params.pth -p examples/models/llama/params/demo_config.json
     run_portable_executor_runner
     rm "./${MODEL_NAME}.pte"
   fi
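For reference, a standalone run of the updated llama export step might look like the sketch below; the commands and demo param/config paths are taken from the hunk above, with python3 standing in for ${PYTHON_EXECUTABLE}.

# Hedged sketch: running the export step by hand with the renamed paths.
bash examples/models/llama/install_requirements.sh
python3 -m examples.models.llama.export_llama \
  -c examples/models/llama/params/demo_rand_params.pth \
  -p examples/models/llama/params/demo_config.json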
@@ -155,30 +155,24 @@ test_model_with_qnn() {
 
   if [[ "${MODEL_NAME}" == "dl3" ]]; then
     EXPORT_SCRIPT=deeplab_v3
-    EXPORTED_MODEL_NAME=dlv3_qnn.pte
   elif [[ "${MODEL_NAME}" == "mv3" ]]; then
     EXPORT_SCRIPT=mobilenet_v3
-    EXPORTED_MODEL_NAME=mv3_qnn.pte
   elif [[ "${MODEL_NAME}" == "mv2" ]]; then
     EXPORT_SCRIPT=mobilenet_v2
-    EXPORTED_MODEL_NAME=mv2_qnn.pte
   elif [[ "${MODEL_NAME}" == "ic4" ]]; then
     EXPORT_SCRIPT=inception_v4
-    EXPORTED_MODEL_NAME=ic4_qnn.pte
   elif [[ "${MODEL_NAME}" == "ic3" ]]; then
     EXPORT_SCRIPT=inception_v3
-    EXPORTED_MODEL_NAME=ic3_qnn.pte
   elif [[ "${MODEL_NAME}" == "vit" ]]; then
     EXPORT_SCRIPT=torchvision_vit
-    EXPORTED_MODEL_NAME=vit_qnn.pte
   fi
 
   # Use SM8450 for S22, SM8550 for S23, and SM8560 for S24
   # TODO(guangyang): Make QNN chipset matches the target device
   QNN_CHIPSET=SM8450
 
   "${PYTHON_EXECUTABLE}" -m examples.qualcomm.scripts.${EXPORT_SCRIPT} -b ${CMAKE_OUTPUT_DIR} -m ${QNN_CHIPSET} --compile_only
-  EXPORTED_MODEL=./${EXPORT_SCRIPT}/${EXPORTED_MODEL_NAME}
+  EXPORTED_MODEL=$(find "./${EXPORT_SCRIPT}" -type f -name "${MODEL_NAME}*.pte" -print -quit)
 }
 
 test_model_with_coreml() {
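The hunk above drops the hardcoded *_qnn.pte filenames and instead discovers whatever the export step actually produced. A minimal sketch of that discovery pattern follows; the emptiness check is an added assumption, not part of the diff.

# Pick the first .pte whose name starts with ${MODEL_NAME}; -quit stops find
# after the first match, so at most one path is printed.
EXPORTED_MODEL=$(find "./${EXPORT_SCRIPT}" -type f -name "${MODEL_NAME}*.pte" -print -quit)
if [[ -z "${EXPORTED_MODEL}" ]]; then
  # Hypothetical guard, not in the diff: fail loudly if the export produced nothing.
  echo "No .pte file found under ./${EXPORT_SCRIPT}"
  exit 1
fi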
@@ -187,7 +181,24 @@ test_model_with_coreml() {
     exit 1
   fi
 
-  "${PYTHON_EXECUTABLE}" -m examples.apple.coreml.scripts.export --model_name="${MODEL_NAME}"
+  DTYPE=float16
+
+  "${PYTHON_EXECUTABLE}" -m examples.apple.coreml.scripts.export --model_name="${MODEL_NAME}" --compute_precision "${DTYPE}"
+  EXPORTED_MODEL=$(find "." -type f -name "${MODEL_NAME}*.pte" -print -quit)
+  # TODO:
+  if [ -n "$EXPORTED_MODEL" ]; then
+    EXPORTED_MODEL_WITH_DTYPE="${EXPORTED_MODEL%.pte}_${DTYPE}.pte"
+    mv "$EXPORTED_MODEL" "$EXPORTED_MODEL_WITH_DTYPE"
+    EXPORTED_MODEL="$EXPORTED_MODEL_WITH_DTYPE"
+    echo "Renamed file path: $EXPORTED_MODEL"
+  else
+    echo "No .pte file found"
+    exit 1
+  fi
+}
+
+test_model_with_mps() {
+  "${PYTHON_EXECUTABLE}" -m examples.apple.mps.scripts.mps_example --model_name="${MODEL_NAME}" --use_fp16
   EXPORTED_MODEL=$(find "." -type f -name "${MODEL_NAME}*.pte" -print -quit)
 }
 
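The Core ML hunk tags the exported file with its compute precision by renaming it with a dtype suffix. A tiny sketch of that parameter expansion, assuming DTYPE=float16 and an example filename that is not taken from the diff:

# "${EXPORTED_MODEL%.pte}" strips the trailing .pte, then the dtype suffix is appended.
EXPORTED_MODEL="mv3_coreml_all.pte"          # placeholder filename for illustration
DTYPE=float16
echo "${EXPORTED_MODEL%.pte}_${DTYPE}.pte"   # -> mv3_coreml_all_float16.pte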
@@ -206,6 +217,12 @@ elif [[ "${BACKEND}" == "coreml" ]]; then
   if [[ $? -eq 0 ]]; then
     prepare_artifacts_upload
   fi
+elif [[ "${BACKEND}" == "mps" ]]; then
+  echo "Testing ${MODEL_NAME} with mps..."
+  test_model_with_mps
+  if [[ $? -eq 0 ]]; then
+    prepare_artifacts_upload
+  fi
 elif [[ "${BACKEND}" == "xnnpack" ]]; then
   echo "Testing ${MODEL_NAME} with xnnpack..."
   WITH_QUANTIZATION=true
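To exercise the new mps branch outside the dispatcher, the underlying export can be run directly. The command and the .pte lookup are taken from the diff; the concrete MODEL_NAME and interpreter below are placeholder values, not from this change.

# Hedged sketch: standalone MPS export followed by the same .pte lookup the script uses.
PYTHON_EXECUTABLE=python3   # placeholder
MODEL_NAME=mv3              # placeholder
"${PYTHON_EXECUTABLE}" -m examples.apple.mps.scripts.mps_example --model_name="${MODEL_NAME}" --use_fp16
find . -type f -name "${MODEL_NAME}*.pte" -print -quit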