3 files changed, +39 −9 lines changed
@@ -9,11 +9,41 @@ set -exu
 # shellcheck source=/dev/null
 source "$(dirname "${BASH_SOURCE[0]}")/utils.sh"

-MODEL_NAME=$1 # stories110M
-BUILD_TOOL=$2 # buck2 or cmake
-DTYPE=$3 # fp16, bf16, or fp32
-MODE=${4:-"xnnpack+custom"} # portable or xnnpack+custom or xnnpack+custom+qe
-UPLOAD_DIR=${5:-}
+while [[ $# -gt 0 ]]; do
+  case "$1" in
+    -model)
+      MODEL_NAME="$2"
+      shift 2
+      ;;
+    -build_tool)
+      BUILD_TOOL="$2"
+      shift 2
+      ;;
+    -dtype)
+      DTYPE="$2"
+      shift 2
+      ;;
+    -mode)
+      MODE="$2"
+      shift 2
+      ;;
+    -upload)
+      UPLOAD_DIR="$2"
+      shift 2
+      ;;
+    *)
+      echo "Unknown option: $1"
+      usage
+      ;;
+  esac
+done
+
+# Default mode to xnnpack+custom if not set
+MODE=${MODE:-"xnnpack+custom"}
+
+# Default UPLOAD_DIR to empty string if not set
+UPLOAD_DIR="${UPLOAD_DIR:-}"
+
 if [[ $# -lt 4 ]]; then # Assuming 4 mandatory args
   echo "Expecting atleast 4 positional arguments"
   echo "Usage: [...]"
@@ -150,7 +180,7 @@ cleanup_files() {
 }

 prepare_artifacts_upload() {
-  if [ -n "$UPLOAD_DIR" ]; then
+  if [ -n "${UPLOAD_DIR}" ]; then
     echo "Preparing for uploading generated artifacs"
     zip -j model.zip "${EXPORTED_MODEL_NAME}" tokenizer.bin
     mkdir -p "${UPLOAD_DIR}"
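A note on the defaulting used in the new parser: ${VAR:-default} expands to the default when the variable is unset or empty, and to the variable's own value otherwise, which is how flags that were not passed still pick up the old positional defaults. A minimal sketch of the behavior (the values below are illustrative; only xnnpack+custom and portable come from the script's own comments):

# Flag was never passed, so the default applies
unset MODE
MODE=${MODE:-"xnnpack+custom"}
echo "$MODE"   # prints: xnnpack+custom

# Flag was passed (-mode portable), so the value is kept
MODE="portable"
MODE=${MODE:-"xnnpack+custom"}
echo "$MODE"   # prints: portable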
@@ -117,7 +117,7 @@ jobs:
         # Install requirements for export_llama
         PYTHON_EXECUTABLE=python bash examples/models/llama/install_requirements.sh
         # Test llama2
-        PYTHON_EXECUTABLE=python bash .ci/scripts/test_llama.sh stories110M "${BUILD_TOOL}" "${DTYPE}" "${MODE}" "${ARTIFACTS_DIR_NAME}"
+        PYTHON_EXECUTABLE=python bash .ci/scripts/test_llama.sh -model stories110M -build_tool "${BUILD_TOOL}" -dtype "${DTYPE}" -mode "${MODE}" -upload "${ARTIFACTS_DIR_NAME}"

   test-llama-runner-linux-android:
     name: test-llama-runner-linux-android
@@ -393,7 +393,7 @@ jobs:
         # Install requirements for export_llama
         PYTHON_EXECUTABLE=python bash examples/models/llama/install_requirements.sh
         # Test llama2
-        PYTHON_EXECUTABLE=python bash .ci/scripts/test_llama.sh stories110M "${BUILD_TOOL}" "${DTYPE}" "${MODE}"
+        PYTHON_EXECUTABLE=python bash .ci/scripts/test_llama.sh -model stories110M -build_tool "${BUILD_TOOL}" -dtype "${DTYPE}" -mode "${MODE}"

   test-phi-3-mini-runner-linux:
     name: test-phi-3-mini-runner-linux
@@ -261,7 +261,7 @@ jobs:
         # Install requirements for export_llama
         PYTHON_EXECUTABLE=python ${CONDA_RUN} bash examples/models/llama/install_requirements.sh
         # Test llama2
-        PYTHON_EXECUTABLE=python ${CONDA_RUN} bash .ci/scripts/test_llama.sh stories110M cmake "${DTYPE}" "${MODE}"
+        PYTHON_EXECUTABLE=python ${CONDA_RUN} bash .ci/scripts/test_llama.sh -model stories110M -build_tool cmake -dtype "${DTYPE}" -mode "${MODE}"

   # # TODO(jackzhxng): Runner consistently runs out of memory before test finishes. Try to find a more powerful runner.
   # test-llava-runner-macos:
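For reference, after this change the script is driven by named flags rather than argument position, as the workflow updates above show. A minimal local invocation might look like the following sketch (run from the repository root; cmake, fp32, and xnnpack+custom are just one of the combinations the workflows exercise, and -upload can be omitted since UPLOAD_DIR now defaults to empty):

PYTHON_EXECUTABLE=python bash .ci/scripts/test_llama.sh \
  -model stories110M \
  -build_tool cmake \
  -dtype fp32 \
  -mode xnnpack+custom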