Skip to content

Commit 56001c3

Browse files
authored
Fix stories model name
Differential Revision: D61563540 Pull Request resolved: #4803
1 parent 055af09 commit 56001c3

File tree

4 files changed

+11
-9
lines changed

4 files changed

+11
-9
lines changed

.ci/scripts/test_llama.sh

Lines changed: 6 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -9,7 +9,7 @@ set -exu
99
# shellcheck source=/dev/null
1010
source "$(dirname "${BASH_SOURCE[0]}")/utils.sh"
1111

12-
MODEL_NAME=$1 # stories110M.pt
12+
MODEL_NAME=$1 # stories110M
1313
BUILD_TOOL=$2 # buck2 or cmake
1414
DTYPE=$3 # fp16 or fp32
1515
MODE=${4:-"xnnpack+custom"} # portable or xnnpack+custom or xnnpack+custom+qe
@@ -140,7 +140,7 @@ cmake_build_llama_runner() {
140140

141141
cleanup_files() {
142142
echo "Deleting downloaded and generated files"
143-
rm "${MODEL_NAME}"
143+
rm "${CHECKPOINT_FILE_NAME}"
144144
rm tokenizer.model
145145
rm tokenizer.bin
146146
rm "${EXPORTED_MODEL_NAME}"
@@ -159,8 +159,10 @@ prepare_artifacts_upload() {
159159

160160
# Download and create artifacts.
161161
PARAMS="params.json"
162+
CHECKPOINT_FILE_NAME=""
162163
touch "${PARAMS}"
163-
if [[ "${MODEL_NAME}" == "stories110M.pt" ]]; then
164+
if [[ "${MODEL_NAME}" == "stories110M" ]]; then
165+
CHECKPOINT_FILE_NAME="stories110M.pt"
164166
download_stories_model_artifacts
165167
else
166168
echo "Unsupported model name ${MODEL_NAME}"
@@ -181,7 +183,7 @@ fi
181183
# Export model.
182184
EXPORTED_MODEL_NAME="${EXPORTED_MODEL_NAME}.pte"
183185
echo "Exporting ${EXPORTED_MODEL_NAME}"
184-
EXPORT_ARGS="-c stories110M.pt -p ${PARAMS} -d ${DTYPE} -n ${EXPORTED_MODEL_NAME} -kv"
186+
EXPORT_ARGS="-c ${CHECKPOINT_FILE_NAME} -p ${PARAMS} -d ${DTYPE} -n ${EXPORTED_MODEL_NAME} -kv"
185187
if [[ "${XNNPACK}" == "ON" ]]; then
186188
EXPORT_ARGS="${EXPORT_ARGS} -X -qmode 8da4w -G 128"
187189
fi

.github/workflows/android-perf.yml

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -156,14 +156,14 @@ jobs:
156156
BUILD_MODE="cmake"
157157
DTYPE="fp32"
158158
159-
if [[ ${{ matrix.model }} == "llama*" ]]; then
159+
if [[ ${{ matrix.model }} == "stories*" ]]; then
160160
# Install requirements for export_llama
161161
PYTHON_EXECUTABLE=python bash examples/models/llama2/install_requirements.sh
162162
# Test llama2
163163
if [[ ${{ matrix.delegate }} == "xnnpack" ]]; then
164164
DELEGATE_CONFIG="xnnpack+custom+qe"
165165
fi
166-
PYTHON_EXECUTABLE=python bash .ci/scripts/test_llama.sh "${{ matrix.model }}.pt" "${BUILD_MODE}" "${DTYPE}" "${DELEGATE_CONFIG}" "${ARTIFACTS_DIR_NAME}"
166+
PYTHON_EXECUTABLE=python bash .ci/scripts/test_llama.sh "${{ matrix.model }}" "${BUILD_MODE}" "${DTYPE}" "${DELEGATE_CONFIG}" "${ARTIFACTS_DIR_NAME}"
167167
else
168168
PYTHON_EXECUTABLE=python bash .ci/scripts/test.sh "${{ matrix.model }}" "${BUILD_MODE}" "${{ matrix.delegate }}" "${ARTIFACTS_DIR_NAME}"
169169
fi

.github/workflows/pull.yml

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -112,7 +112,7 @@ jobs:
112112
# Install requirements for export_llama
113113
PYTHON_EXECUTABLE=python bash examples/models/llama2/install_requirements.sh
114114
# Test llama2
115-
PYTHON_EXECUTABLE=python bash .ci/scripts/test_llama.sh stories110M.pt "${BUILD_TOOL}" "${DTYPE}" "${MODE}"
115+
PYTHON_EXECUTABLE=python bash .ci/scripts/test_llama.sh stories110M "${BUILD_TOOL}" "${DTYPE}" "${MODE}"
116116
117117
test-llama-runner-linux-android:
118118
name: test-llama-runner-linux-android
@@ -406,4 +406,4 @@ jobs:
406406
# Install requirements for export_llama
407407
PYTHON_EXECUTABLE=python bash examples/models/llama2/install_requirements.sh
408408
# Test llama2
409-
PYTHON_EXECUTABLE=python bash .ci/scripts/test_llama.sh stories110M.pt "${BUILD_TOOL}" "${DTYPE}" "${MODE}"
409+
PYTHON_EXECUTABLE=python bash .ci/scripts/test_llama.sh stories110M "${BUILD_TOOL}" "${DTYPE}" "${MODE}"

.github/workflows/trunk.yml

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -269,7 +269,7 @@ jobs:
269269
# Install requirements for export_llama
270270
PYTHON_EXECUTABLE=python ${CONDA_RUN} bash examples/models/llama2/install_requirements.sh
271271
# Test llama2
272-
PYTHON_EXECUTABLE=python ${CONDA_RUN} bash .ci/scripts/test_llama.sh stories110M.pt "${BUILD_TOOL}" "${DTYPE}" "${MODE}"
272+
PYTHON_EXECUTABLE=python ${CONDA_RUN} bash .ci/scripts/test_llama.sh stories110M "${BUILD_TOOL}" "${DTYPE}" "${MODE}"
273273
274274
test-qnn-model:
275275
name: test-qnn-model

0 commit comments

Comments
 (0)