@@ -737,63 +737,68 @@ jobs:
737737 uses : pytorch/test-infra/.github/workflows/macos_job.yml@main
738738 with :
739739 runner : macos-m1-stable # needs MPS, was macos-m1-stable
740- script : |
741- export PYTHON_VERSION="3.10"
742- set -x
743- # NS/MC: Remove previous installation of torch and torchao first
744- # as this script does not install anything into conda env but rather as system dep
745- pip3 uninstall -y torch || true
746- set -eou pipefail
747-
748- pip3 uninstall -y torchao || true
749- set -eou pipefail
750-
751- echo "::group::Print machine info"
752- uname -a
753- sysctl machdep.cpu.brand_string
754- sysctl machdep.cpu.core_count
755- echo "::endgroup::"
756-
757- echo "::group::Install requirements"
758- # Install requirements
759- ./install/install_requirements.sh
760- ls -la
761- pwd
762- pip3 list
763- python3 -c 'import torch;print(f"torch: {torch.__version__, torch.version.git_version}")'
764- echo "::endgroup::"
765-
766- echo "::group::Download checkpoints"
767- (
768- mkdir -p checkpoints/stories15M
769- pushd checkpoints/stories15M
770- curl -fsSL -O https://huggingface.co/karpathy/tinyllamas/resolve/main/stories15M.pt
771- curl -fsSL -O https://github.com/karpathy/llama2.c/raw/master/tokenizer.model
772- popd
773- )
774- echo "::endgroup::"
775-
776- echo "::group::Run inference"
777- export MODEL_PATH=checkpoints/stories15M/stories15M.pt
778- export MODEL_NAME=stories15M
779- export MODEL_DIR=/tmp
780- for DTYPE in float16 float32; do
781- # if [ $(uname -s) == Darwin ]; then
782- # export DTYPE=float16
783- # fi
784-
785- python3 torchchat.py generate --dtype ${DTYPE} --device mps --checkpoint-path ${MODEL_PATH} --temperature 0
786-
787- python3 torchchat.py generate --dtype ${DTYPE} --device mps --quant '{"embedding" : {"bitwidth": 8, "groupsize": 0}}' --checkpoint-path ${MODEL_PATH} --temperature 0
788-
789- python3 torchchat.py generate --dtype ${DTYPE} --device mps --quant '{"embedding" : {"bitwidth": 8, "groupsize": 8}}' --checkpoint-path ${MODEL_PATH} --temperature 0
790-
791- python3 torchchat.py generate --dtype ${DTYPE} --device mps --quant '{"linear:int8" : {"bitwidth": 8, "groupsize": 0}}' --checkpoint-path ${MODEL_PATH} --temperature 0
792-
793- python3 torchchat.py generate --dtype ${DTYPE} --device mps --quant '{"linear:int8" : {"bitwidth": 8, "groupsize": 8}}' --checkpoint-path ${MODEL_PATH} --temperature 0
794-
795- PYTORCH_ENABLE_MPS_FALLBACK=1 python3 torchchat.py generate --dtype ${DTYPE} --device mps --quant '{"linear:int4" : {"groupsize": 32}}' --checkpoint-path ${MODEL_PATH} --temperature 0
796- done
740+ steps :
741+ - name : Setup Python
742+        uses : actions/setup-python@v5
743+ with :
744+          python-version : '3.10.11'
745+ - name : Run test
746+ run : |
747+ export PYTHON_VERSION="3.10"
748+ set -x
749+ # NS/MC: Remove previous installation of torch and torchao first
750+ # as this script does not install anything into conda env but rather as system dep
751+ pip3 uninstall -y torch || true
752+ set -eou pipefail
753+
754+ pip3 uninstall -y torchao || true
755+ set -eou pipefail
756+
757+ echo "::group::Print machine info"
758+ uname -a
759+ sysctl machdep.cpu.brand_string
760+ sysctl machdep.cpu.core_count
761+ echo "::endgroup::"
762+
763+ echo "::group::Install requirements"
764+ # Install requirements
765+ ./install/install_requirements.sh
766+ ls -la
767+ pwd
768+ pip3 list
769+ python3 -c 'import torch;print(f"torch: {torch.__version__, torch.version.git_version}")'
770+ echo "::endgroup::"
771+
772+ echo "::group::Download checkpoints"
773+ (
774+ mkdir -p checkpoints/stories15M
775+ pushd checkpoints/stories15M
776+ curl -fsSL -O https://huggingface.co/karpathy/tinyllamas/resolve/main/stories15M.pt
777+ curl -fsSL -O https://github.com/karpathy/llama2.c/raw/master/tokenizer.model
778+ popd
779+ )
780+ echo "::endgroup::"
781+
782+ echo "::group::Run inference"
783+ export MODEL_PATH=checkpoints/stories15M/stories15M.pt
784+ export MODEL_NAME=stories15M
785+ export MODEL_DIR=/tmp
786+ for DTYPE in float16 float32; do
787+ # if [ $(uname -s) == Darwin ]; then
788+ # export DTYPE=float16
789+ # fi
790+
791+ python3 torchchat.py generate --dtype ${DTYPE} --device mps --checkpoint-path ${MODEL_PATH} --temperature 0
792+
793+ python3 torchchat.py generate --dtype ${DTYPE} --device mps --quant '{"embedding" : {"bitwidth": 8, "groupsize": 0}}' --checkpoint-path ${MODEL_PATH} --temperature 0
794+
795+ python3 torchchat.py generate --dtype ${DTYPE} --device mps --quant '{"embedding" : {"bitwidth": 8, "groupsize": 8}}' --checkpoint-path ${MODEL_PATH} --temperature 0
796+
797+ python3 torchchat.py generate --dtype ${DTYPE} --device mps --quant '{"linear:int8" : {"bitwidth": 8, "groupsize": 0}}' --checkpoint-path ${MODEL_PATH} --temperature 0
798+
799+ python3 torchchat.py generate --dtype ${DTYPE} --device mps --quant '{"linear:int8" : {"bitwidth": 8, "groupsize": 8}}' --checkpoint-path ${MODEL_PATH} --temperature 0
800+
801+ PYTORCH_ENABLE_MPS_FALLBACK=1 python3 torchchat.py generate --dtype ${DTYPE} --device mps --quant '{"linear:int4" : {"groupsize": 32}}' --checkpoint-path ${MODEL_PATH} --temperature 0
797802 compile-gguf :
798803 strategy :
799804 matrix :
@@ -919,6 +924,7 @@ jobs:
919924 run : |
920925 echo "Install ExecuTorch python"
921926 export TORCHCHAT_ROOT=$PWD
927+ export ET_BUILD_DIR="et-build"
922928 ENABLE_ET_PYBIND="${1:-true}"
923929 source "torchchat/utils/scripts/install_utils.sh"
924930 install_executorch_python_libs $ENABLE_ET_PYBIND
@@ -1067,14 +1073,12 @@ jobs:
10671073 echo "et-git-hash=$(cat ${TORCHCHAT_ROOT}/install/.pins/et-pin.txt)" >> "$GITHUB_ENV"
10681074 - name : Load or install ET
10691075 id : install-et
1070- uses : actions/cache@v3
1071- env :
1072- cache-key : et-build-${{runner.os}}-${{runner.arch}}-${{env.et-git-hash}}
1076+ uses : actions/cache@v4
10731077 with :
1074- path : ./et-build
1075- key : ${{env.cache-key}}
1076- restore-keys : |
1077- ${{env.cache-key }}
1078+ path : |
1079+ ./et-build
1080+ ./torchchat/utils/scripts
1081+        key : et-build-${{ runner.os }}-${{ runner.arch }}-${{ env.et-git-hash }}-${{ hashFiles('**/install_et.sh') }}
10781082 - if : ${{ steps.install-et.outputs.cache-hit != 'true' }}
10791083 continue-on-error : true
10801084 run : |
@@ -1084,6 +1088,7 @@ jobs:
10841088 run : |
10851089 echo "Install ExecuTorch python"
10861090 export TORCHCHAT_ROOT=$PWD
1091+ export ET_BUILD_DIR="et-build"
10871092 ENABLE_ET_PYBIND="${1:-true}"
10881093 source "torchchat/utils/scripts/install_utils.sh"
10891094 install_executorch_python_libs $ENABLE_ET_PYBIND
0 commit comments