@@ -738,68 +738,68 @@ jobs:
       matrix:
         runner: [macos-m1-stable]
     runs-on: ${{matrix.runner}}
-    steps:
-    - name: Setup Python
-      uses: actions/setup-python@v2
-      with:
-        python-version: 3.10.11
-    - name: Run test
-      run: |
-        export PYTHON_VERSION="3.10"
-        set -x
-        # NS/MC: Remove previous installation of torch and torchao first
-        # as this script does not install anything into conda env but rather as system dep
-        pip3 uninstall -y torch || true
-        set -eou pipefail
-
-        pip3 uninstall -y torchao || true
-        set -eou pipefail
-
-        echo "::group::Print machine info"
-        uname -a
-        sysctl machdep.cpu.brand_string
-        sysctl machdep.cpu.core_count
-        echo "::endgroup::"
-
-        echo "::group::Install requirements"
-        # Install requirements
-        ./install/install_requirements.sh
-        ls -la
-        pwd
-        pip3 list
-        python3 -c 'import torch;print(f"torch: {torch.__version__, torch.version.git_version}")'
-        echo "::endgroup::"
-
-        echo "::group::Download checkpoints"
-        (
-          mkdir -p checkpoints/stories15M
-          pushd checkpoints/stories15M
-          curl -fsSL -O https://huggingface.co/karpathy/tinyllamas/resolve/main/stories15M.pt
-          curl -fsSL -O https://github.com/karpathy/llama2.c/raw/master/tokenizer.model
-          popd
-        )
-        echo "::endgroup::"
-
-        echo "::group::Run inference"
-        export MODEL_PATH=checkpoints/stories15M/stories15M.pt
-        export MODEL_NAME=stories15M
-        export MODEL_DIR=/tmp
-        for DTYPE in float16 float32; do
-          # if [ $(uname -s) == Darwin ]; then
-          #   export DTYPE=float16
-          # fi
-
-          python3 torchchat.py generate --dtype ${DTYPE} --device mps --checkpoint-path ${MODEL_PATH} --temperature 0
-
-          python3 torchchat.py generate --dtype ${DTYPE} --device mps --quant '{"embedding": {"bitwidth": 8, "groupsize": 0}}' --checkpoint-path ${MODEL_PATH} --temperature 0
-
-          python3 torchchat.py generate --dtype ${DTYPE} --device mps --quant '{"embedding": {"bitwidth": 8, "groupsize": 8}}' --checkpoint-path ${MODEL_PATH} --temperature 0
-
-          python3 torchchat.py generate --dtype ${DTYPE} --device mps --quant '{"linear:int8": {"bitwidth": 8, "groupsize": 0}}' --checkpoint-path ${MODEL_PATH} --temperature 0
-
-          python3 torchchat.py generate --dtype ${DTYPE} --device mps --quant '{"linear:int8": {"bitwidth": 8, "groupsize": 8}}' --checkpoint-path ${MODEL_PATH} --temperature 0
-
-          PYTORCH_ENABLE_MPS_FALLBACK=1 python3 torchchat.py generate --dtype ${DTYPE} --device mps --quant '{"linear:int4": {"groupsize": 32}}' --checkpoint-path ${MODEL_PATH} --temperature 0
+    steps:
+    - name: Setup Python
+      uses: actions/setup-python@v2
+      with:
+        python-version: 3.10.11
+    - name: Run test
+      run: |
+        export PYTHON_VERSION="3.10"
+        set -x
+        # NS/MC: Remove previous installation of torch and torchao first
+        # as this script does not install anything into conda env but rather as system dep
+        pip3 uninstall -y torch || true
+        set -eou pipefail
+
+        pip3 uninstall -y torchao || true
+        set -eou pipefail
+
+        echo "::group::Print machine info"
+        uname -a
+        sysctl machdep.cpu.brand_string
+        sysctl machdep.cpu.core_count
+        echo "::endgroup::"
+
+        echo "::group::Install requirements"
+        # Install requirements
+        ./install/install_requirements.sh
+        ls -la
+        pwd
+        pip3 list
+        python3 -c 'import torch;print(f"torch: {torch.__version__, torch.version.git_version}")'
+        echo "::endgroup::"
+
+        echo "::group::Download checkpoints"
+        (
+          mkdir -p checkpoints/stories15M
+          pushd checkpoints/stories15M
+          curl -fsSL -O https://huggingface.co/karpathy/tinyllamas/resolve/main/stories15M.pt
+          curl -fsSL -O https://github.com/karpathy/llama2.c/raw/master/tokenizer.model
+          popd
+        )
+        echo "::endgroup::"
+
+        echo "::group::Run inference"
+        export MODEL_PATH=checkpoints/stories15M/stories15M.pt
+        export MODEL_NAME=stories15M
+        export MODEL_DIR=/tmp
+        for DTYPE in float16 float32; do
+          # if [ $(uname -s) == Darwin ]; then
+          #   export DTYPE=float16
+          # fi
+
+          python3 torchchat.py generate --dtype ${DTYPE} --device mps --checkpoint-path ${MODEL_PATH} --temperature 0
+
+          python3 torchchat.py generate --dtype ${DTYPE} --device mps --quant '{"embedding": {"bitwidth": 8, "groupsize": 0}}' --checkpoint-path ${MODEL_PATH} --temperature 0
+
+          python3 torchchat.py generate --dtype ${DTYPE} --device mps --quant '{"embedding": {"bitwidth": 8, "groupsize": 8}}' --checkpoint-path ${MODEL_PATH} --temperature 0
+
+          python3 torchchat.py generate --dtype ${DTYPE} --device mps --quant '{"linear:int8": {"bitwidth": 8, "groupsize": 0}}' --checkpoint-path ${MODEL_PATH} --temperature 0
+
+          python3 torchchat.py generate --dtype ${DTYPE} --device mps --quant '{"linear:int8": {"bitwidth": 8, "groupsize": 8}}' --checkpoint-path ${MODEL_PATH} --temperature 0
+
+          PYTORCH_ENABLE_MPS_FALLBACK=1 python3 torchchat.py generate --dtype ${DTYPE} --device mps --quant '{"linear:int4": {"groupsize": 32}}' --checkpoint-path ${MODEL_PATH} --temperature 0
   compile-gguf:
     strategy:
       matrix:
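
Both halves of the hunk run the same inference matrix: for each dtype, torchchat's generate command targets the MPS device with a --quant argument whose JSON config maps a quantization scheme ("embedding", "linear:int8", "linear:int4") to its parameters ("bitwidth", "groupsize"), and --temperature 0 keeps generation deterministic so runs are comparable across configs. A minimal sketch of reproducing one cell of that matrix locally, assuming a torchchat checkout with requirements installed and the stories15M checkpoint downloaded as in the job above:

    # Sketch: one cell of the CI matrix (float16 weights, 8-bit embedding quant).
    # Assumes the checkpoint layout created by the "Download checkpoints" step.
    export MODEL_PATH=checkpoints/stories15M/stories15M.pt
    python3 torchchat.py generate \
      --dtype float16 \
      --device mps \
      --quant '{"embedding": {"bitwidth": 8, "groupsize": 8}}' \
      --checkpoint-path ${MODEL_PATH} \
      --temperature 0

    # The int4 linear cell also sets PYTORCH_ENABLE_MPS_FALLBACK=1, which lets
    # ops without MPS kernels fall back to the CPU instead of raising an error.
    PYTORCH_ENABLE_MPS_FALLBACK=1 python3 torchchat.py generate \
      --dtype float16 \
      --device mps \
      --quant '{"linear:int4": {"groupsize": 32}}' \
      --checkpoint-path ${MODEL_PATH} \
      --temperature 0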