This repository was archived by the owner on Sep 10, 2025. It is now read-only.

Commit 5ab3f26

fixes
1 parent 667d78a commit 5ab3f26

.github/workflows/pull.yml

Lines changed: 69 additions & 55 deletions
@@ -621,71 +621,87 @@ jobs:
         python torchchat.py remove stories15m
 
   test-mps:
-    uses: pytorch/test-infra/.github/workflows/macos_job.yml@main
-    with:
-      runner: macos-m1-stable # neeps MPS, was macos-m1-stable
-      script: |
-        export PYTHON_VERSION="3.10"
-        set -x
-        # NS/MC: Remove previous installation of torch and torchao first
-        # as this script does not install anything into conda env but rather as system dep
-        pip3 uninstall -y torch || true
-        set -eou pipefail
+    strategy:
+      matrix:
+        runner: [macos-m1-stable ]
+    runs-on: ${{matrix.runner}}
+    steps:
+      - name: Checkout repo
+        uses: actions/checkout@v2
+      - name: Setup Python
+        uses: actions/setup-python@v2
+        with:
+          python-version: 3.10.11
+      - name: Print machine info
+        run: |
+          uname -a
+          if [ $(uname -s) == Darwin ]; then
+            sysctl machdep.cpu.brand_string
+            sysctl machdep.cpu.core_count
+          fi
+      - name: Run test
+        run: |
+          export PYTHON_VERSION="3.10"
+          set -x
+          # NS/MC: Remove previous installation of torch and torchao first
+          # as this script does not install anything into conda env but rather as system dep
+          pip3 uninstall -y torch || true
+          set -eou pipefail
 
-        pip3 uninstall -y torchao || true
-        set -eou pipefail
+          pip3 uninstall -y torchao || true
+          set -eou pipefail
 
-        echo "::group::Print machine info"
-        uname -a
-        sysctl machdep.cpu.brand_string
-        sysctl machdep.cpu.core_count
-        echo "::endgroup::"
+          echo "::group::Print machine info"
+          uname -a
+          sysctl machdep.cpu.brand_string
+          sysctl machdep.cpu.core_count
+          echo "::endgroup::"
 
-        echo "::group::Install requirements"
-        # Install requirements
-        ./install/install_requirements.sh
-        ls -la
-        pwd
-        pip3 list
-        python3 -c 'import torch;print(f"torch: {torch.__version__, torch.version.git_version}")'
-        echo "::endgroup::"
+          echo "::group::Install requirements"
+          # Install requirements
+          ./install/install_requirements.sh
+          ls -la
+          pwd
+          pip3 list
+          python3 -c 'import torch;print(f"torch: {torch.__version__, torch.version.git_version}")'
+          echo "::endgroup::"
 
-        echo "::group::Download checkpoints"
-        (
-          mkdir -p checkpoints/stories15M
-          pushd checkpoints/stories15M
-          curl -fsSL -O https://huggingface.co/karpathy/tinyllamas/resolve/main/stories15M.pt
-          curl -fsSL -O https://github.com/karpathy/llama2.c/raw/master/tokenizer.model
-          popd
-        )
-        echo "::endgroup::"
+          echo "::group::Download checkpoints"
+          (
+            mkdir -p checkpoints/stories15M
+            pushd checkpoints/stories15M
+            curl -fsSL -O https://huggingface.co/karpathy/tinyllamas/resolve/main/stories15M.pt
+            curl -fsSL -O https://github.com/karpathy/llama2.c/raw/master/tokenizer.model
+            popd
+          )
+          echo "::endgroup::"
 
-        echo "::group::Run inference"
-        export MODEL_PATH=checkpoints/stories15M/stories15M.pt
-        export MODEL_NAME=stories15M
-        export MODEL_DIR=/tmp
+          echo "::group::Run inference"
+          export MODEL_PATH=checkpoints/stories15M/stories15M.pt
+          export MODEL_NAME=stories15M
+          export MODEL_DIR=/tmp
 
-        python3 torchchat.py generate --device mps --checkpoint-path ${MODEL_PATH} --temperature 0
+          python3 torchchat.py generate --device mps --checkpoint-path ${MODEL_PATH} --temperature 0
 
-        echo "************************************************************"
-        echo "*** embedding"
-        echo "************************************************************"
+          echo "************************************************************"
+          echo "*** embedding"
+          echo "************************************************************"
 
-        python3 torchchat.py generate --device mps --quant '{"embedding" : {"bitwidth": 8, "groupsize": 0}}' --checkpoint-path ${MODEL_PATH} --temperature 0
-        python3 torchchat.py generate --device mps --quant '{"embedding" : {"bitwidth": 8, "groupsize": 8}}' --checkpoint-path ${MODEL_PATH} --temperature 0
+          python3 torchchat.py generate --device mps --quant '{"embedding" : {"bitwidth": 8, "groupsize": 0}}' --checkpoint-path ${MODEL_PATH} --temperature 0
+          python3 torchchat.py generate --device mps --quant '{"embedding" : {"bitwidth": 8, "groupsize": 8}}' --checkpoint-path ${MODEL_PATH} --temperature 0
 
-        echo "************************************************************"
-        echo "*** linear int8"
-        echo "************************************************************"
+          echo "************************************************************"
+          echo "*** linear int8"
+          echo "************************************************************"
 
-        python3 torchchat.py generate --device mps --quant '{"linear:int8" : {"bitwidth": 8, "groupsize": 0}}' --checkpoint-path ${MODEL_PATH} --temperature 0
-        python3 torchchat.py generate --device mps --quant '{"linear:int8" : {"bitwidth": 8, "groupsize": 8}}' --checkpoint-path ${MODEL_PATH} --temperature 0
+          python3 torchchat.py generate --device mps --quant '{"linear:int8" : {"bitwidth": 8, "groupsize": 0}}' --checkpoint-path ${MODEL_PATH} --temperature 0
+          python3 torchchat.py generate --device mps --quant '{"linear:int8" : {"bitwidth": 8, "groupsize": 8}}' --checkpoint-path ${MODEL_PATH} --temperature 0
 
-        echo "************************************************************"
-        echo "*** linear int4"
-        echo "************************************************************"
+          echo "************************************************************"
+          echo "*** linear int4"
+          echo "************************************************************"
 
-        PYTORCH_ENABLE_MPS_FALLBACK=1 python3 torchchat.py generate --device mps --quant '{"linear:int4" : {"groupsize": 32}}' --checkpoint-path ${MODEL_PATH} --temperature 0
+          PYTORCH_ENABLE_MPS_FALLBACK=1 python3 torchchat.py generate --device mps --quant '{"linear:int4" : {"groupsize": 32}}' --checkpoint-path ${MODEL_PATH} --temperature 0
   test-gguf-util:
     strategy:
       matrix:
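
Pieced together from the added lines in the hunk above, the reworked test-mps job now provisions the macOS runner inline instead of calling the reusable pytorch/test-infra macos_job.yml workflow. A condensed sketch of the resulting job follows; indentation is inferred, since the rendered diff does not preserve it, and the Run test script continues with the install, checkpoint-download, and generate commands listed in the hunk:

  test-mps:
    strategy:
      matrix:
        runner: [macos-m1-stable]
    runs-on: ${{matrix.runner}}
    steps:
      - name: Checkout repo
        uses: actions/checkout@v2
      - name: Setup Python
        uses: actions/setup-python@v2
        with:
          python-version: 3.10.11
      - name: Print machine info
        run: |
          uname -a
          if [ $(uname -s) == Darwin ]; then
            sysctl machdep.cpu.brand_string
            sysctl machdep.cpu.core_count
          fi
      - name: Run test
        run: |
          export PYTHON_VERSION="3.10"
          set -x
          # remove any system-level torch/torchao installs before reinstalling requirements
          pip3 uninstall -y torch || true
          pip3 uninstall -y torchao || true
          set -eou pipefail
          # ... followed by ./install/install_requirements.sh, the stories15M checkpoint
          # download, and the plain and quantized torchchat.py generate runs from the hunk above
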
@@ -939,7 +955,6 @@ jobs:
           ENABLE_ET_PYBIND="${1:-true}"
           source "torchchat/utils/scripts/install_utils.sh"
           install_executorch_python_libs $ENABLE_ET_PYBIND
-          popd
       - name: Install runner
         run: |
           echo "Installing runner"
@@ -1103,7 +1118,6 @@ jobs:
           ENABLE_ET_PYBIND="${1:-true}"
           source "torchchat/utils/scripts/install_utils.sh"
           install_executorch_python_libs $ENABLE_ET_PYBIND
-          popd
       - name: Install runner
         run: |
           echo "Installing runner"

0 commit comments
