
Commit 7cb10bf

restore ci jobs

1 parent: 7bac02a

File tree: 1 file changed (+28, -28 lines)


.github/workflows/pull.yml (28 additions, 28 deletions)
@@ -653,33 +653,33 @@ jobs:
         # run eval_llama wikitext task
         PYTHON_EXECUTABLE=python bash .ci/scripts/test_eval_llama_wikitext.sh

-  TODO(larryliu0820): Fix this issue before reenabling it: https://gist.github.com/larryliu0820/7377ecd0d79dbc06076cec8d9f2b85d2
-  test-eval_llama-mmlu-linux:
-    name: test-eval_llama-mmlu-linux
-    uses: pytorch/test-infra/.github/workflows/linux_job_v2.yml@main
-    permissions:
-      id-token: write
-      contents: read
-    strategy:
-      fail-fast: false
-    with:
-      runner: linux.24xlarge
-      docker-image: ci-image:executorch-ubuntu-22.04-clang12
-      submodules: 'recursive'
-      ref: ${{ github.event_name == 'pull_request' && github.event.pull_request.head.sha || github.sha }}
-      timeout: 90
-      script: |
-        # The generic Linux job chooses to use base env, not the one setup by the image
-        CONDA_ENV=$(conda env list --json | jq -r ".envs | .[-1]")
-        conda activate "${CONDA_ENV}"
-
-        PYTHON_EXECUTABLE=python bash .ci/scripts/setup-linux.sh --build-tool "cmake"
-
-        # install llama requirements
-        bash examples/models/llama/install_requirements.sh
-
-        # run eval_llama mmlu task
-        PYTHON_EXECUTABLE=python bash .ci/scripts/test_eval_llama_mmlu.sh
+  # TODO(larryliu0820): Fix this issue before reenabling it: https://gist.github.com/larryliu0820/7377ecd0d79dbc06076cec8d9f2b85d2
+  # test-eval_llama-mmlu-linux:
+  #   name: test-eval_llama-mmlu-linux
+  #   uses: pytorch/test-infra/.github/workflows/linux_job_v2.yml@main
+  #   permissions:
+  #     id-token: write
+  #     contents: read
+  #   strategy:
+  #     fail-fast: false
+  #   with:
+  #     runner: linux.24xlarge
+  #     docker-image: ci-image:executorch-ubuntu-22.04-clang12
+  #     submodules: 'recursive'
+  #     ref: ${{ github.event_name == 'pull_request' && github.event.pull_request.head.sha || github.sha }}
+  #     timeout: 90
+  #     script: |
+  #       # The generic Linux job chooses to use base env, not the one setup by the image
+  #       CONDA_ENV=$(conda env list --json | jq -r ".envs | .[-1]")
+  #       conda activate "${CONDA_ENV}"
+
+  #       PYTHON_EXECUTABLE=python bash .ci/scripts/setup-linux.sh --build-tool "cmake"
+
+  #       # install llama requirements
+  #       bash examples/models/llama/install_requirements.sh
+
+  #       # run eval_llama mmlu task
+  #       PYTHON_EXECUTABLE=python bash .ci/scripts/test_eval_llama_mmlu.sh

   test-llama_runner_eager-linux:
     name: test-llama_runner_eager-linux
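
Note on the script block being commented out above: the reusable linux_job_v2 workflow starts in conda's base environment rather than the one baked into the Docker image, so the job first locates and activates the image's environment. Below is a minimal sketch of that selection logic as a standalone bash snippet, under the assumption that the image's environment is the last entry reported by conda; the example paths in the comments are illustrative, not taken from the CI image.

    #!/usr/bin/env bash
    set -euo pipefail

    # Make `conda activate` usable in a non-interactive script.
    eval "$(conda shell.bash hook)"

    # `conda env list --json` prints something like:
    #   {"envs": ["/opt/conda", "/opt/conda/envs/py311"]}   (paths illustrative)
    # `.envs | .[-1]` picks the last entry, assumed here to be the environment
    # created during the Docker build rather than base.
    CONDA_ENV=$(conda env list --json | jq -r ".envs | .[-1]")

    # Activate it so later commands (python, pip, cmake) run inside that environment.
    conda activate "${CONDA_ENV}"
    echo "Using conda environment: ${CONDA_ENV}"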
@@ -875,7 +875,7 @@ jobs:
         CMAKE_ARGS="-DEXECUTORCH_BUILD_NXP_NEUTRON=ON" \
         .ci/scripts/setup-linux.sh --build-tool "cmake"

-        Install test requirements
+        # Install test requirements
         pip install -r backends/nxp/requirements-tests-pypi.txt
         pip install -r backends/nxp/requirements-tests-eiq.txt
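
The second hunk restores a single `#`. Everything under `script: |` is executed by the shell, so the bare line `Install test requirements` would be interpreted as a command named `Install` and would presumably fail the job; with the `#` restored it is just a comment above the pip installs. A small sketch of the repaired fragment as it runs, with the requirements file paths taken from the diff above:

    # Install test requirements
    pip install -r backends/nxp/requirements-tests-pypi.txt
    pip install -r backends/nxp/requirements-tests-eiq.txt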
