Skip to content

Commit 9505f17

Browse files
ci(workflow): fetch tiny stories15M GGUF via git-lfs before configure; run unit+integration tests
- Use git-lfs to download stories15M-q4_0.gguf from ggml-org/models
- Download model before CMake configure so if(EXISTS ...) condition works
- Update all config files and tests to use consistent model path
- Run comprehensive YAML config test suite in CI

Co-Authored-By: Jaime Mizrachi <[email protected]>
1 parent 3b6d738 commit 9505f17

File tree

5 files changed

+15
-11
lines changed

5 files changed

+15
-11
lines changed

.github/workflows/config.yml

Lines changed: 11 additions & 7 deletions
Original file line numberDiff line numberDiff line change
@@ -19,20 +19,24 @@ jobs:
1919
id: depends
2020
run: |
2121
sudo apt-get update
22-
sudo apt-get install -y build-essential cmake wget
22+
sudo apt-get install -y build-essential cmake git-lfs
23+
git lfs install
24+
25+
- name: Download tiny model (stories15M)
26+
run: |
27+
mkdir -p models
28+
git clone https://huggingface.co/ggml-org/models hf-models
29+
ls -la hf-models/tinyllamas/
30+
cp hf-models/tinyllamas/stories15M-q4_0.gguf models/stories15M-q4_0.gguf
31+
ls -lh models/
2332
2433
- name: Build
2534
id: cmake_build
2635
run: |
2736
cmake -B build -DLLAMA_BUILD_TESTS=ON -DLLAMA_BUILD_TOOLS=ON -DLLAMA_FATAL_WARNINGS=ON -DLLAMA_CURL=OFF
2837
cmake --build build --config Release -j $(nproc)
2938
30-
- name: Use existing tiny model
31-
run: |
32-
# Use the existing tinyllama model that's already in the repo
33-
ls -la models/tinyllama-1.1b-chat-v1.0.Q4_K_M.gguf
34-
3539
- name: Test YAML config functionality
3640
run: |
3741
cd build
38-
ctest -R "test-config-yaml" --verbose --timeout 300
42+
ctest -R "test-config-yaml|test-config-yaml-cli-.*|test-config-yaml-parity" --output-on-failure --timeout 300

configs/minimal.yaml

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1,5 +1,5 @@
11
model:
2-
path: ../models/tinyllama-1.1b-chat-v1.0.Q4_K_M.gguf
2+
path: ../models/stories15M-q4_0.gguf
33
n_ctx: 256
44
sampling:
55
seed: 42

configs/override.yaml

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1,5 +1,5 @@
11
model:
2-
path: ../models/tinyllama-1.1b-chat-v1.0.Q4_K_M.gguf
2+
path: ../models/stories15M-q4_0.gguf
33
n_ctx: 256
44
sampling:
55
seed: 42

tests/CMakeLists.txt

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -191,7 +191,7 @@ endif()
191191
llama_build_and_test(test-thread-safety.cpp ARGS -hf ggml-org/models -hff tinyllamas/stories15M-q4_0.gguf -ngl 99 -p "The meaning of life is" -n 128 -c 256 -ub 32 -np 4 -t 2)
192192

193193
# YAML config integration tests
194-
if(EXISTS ${PROJECT_SOURCE_DIR}/models/tinyllama-1.1b-chat-v1.0.Q4_K_M.gguf)
194+
if(EXISTS ${PROJECT_SOURCE_DIR}/models/stories15M-q4_0.gguf)
195195
llama_test_cmd(
196196
${CMAKE_RUNTIME_OUTPUT_DIRECTORY}/llama-cli
197197
NAME test-config-yaml-cli-only

tests/test-yaml-parity.sh

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -6,7 +6,7 @@ LLAMA_CLI="./llama-cli"
66
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
77
REPO_ROOT="$(dirname "$SCRIPT_DIR")"
88
CONFIG_FILE="$REPO_ROOT/configs/minimal.yaml"
9-
MODEL_PATH="$REPO_ROOT/models/tinyllama-1.1b-chat-v1.0.Q4_K_M.gguf"
9+
MODEL_PATH="$REPO_ROOT/models/stories15M-q4_0.gguf"
1010

1111
if [ ! -f "$MODEL_PATH" ]; then
1212
echo "Model file not found: $MODEL_PATH"

0 commit comments

Comments (0)