Skip to content

Commit bc11249

Browse files
test: Add unit tests and CTest integration tests for YAML config
- Add test-config-yaml.cpp with unit tests for config parsing and error cases
- Add three CTest integration tests: yaml-only, yaml-plus-overrides, parity
- Add test-yaml-parity.sh script for comparing YAML vs flags output
- Gate integration tests on model file existence to avoid CI failures
- Use absolute paths in parity test to handle CTest working directory

Co-Authored-By: Jaime Mizrachi <[email protected]>
1 parent 29bc4a5 commit bc11249

File tree

3 files changed

+193
-0
lines changed

3 files changed

+193
-0
lines changed

tests/CMakeLists.txt

Lines changed: 25 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -184,9 +184,34 @@ llama_build_and_test(test-chat-template.cpp)
184184
llama_build_and_test(test-json-partial.cpp)
185185
llama_build_and_test(test-log.cpp)
186186
llama_build_and_test(test-regex-partial.cpp)
187+
llama_build_and_test(test-config-yaml.cpp)
187188

188189
llama_build_and_test(test-thread-safety.cpp ARGS -hf ggml-org/models -hff tinyllamas/stories15M-q4_0.gguf -ngl 99 -p "The meaning of life is" -n 128 -c 256 -ub 32 -np 4 -t 2)
189190

191+
# YAML config integration tests
192+
if(EXISTS ${PROJECT_SOURCE_DIR}/models/tinyllama-1.1b-chat-v1.0.Q4_K_M.gguf)
193+
llama_test_cmd(
194+
${CMAKE_RUNTIME_OUTPUT_DIRECTORY}/llama-cli
195+
NAME test-config-yaml-cli-only
196+
ARGS --config ${PROJECT_SOURCE_DIR}/configs/minimal.yaml -no-cnv
197+
)
198+
199+
llama_test_cmd(
200+
${CMAKE_RUNTIME_OUTPUT_DIRECTORY}/llama-cli
201+
NAME test-config-yaml-cli-overrides
202+
ARGS --config ${PROJECT_SOURCE_DIR}/configs/override.yaml -n 8 --temp 0.0 -no-cnv
203+
)
204+
205+
# Parity test - compare YAML config vs equivalent flags
206+
add_test(
207+
NAME test-config-yaml-parity
208+
WORKING_DIRECTORY ${CMAKE_RUNTIME_OUTPUT_DIRECTORY}
209+
COMMAND ${CMAKE_CURRENT_SOURCE_DIR}/test-yaml-parity.sh
210+
)
211+
set_property(TEST test-config-yaml-parity PROPERTY LABELS main)
212+
set_property(TEST test-config-yaml-parity PROPERTY ENVIRONMENT "PROJECT_SOURCE_DIR=${PROJECT_SOURCE_DIR}")
213+
endif()
214+
190215
# this fails on windows (github hosted runner) due to curl DLL not found (exit code 0xc0000135)
191216
if (NOT WIN32)
192217
llama_build_and_test(test-arg-parser.cpp)

tests/test-config-yaml.cpp

Lines changed: 131 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,131 @@
1+
#include "common.h"
2+
#include "config.h"
3+
#include <cassert>
4+
#include <iostream>
5+
#include <fstream>
6+
#include <filesystem>
7+
8+
namespace fs = std::filesystem;
9+
10+
static void test_minimal_config() {
11+
common_params params;
12+
13+
fs::path temp_dir = fs::temp_directory_path() / "llama_test";
14+
fs::create_directories(temp_dir);
15+
16+
std::string config_content = R"(
17+
model:
18+
path: test_model.gguf
19+
n_ctx: 512
20+
sampling:
21+
seed: 123
22+
temp: 0.5
23+
prompt: "Test prompt"
24+
n_predict: 64
25+
simple_io: true
26+
)";
27+
28+
fs::path config_path = temp_dir / "test_config.yaml";
29+
std::ofstream config_file(config_path);
30+
config_file << config_content;
31+
config_file.close();
32+
33+
bool result = common_load_yaml_config(config_path.string(), params);
34+
assert(result);
35+
(void)result;
36+
37+
assert(params.model.path == (temp_dir / "test_model.gguf").string());
38+
assert(params.n_ctx == 512);
39+
assert(params.sampling.seed == 123);
40+
assert(params.sampling.temp == 0.5f);
41+
assert(params.prompt == "Test prompt");
42+
assert(params.n_predict == 64);
43+
assert(params.simple_io == true);
44+
45+
fs::remove_all(temp_dir);
46+
47+
std::cout << "test_minimal_config: PASSED\n";
48+
}
49+
50+
static void test_unknown_key_error() {
51+
common_params params;
52+
53+
fs::path temp_dir = fs::temp_directory_path() / "llama_test";
54+
fs::create_directories(temp_dir);
55+
56+
std::string config_content = R"(
57+
model:
58+
path: test_model.gguf
59+
unknown_key: "should fail"
60+
n_ctx: 512
61+
)";
62+
63+
fs::path config_path = temp_dir / "test_config.yaml";
64+
std::ofstream config_file(config_path);
65+
config_file << config_content;
66+
config_file.close();
67+
68+
bool threw_exception = false;
69+
try {
70+
common_load_yaml_config(config_path.string(), params);
71+
} catch (const std::invalid_argument & e) {
72+
threw_exception = true;
73+
std::string error_msg = e.what();
74+
assert(error_msg.find("Unknown YAML keys") != std::string::npos);
75+
assert(error_msg.find("valid keys are") != std::string::npos);
76+
}
77+
78+
assert(threw_exception);
79+
(void)threw_exception;
80+
81+
fs::remove_all(temp_dir);
82+
83+
std::cout << "test_unknown_key_error: PASSED\n";
84+
}
85+
86+
static void test_relative_path_resolution() {
87+
common_params params;
88+
89+
fs::path temp_dir = fs::temp_directory_path() / "llama_test";
90+
fs::path config_dir = temp_dir / "configs";
91+
fs::create_directories(config_dir);
92+
93+
std::string config_content = R"(
94+
model:
95+
path: ../models/test_model.gguf
96+
prompt_file: prompts/test.txt
97+
)";
98+
99+
fs::path config_path = config_dir / "test_config.yaml";
100+
std::ofstream config_file(config_path);
101+
config_file << config_content;
102+
config_file.close();
103+
104+
bool result = common_load_yaml_config(config_path.string(), params);
105+
assert(result);
106+
(void)result;
107+
108+
fs::path expected_model = temp_dir / "models" / "test_model.gguf";
109+
fs::path expected_prompt = config_dir / "prompts" / "test.txt";
110+
111+
assert(params.model.path == expected_model.lexically_normal().string());
112+
assert(params.prompt_file == expected_prompt.lexically_normal().string());
113+
114+
fs::remove_all(temp_dir);
115+
116+
std::cout << "test_relative_path_resolution: PASSED\n";
117+
}
118+
119+
int main() {
120+
try {
121+
test_minimal_config();
122+
test_unknown_key_error();
123+
test_relative_path_resolution();
124+
125+
std::cout << "All tests passed!\n";
126+
return 0;
127+
} catch (const std::exception & e) {
128+
std::cerr << "Test failed: " << e.what() << std::endl;
129+
return 1;
130+
}
131+
}

tests/test-yaml-parity.sh

Lines changed: 37 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,37 @@
1+
#!/bin/bash
2+
3+
set -e
4+
5+
LLAMA_CLI="./llama-cli"
6+
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
7+
REPO_ROOT="$(dirname "$SCRIPT_DIR")"
8+
CONFIG_FILE="$REPO_ROOT/configs/minimal.yaml"
9+
MODEL_PATH="$REPO_ROOT/models/tinyllama-1.1b-chat-v1.0.Q4_K_M.gguf"
10+
11+
if [ ! -f "$MODEL_PATH" ]; then
12+
echo "Model file not found: $MODEL_PATH"
13+
exit 1
14+
fi
15+
16+
if [ ! -f "$CONFIG_FILE" ]; then
17+
echo "Config file not found: $CONFIG_FILE"
18+
exit 1
19+
fi
20+
21+
echo "Running with YAML config..."
22+
YAML_OUTPUT=$($LLAMA_CLI --config "$CONFIG_FILE" -no-cnv 2>/dev/null | tail -n +2)
23+
24+
echo "Running with equivalent flags..."
25+
FLAGS_OUTPUT=$($LLAMA_CLI -m "$MODEL_PATH" -n 16 -s 42 -c 256 --temp 0.0 -p "Hello from YAML" --simple-io -no-cnv 2>/dev/null | tail -n +2)
26+
27+
if [ "$YAML_OUTPUT" = "$FLAGS_OUTPUT" ]; then
28+
echo "PARITY TEST PASSED: YAML and flags produce identical output"
29+
exit 0
30+
else
31+
echo "PARITY TEST FAILED: Outputs differ"
32+
echo "YAML output:"
33+
echo "$YAML_OUTPUT"
34+
echo "Flags output:"
35+
echo "$FLAGS_OUTPUT"
36+
exit 1
37+
fi

0 commit comments

Comments
 (0)