Commit aefac1e

tool-call: update scripts/fetch_server_test_models.py

Author: ochafik
1 parent b825440 · commit aefac1e
File tree: 2 files changed, +22 −4 lines changed

examples/server/tests/README.md

Lines changed: 7 additions & 0 deletions
````diff
@@ -62,3 +62,10 @@ After changing logic in `steps.py`, ensure that `@bug` and `@wrong_usage` scenarios
 ```shell
 ./tests.sh --no-skipped --tags bug,wrong_usage || echo "should failed but compile"
 ```
+
+Some tests (especially `@slow` ones) require model downloads. Since this can time out the tests, you can pre-download them in the cache ahead of time with:
+
+```shell
+pip install -r examples/server/tests/requirements.txt
+python scripts/fetch_server_test_models.py
+```
````
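For context, here is a minimal sketch of what such a pre-fetch step boils down to: scan the server test feature files for Hugging Face repo/file references and list them before anything is downloaded. This is not the actual implementation — the real `scripts/fetch_server_test_models.py` parses the scenarios with behave's `Parser`, and the step phrasing and paths below are assumptions made for illustration:

```python
# Hypothetical sketch: collect (hf_repo, hf_file) pairs referenced by the
# server test scenarios so they can be fetched ahead of the test run.
# The step wording matched here is an assumption; the real script uses
# behave's Parser rather than regexes.
import glob
import re


def find_referenced_models(feature_glob: str = 'examples/server/tests/features/*.feature') -> set[tuple[str, str]]:
    models = set()
    step_re = re.compile(r'a model file (\S+) from HF repo (\S+)')  # assumed step phrasing
    for path in glob.glob(feature_glob):
        with open(path) as f:
            for line in f:
                m = step_re.search(line)
                if m:
                    models.add((m.group(2), m.group(1)))  # (hf_repo, hf_file)
    return models


if __name__ == '__main__':
    for repo, filename in sorted(find_referenced_models()):
        print(f'{repo} / {filename}')
```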

scripts/fetch_server_test_models.py

Lines changed: 15 additions & 4 deletions
```diff
@@ -9,12 +9,13 @@
 python scripts/fetch_server_test_models.py
 ( cd examples/server/tests && ./tests.sh --tags=slow )
 '''
-import os
 from behave.parser import Parser
 import glob
-import re
+import os
 from pydantic import BaseModel
+import re
 import subprocess
+import sys
 
 
 class HuggingFaceModel(BaseModel):
@@ -60,8 +61,18 @@ def process_step(step):
     os.path.dirname(__file__),
     '../build/bin/Release/llama-cli.exe' if os.name == 'nt' else '../build/bin/llama-cli'))
 
-for m in models:
+for m in sorted(list(models), key=lambda m: m.hf_repo):
     if '<' in m.hf_repo or '<' in m.hf_file:
         continue
+    if '-of-' in m.hf_file:
+        print(f'# Skipping model at {m.hf_repo} / {m.hf_file} because it is a split file', file=sys.stderr)
+        continue
     print(f'# Ensuring model at {m.hf_repo} / {m.hf_file} is fetched')
-    subprocess.check_call([cli_path, '-hfr', m.hf_repo, '-hff', m.hf_file, '-fa', '-n', '1', '-p', 'Hey', '--no-warmup'])
+    cmd = [cli_path, '-hfr', m.hf_repo, '-hff', m.hf_file, '-n', '1', '-p', 'Hey', '--no-warmup', '--log-disable']
+    if m.hf_file != 'tinyllamas/stories260K.gguf':
+        cmd.append('-fa')
+    try:
+        subprocess.check_call(cmd)
+    except subprocess.CalledProcessError:
+        print(f'# Failed to fetch model at {m.hf_repo} / {m.hf_file} with command:\n {" ".join(cmd)}', file=sys.stderr)
+        exit(1)
```
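The new `-of-` check skips sharded GGUF models: split files typically follow a `…-NNNNN-of-NNNNN.gguf` naming convention, so a plain substring test is enough to keep the pre-fetch from pulling a single shard of a multi-file model. A tiny sketch of the effect, using made-up filenames for illustration:

```python
# Illustration only: hypothetical file names showing which ones the
# '-of-' substring check treats as split (sharded) GGUF files.
filenames = [
    'tinyllamas/stories260K.gguf',          # single file -> fetched (also the one model that gets no '-fa')
    'some-model-q4_0.gguf',                 # single file -> fetched
    'some-model-q4_0-00001-of-00003.gguf',  # shard -> skipped by the pre-fetch script
]
for name in filenames:
    action = 'skip (split file)' if '-of-' in name else 'fetch'
    print(f'{name}: {action}')
```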
