Skip to content

Commit 8a5a9b7

Browse files
[CI/Build] Update defaults for test reproducibility (#14893)
Signed-off-by: DarkLight1337 <[email protected]>
1 parent bb3aedd commit 8a5a9b7

File tree

1 file changed

+16
-2
lines changed

1 file changed

+16
-2
lines changed

tests/conftest.py

Lines changed: 16 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -681,21 +681,34 @@ def hf_runner():
681681

682682

683683
class VllmRunner:
684+
"""
685+
The default values of some arguments have been modified from
686+
:class:`~vllm.LLM` as follows:
687+
- `trust_remote_code`: Set to `True` instead of `False` for convenience.
688+
- `seed`: Set to `0` instead of `None` for test reproducibility.
689+
- `max_model_len`: Set to `1024` instead of `None` to reduce memory usage.
690+
- `block_size`: Set to `16` instead of `None` to reduce memory usage.
691+
- `enable_chunked_prefill`: Set to `False` instead of `None` for
692+
test reproducibility.
693+
- `enforce_eager`: Set to `False` instead of `None` to test CUDA graph.
694+
"""
684695

685696
def __init__(
686697
self,
687698
model_name: str,
688699
task: TaskOption = "auto",
689700
tokenizer_name: Optional[str] = None,
690701
tokenizer_mode: str = "auto",
702+
trust_remote_code: bool = True,
703+
seed: Optional[int] = 0,
691704
# Use smaller max model length, otherwise bigger model cannot run due
692705
# to kv cache size limit.
693706
max_model_len: int = 1024,
694707
dtype: str = "half",
695708
disable_log_stats: bool = True,
696709
tensor_parallel_size: int = 1,
697710
block_size: int = 16,
698-
enable_chunked_prefill: bool = False,
711+
enable_chunked_prefill: Optional[bool] = False,
699712
swap_space: int = 4,
700713
enforce_eager: Optional[bool] = False,
701714
**kwargs,
@@ -705,8 +718,9 @@ def __init__(
705718
task=task,
706719
tokenizer=tokenizer_name,
707720
tokenizer_mode=tokenizer_mode,
708-
trust_remote_code=True,
721+
trust_remote_code=trust_remote_code,
709722
dtype=dtype,
723+
seed=seed,
710724
swap_space=swap_space,
711725
enforce_eager=enforce_eager,
712726
disable_log_stats=disable_log_stats,

0 commit comments

Comments (0)