|
| 1 | +import os |
1 | 2 | from typing import List, Optional, Tuple
|
2 | 3 |
|
3 | 4 | import pytest
|
|
7 | 8 | from vllm import LLM, SamplingParams
|
8 | 9 | from vllm.transformers_utils.tokenizer import get_tokenizer
|
9 | 10 |
|
10 |
| -_TEST_PROMPTS = [ |
11 |
| - "vLLM is a high-throughput and memory-efficient inference and serving engine for LLMs.", |
12 |
| - "Briefly describe the major milestones in the development of artificial intelligence from 1950 to 2020.", |
13 |
| - "Compare and contrast artificial intelligence with human intelligence in terms of processing information.", |
14 |
| - "Describe the basic components of a neural network and how it can be trained.", |
15 |
| - "Write a short story about a robot that dreams for the first time.", |
16 |
| - "Analyze the impact of the COVID-19 pandemic on global economic structures and future business models.", |
17 |
| - "Explain the cultural significance of the Mona Lisa painting, and how its perception might vary in Western versus Eastern societies.", |
18 |
| - "Translate the following English sentence into Japanese, French, and Swahili: 'The early bird catches the worm.'", |
19 |
| -] |
# Paths (relative to the tests/ directory) of text files containing the
# prompts used by the fixtures below; each file holds one prompt per line.
_TEST_PROMPTS = ["prompts/example.txt"]
_LONG_PROMPTS = ["prompts/summary.txt"]
| 13 | + |
| 14 | + |
| 15 | +def _read_prompts(filename: str) -> str: |
| 16 | + prompts = [] |
| 17 | + with open(filename, "r") as f: |
| 18 | + prompt = f.readline() |
| 19 | + prompts.append(prompt) |
| 20 | + return prompts |
20 | 21 |
|
21 | 22 |
|
@pytest.fixture
def example_prompts() -> List[str]:
    """Fixture yielding the short test prompts read from tests/prompts/."""
    collected: List[str] = []
    for relative_path in _TEST_PROMPTS:
        collected.extend(_read_prompts(os.path.join("tests", relative_path)))
    return collected
| 29 | + |
| 30 | + |
@pytest.fixture
def example_long_prompts() -> List[str]:
    """Fixture yielding the long (summarization) prompts from tests/prompts/."""
    collected: List[str] = []
    for relative_path in _LONG_PROMPTS:
        collected.extend(_read_prompts(os.path.join("tests", relative_path)))
    return collected
25 | 37 |
|
26 | 38 |
|
27 | 39 | _STR_DTYPE_TO_TORCH_DTYPE = {
|
|
0 commit comments