This repository was archived by the owner on Sep 4, 2025. It is now read-only.
File tree: 3 files changed, +39 −0 lines changed.
1
+ import pytest
2
+
3
+ from vllm import LLM
4
+
5
+
6
def test_empty_prompt():
    """Offline generation must reject an empty string prompt with ValueError."""
    engine = LLM(model="gpt2")
    empty_prompt = ""
    with pytest.raises(ValueError, match='Prompt cannot be empty'):
        engine.generate([empty_prompt])
Original file line number Diff line number Diff line change
1
+ # imports for guided decoding tests
2
+ import re
3
+
4
+ import openai
5
+ import pytest
6
+
7
+ from ...utils import RemoteOpenAIServer
8
+
9
+
10
@pytest.mark.asyncio
async def test_empty_prompt():
    """The completions endpoint must reject an empty prompt with a 400 error."""
    model = "gpt2"
    args = ["--enforce-eager"]
    # The server wraps the engine's ValueError, so match on the message text.
    pattern = re.compile('.+Prompt cannot be empty.+')
    with RemoteOpenAIServer(model, args) as server:
        client = server.get_async_client()
        with pytest.raises(openai.BadRequestError, match=pattern):
            await client.completions.create(
                model=model,
                prompt="",
                max_tokens=5,
                temperature=0.0)
Original file line number Diff line number Diff line change @@ -591,6 +591,7 @@ def _add_processed_request(
591
591
prompt_adapter_request : Optional [PromptAdapterRequest ],
592
592
trace_headers : Optional [Mapping [str , str ]] = None ,
593
593
) -> None :
594
+ self ._validate_model_inputs (processed_inputs )
594
595
# Create the sequences.
595
596
block_size = self .cache_config .block_size
596
597
seq_id = next (self .seq_counter )
@@ -1647,3 +1648,10 @@ def is_encoder_decoder_model(self):
1647
1648
1648
1649
def is_embedding_model (self ):
1649
1650
return self .model_config .is_embedding_model
1651
+
1652
+ def _validate_model_inputs (self , inputs : Union [LLMInputs ,
1653
+ EncoderDecoderLLMInputs ]):
1654
+ prompt_key = "encoder_prompt_token_ids" \
1655
+ if self .is_encoder_decoder_model () else "prompt_token_ids"
1656
+ if not inputs .get (prompt_key ):
1657
+ raise ValueError ("Prompt cannot be empty" )
You can’t perform that action at this time.
0 commit comments