We read every piece of feedback, and take your input very seriously.
To see all available qualifiers, see our documentation.
There was an error while loading. Please reload this page.
1 parent 6153baa · commit 6c7cec0 — Copy full SHA for 6c7cec0
llama_cpp/server/__main__.py
@@ -76,7 +76,7 @@ class CreateCompletionRequest(BaseModel):
76
temperature: float = 0.8
77
top_p: float = 0.95
78
echo: bool = False
79
- stop: List[str] = []
+ stop: Optional[List[str]] = []
80
stream: bool = False
81
82
# ignored or currently unsupported
@@ -173,7 +173,7 @@ class CreateChatCompletionRequest(BaseModel):
173
174
175
176
177
max_tokens: int = 128
178
179
0 commit comments