Skip to content

Commit 7e8b89c

Browse files
committed
modify gemini
1 parent 4b8c070 commit 7e8b89c

File tree

1 file changed

+4
-2
lines changed

1 file changed

+4
-2
lines changed

spoon_ai/llm/providers/gemini_provider.py

Lines changed: 4 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -311,12 +311,14 @@ async def chat_stream(self, messages: List[Message],callbacks: Optional[List] =
         usage_data = None

         # Send streaming request
+        # Filter out parameters that generate_content_stream doesn't accept
+        filtered_kwargs = {k: v for k, v in kwargs.items()
+                           if k not in ['model', 'max_tokens', 'temperature', 'callbacks', 'timeout']}
         stream = self.client.models.generate_content_stream(
             model=model,
             contents=contents,
             config=generate_config,
-            **{k: v for k, v in kwargs.items()
-               if k not in ['model', 'max_tokens', 'temperature', 'callbacks']}
+            **filtered_kwargs
         )

         for part_response in stream:

0 commit comments

Comments
 (0)