@@ -275,6 +275,8 @@ class Joke(BaseModel):
     """
     xai_api_base: str = Field(default="https://api.x.ai/v1/")
     """Base URL path for API requests."""
+    search_parameters: Optional[dict[str, Any]] = None
+    """Parameters for search requests. Example: ``{"mode": "auto"}``."""
 
     openai_api_key: Optional[SecretStr] = None
     openai_api_base: Optional[str] = None
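A minimal usage sketch for the new `search_parameters` field (not part of the diff itself): it assumes the `langchain_xai` package is installed and an xAI API key is available via `XAI_API_KEY`; the model name is a placeholder.

```python
# Sketch only: "grok-3" is a placeholder model name; the API key is read
# from the XAI_API_KEY environment variable.
from langchain_xai import ChatXAI

llm = ChatXAI(
    model="grok-3",
    # New field from this change; {"mode": "auto"} mirrors the docstring example.
    search_parameters={"mode": "auto"},
)
```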
@@ -371,6 +373,18 @@ def validate_environment(self) -> Self:
             )
         return self
 
+    @property
+    def _default_params(self) -> dict[str, Any]:
+        """Get default parameters."""
+        params = super()._default_params
+        if self.search_parameters:
+            if "extra_body" in params:
+                params["extra_body"]["search_parameters"] = self.search_parameters
+            else:
+                params["extra_body"] = {"search_parameters": self.search_parameters}
+
+        return params
+
     def _create_chat_result(
         self,
         response: Union[dict, openai.BaseModel],
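A standalone illustration (not library code) of the `extra_body` merge performed by the new `_default_params` property: an existing `extra_body` entry is preserved and `search_parameters` is nested inside it, so the OpenAI-compatible client can pass it through in the request body.

```python
# Standalone sketch of the merge logic above, using plain dicts.
params = {"model": "grok-3", "extra_body": {"existing": "value"}}
search_parameters = {"mode": "auto"}

if "extra_body" in params:
    params["extra_body"]["search_parameters"] = search_parameters
else:
    params["extra_body"] = {"search_parameters": search_parameters}

assert params["extra_body"] == {
    "existing": "value",
    "search_parameters": {"mode": "auto"},
}
```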
@@ -386,6 +400,11 @@ def _create_chat_result(
                 response.choices[0].message.reasoning_content  # type: ignore
             )
 
+        if hasattr(response, "citations"):
+            rtn.generations[0].message.additional_kwargs["citations"] = (
+                response.citations
+            )
+
         return rtn
 
     def _convert_chunk_to_generation_chunk(
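With the block above, any `citations` attribute on the response surfaces in the resulting message's `additional_kwargs`. A hedged sketch of how a caller might read them, reusing the `llm` instance from the first example:

```python
# Sketch: citations are only present when the API returns them,
# e.g. when search_parameters is set.
response = llm.invoke("Give me a recent tech headline with sources.")
for citation in response.additional_kwargs.get("citations", []):
    print(citation)
```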
@@ -407,6 +426,10 @@ def _convert_chunk_to_generation_chunk(
                     reasoning_content
                 )
 
+        if (citations := chunk.get("citations")) and generation_chunk:
+            if isinstance(generation_chunk.message, AIMessageChunk):
+                generation_chunk.message.additional_kwargs["citations"] = citations
+
         return generation_chunk
 
     def with_structured_output(
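A similar sketch for the streaming path handled above: chunks that carry citations expose them on `additional_kwargs` while the stream is consumed (again reusing the `llm` instance from the first example).

```python
# Sketch: most chunks carry only content; citations appear only on the
# chunk(s) where the API includes them.
for chunk in llm.stream("Give me a recent tech headline with sources."):
    print(chunk.content, end="")
    if "citations" in chunk.additional_kwargs:
        print("\nCitations:", chunk.additional_kwargs["citations"])
```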