5050 LITELLM_ROUTER_INSTANCE_KEY ,
5151)
5252
# Allow-list of LiteLLM chat-completion kwargs that are recorded as span metadata
# (everything not listed here is dropped by get_metadata_from_kwargs).
LITELLM_METADATA_CHAT_KEYS = (
    "timeout",
    "temperature",
    "top_p",
    "n",
    "stream",
    "stream_options",
    "stop",
    "max_completion_tokens",
    "max_tokens",
    "modalities",
    "prediction",
    "presence_penalty",
    "frequency_penalty",
    "logit_bias",
    "user",
    "response_format",
    "seed",
    "tool_choice",
    "parallel_tool_calls",
    "logprobs",
    "top_logprobs",
    "deployment_id",
    "reasoning_effort",
    "base_url",
    "api_base",
    "api_version",
    "model_list",
)
# Allow-list of LiteLLM text-completion kwargs that are recorded as span metadata
# (everything not listed here is dropped by get_metadata_from_kwargs).
LITELLM_METADATA_COMPLETION_KEYS = (
    "best_of",
    "echo",
    "frequency_penalty",
    "logit_bias",
    "logprobs",
    "max_tokens",
    "n",
    "presence_penalty",
    "stop",
    "stream",
    "stream_options",
    "suffix",
    "temperature",
    "top_p",
    "user",
    "api_base",
    "api_version",
    "model_list",
    "custom_llm_provider",
)
53104
54105def extract_model_name_google (instance , model_name_attr ):
55106 """Extract the model name from the instance.
@@ -299,12 +350,14 @@ def get_messages_from_converse_content(role: str, content: list):
299350 return messages
300351
301352
302- def openai_set_meta_tags_from_completion (span : Span , kwargs : Dict [str , Any ], completions : Any ) -> None :
353+ def openai_set_meta_tags_from_completion (
354+ span : Span , kwargs : Dict [str , Any ], completions : Any , integration_name : str = "openai"
355+ ) -> None :
303356 """Extract prompt/response tags from a completion and set them as temporary "_ml_obs.meta.*" tags."""
304357 prompt = kwargs .get ("prompt" , "" )
305358 if isinstance (prompt , str ):
306359 prompt = [prompt ]
307- parameters = { k : v for k , v in kwargs . items () if k not in OPENAI_SKIPPED_COMPLETION_TAGS }
360+ parameters = get_metadata_from_kwargs ( kwargs , integration_name , "completion" )
308361 output_messages = [{"content" : "" }]
309362 if not span .error and completions :
310363 choices = getattr (completions , "choices" , completions )
@@ -318,15 +371,17 @@ def openai_set_meta_tags_from_completion(span: Span, kwargs: Dict[str, Any], com
318371 )
319372
320373
321- def openai_set_meta_tags_from_chat (span : Span , kwargs : Dict [str , Any ], messages : Optional [Any ]) -> None :
374+ def openai_set_meta_tags_from_chat (
375+ span : Span , kwargs : Dict [str , Any ], messages : Optional [Any ], integration_name : str = "openai"
376+ ) -> None :
322377 """Extract prompt/response tags from a chat completion and set them as temporary "_ml_obs.meta.*" tags."""
323378 input_messages = []
324379 for m in kwargs .get ("messages" , []):
325380 tool_call_id = m .get ("tool_call_id" )
326381 if tool_call_id :
327382 core .dispatch (DISPATCH_ON_TOOL_CALL_OUTPUT_USED , (tool_call_id , span ))
328383 input_messages .append ({"content" : str (_get_attr (m , "content" , "" )), "role" : str (_get_attr (m , "role" , "" ))})
329- parameters = { k : v for k , v in kwargs . items () if k not in OPENAI_SKIPPED_CHAT_TAGS }
384+ parameters = get_metadata_from_kwargs ( kwargs , integration_name , "chat" )
330385 span ._set_ctx_items ({INPUT_MESSAGES : input_messages , METADATA : parameters })
331386
332387 if span .error or not messages :
@@ -398,6 +453,19 @@ def openai_set_meta_tags_from_chat(span: Span, kwargs: Dict[str, Any], messages:
398453 span ._set_ctx_item (OUTPUT_MESSAGES , output_messages )
399454
400455
def get_metadata_from_kwargs(
    kwargs: Dict[str, Any], integration_name: str = "openai", operation: str = "chat"
) -> Dict[str, Any]:
    """Select the subset of request ``kwargs`` to record as span metadata.

    OpenAI uses a deny-list: every kwarg is kept except the known skip tags for
    the given operation. LiteLLM uses an allow-list: only explicitly listed keys
    are kept. Any other ``integration_name`` yields an empty dict.

    :param kwargs: Raw keyword arguments passed to the traced client call.
    :param integration_name: Either ``"openai"`` or ``"litellm"``.
    :param operation: ``"chat"`` selects the chat key set; anything else selects
        the completion key set.
    :return: Filtered copy of ``kwargs`` suitable for metadata tagging.
    """
    if integration_name == "openai":
        skipped = OPENAI_SKIPPED_CHAT_TAGS if operation == "chat" else OPENAI_SKIPPED_COMPLETION_TAGS
        return {key: value for key, value in kwargs.items() if key not in skipped}
    if integration_name == "litellm":
        allowed = LITELLM_METADATA_CHAT_KEYS if operation == "chat" else LITELLM_METADATA_COMPLETION_KEYS
        return {key: value for key, value in kwargs.items() if key in allowed}
    return {}
468+
401469def openai_get_input_messages_from_response_input (
402470 messages : Optional [Union [str , List [Dict [str , Any ]]]]
403471) -> List [Dict [str , Any ]]:
0 commit comments