Skip to content

Commit cb54644

Browse files
fix(braintrust_logging.py): filter metadata before logging
avoid unserializable json
1 parent ccb7ce0 commit cb54644

File tree

3 files changed

+100
-15
lines changed

3 files changed

+100
-15
lines changed

litellm/integrations/braintrust_logging.py

Lines changed: 7 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -12,6 +12,7 @@
1212
import litellm
1313
from litellm import verbose_logger
1414
from litellm.integrations.custom_logger import CustomLogger
15+
from litellm.litellm_core_utils.safe_json_dumps import filter_json_serializable
1516
from litellm.llms.custom_httpx.http_handler import (
1617
HTTPHandler,
1718
get_async_httpx_client,
@@ -45,9 +46,9 @@ def __init__(
4546
"Authorization": "Bearer " + self.api_key,
4647
"Content-Type": "application/json",
4748
}
48-
self._project_id_cache: Dict[
49-
str, str
50-
] = {} # Cache mapping project names to IDs
49+
self._project_id_cache: Dict[str, str] = (
50+
{}
51+
) # Cache mapping project names to IDs
5152
self.global_braintrust_http_handler = get_async_httpx_client(
5253
llm_provider=httpxSpecialProvider.LoggingCallback
5354
)
@@ -276,7 +277,7 @@ def log_success_event( # noqa: PLR0915
276277

277278
# Allow metadata override for span name
278279
span_name = metadata.get("span_name", "Chat Completion")
279-
280+
280281
request_data = {
281282
"id": litellm_call_id,
282283
"input": prompt["messages"],
@@ -431,12 +432,12 @@ async def async_log_success_event( # noqa: PLR0915
431432

432433
# Allow metadata override for span name
433434
span_name = metadata.get("span_name", "Chat Completion")
434-
435+
435436
request_data = {
436437
"id": litellm_call_id,
437438
"input": prompt["messages"],
438439
"output": output,
439-
"metadata": clean_metadata,
440+
"metadata": filter_json_serializable(clean_metadata),
440441
"tags": tags,
441442
"span_attributes": {"name": span_name, "type": "llm"},
442443
}

litellm/litellm_core_utils/safe_json_dumps.py

Lines changed: 92 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -1,5 +1,6 @@
11
import json
22
from typing import Any, Union
3+
34
from litellm.constants import DEFAULT_MAX_RECURSE_DEPTH
45

56

@@ -49,3 +50,94 @@ def _serialize(obj: Any, seen: set, depth: int) -> Any:
4950

5051
safe_data = _serialize(data, set(), 0)
5152
return json.dumps(safe_data, default=str)
53+
54+
55+
def filter_json_serializable(
    data: Any, max_depth: int = DEFAULT_MAX_RECURSE_DEPTH
) -> Any:
    """
    Recursively filter ``data`` down to only JSON-serializable content.

    Non-serializable leaves, non-string dict keys, circular references, and
    anything nested deeper than ``max_depth`` are silently dropped rather
    than raising, so the result can always be passed to ``json.dumps``.

    Args:
        data: Arbitrary value (dict / list / tuple / set / primitive / object).
        max_depth: Maximum recursion depth; deeper branches are discarded.

    Returns:
        A filtered copy of ``data``: dicts and lists are rebuilt with only
        serializable entries, tuples stay tuples, sets become sorted lists
        (insertion-order lists when the elements are not mutually orderable),
        primitives pass through, and unserializable objects become ``None``.
    """

    def _is_json_serializable(obj: Any) -> bool:
        """Return True if ``json.dumps`` accepts ``obj`` as-is."""
        try:
            json.dumps(obj)
            return True
        except (TypeError, ValueError):
            return False

    def _filter(obj: Any, seen: set, depth: int) -> Any:
        # Depth guard: discard branches nested beyond max_depth.
        if depth > max_depth:
            return None

        # Base case: leaves are cheap (O(1)) to test directly.
        if isinstance(obj, (str, int, float, bool, type(None))):
            return obj if _is_json_serializable(obj) else None

        # Circular reference: drop the repeated occurrence only; the
        # marker is released in the finally below so sibling references
        # to the same object are still processed.
        if id(obj) in seen:
            return None
        seen.add(id(obj))

        # NOTE: containers rebuilt below are serializable by construction
        # (every element came through _filter), so no per-subtree
        # json.dumps re-check is needed — the original quadratic
        # re-validation of whole subtrees is intentionally removed.
        try:
            if isinstance(obj, dict):
                # Keep only string keys whose filtered value survived,
                # or whose original value was a legitimate None.
                filtered_dict = {}
                for key, value in obj.items():
                    if not isinstance(key, str):
                        continue
                    filtered_value = _filter(value, seen, depth + 1)
                    if filtered_value is not None or value is None:
                        filtered_dict[key] = filtered_value
                return filtered_dict

            if isinstance(obj, (list, tuple, set)):
                filtered_items = []
                for item in obj:
                    filtered_item = _filter(item, seen, depth + 1)
                    if filtered_item is not None or item is None:
                        filtered_items.append(filtered_item)
                if isinstance(obj, tuple):
                    return tuple(filtered_items)
                if isinstance(obj, set):
                    # Sets are returned as sorted lists for determinism,
                    # but fall back to insertion order instead of dropping
                    # the whole set when elements are not mutually
                    # orderable (e.g. {1, "a"} makes sorted() raise
                    # TypeError, which the old blanket except swallowed,
                    # silently discarding the entire set).
                    try:
                        return sorted(filtered_items)
                    except TypeError:
                        return filtered_items
                return filtered_items

            # Fallback for arbitrary objects: keep only those json.dumps
            # accepts directly.
            return obj if _is_json_serializable(obj) else None
        except Exception:
            # Best-effort: this runs in a logging path and must never
            # raise; an unexpected failure while walking a container
            # drops that branch only.
            return None
        finally:
            # Always release the cycle marker, on every exit path.
            seen.discard(id(obj))

    return _filter(data, set(), 0)

litellm/proxy/_new_secret_config.yaml

Lines changed: 1 addition & 9 deletions
Original file line numberDiff line numberDiff line change
@@ -19,12 +19,4 @@ router_settings:
1919

2020
litellm_settings:
2121
callbacks: ["otel"]
22-
cache: true
23-
cache_params:
24-
type: redis
25-
ttl: 600
26-
supported_call_types: ["acompletion", "completion"]
27-
28-
model_group_settings:
29-
forward_client_headers_to_llm_api:
30-
- fake-openai-endpoint
22+
success_callback: ["braintrust"]

0 commit comments

Comments
 (0)