This repository was archived by the owner on Aug 5, 2025. It is now read-only.

Commit de701d1

Merge pull request #141 from Chainlit/willy/fix-lc-structured-output
fix: lc structured output json ser
2 parents 307dcbf + 03ae87c commit de701d1


4 files changed (+11, -4 lines)


literalai/callback/langchain_callback.py

Lines changed: 8 additions & 1 deletion
@@ -2,6 +2,8 @@
 from importlib.metadata import version
 from typing import TYPE_CHECKING, Any, Dict, List, Optional, TypedDict, Union, cast
 
+from pydantic import BaseModel
+
 from literalai.helper import ensure_values_serializable
 from literalai.observability.generation import (
     ChatGeneration,
@@ -148,6 +150,8 @@ def process_content(self, content: Any, root=True):
             return [self._convert_message(m) for m in content]
         elif self._is_message(content):
             return self._convert_message(content)
+        elif isinstance(content, BaseModel):
+            return content.model_dump()
         elif isinstance(content, dict):
             processed_dict = {}
             for key, value in content.items():
@@ -186,7 +190,9 @@ def _build_llm_settings(
         }
 
         # make sure there is no api key specification
-        settings = {k: v for k, v in merged.items() if not k.endswith("_api_key")}
+        settings = self.process_content(
+            {k: v for k, v in merged.items() if not k.endswith("_api_key")}
+        )
         model_keys = ["azure_deployment", "deployment_name", "model", "model_name"]
         model = next((settings[k] for k in model_keys if k in settings), None)
         tools = None
@@ -203,6 +209,7 @@ def _build_llm_settings(
     "RunnableParallel",
     "RunnableAssign",
     "RunnableLambda",
+    "structured_outputs_parser",
     "<lambda>",
 ]
 DEFAULT_TO_KEEP = ["retriever", "llm", "agent", "chain", "tool"]
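
The `process_content` change above matters because pydantic models, which LangChain returns for structured outputs, are not handled by Python's default JSON encoder. Below is a minimal sketch of the failure mode and of the `model_dump()` conversion the new branch applies; the `StructuredAnswer` model is illustrative, not part of the SDK:

```python
import json

from pydantic import BaseModel


class StructuredAnswer(BaseModel):
    # Illustrative stand-in for a LangChain structured-output result;
    # not a class from literalai or langchain.
    answer: str
    confidence: float


content = StructuredAnswer(answer="42", confidence=0.9)

# json.dumps(content) would raise:
#   TypeError: Object of type StructuredAnswer is not JSON serializable

# Dumping the model to a plain dict first, as the new
# `isinstance(content, BaseModel)` branch does, keeps the payload serializable.
print(json.dumps(content.model_dump()))
# {"answer": "42", "confidence": 0.9}
```

Routing the merged LLM settings through `process_content` in `_build_llm_settings` extends the same conversion to any pydantic objects that may end up in the invocation parameters.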

literalai/event_processor.py

Lines changed: 1 addition & 1 deletion
@@ -32,6 +32,7 @@ class EventProcessor:
     batch_timeout: float = 5.0
 
     def __init__(self, api: "LiteralAPI", batch_size: int = 1, disabled: bool = False):
+        self.stop_event = threading.Event()
         self.batch_size = batch_size
         self.api = api
         self.event_queue = queue.Queue()
@@ -44,7 +45,6 @@ def __init__(self, api: "LiteralAPI", batch_size: int = 1, disabled: bool = False):
         )
         if not self.disabled:
             self.processing_thread.start()
-            self.stop_event = threading.Event()
 
     def add_event(self, event: "StepDict"):
         with self.counter_lock:
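
The `event_processor.py` change reorders initialization so that `stop_event` is assigned before the worker thread starts; previously the attribute was created only after `start()` and only when the processor was not disabled, so a thread that checked `self.stop_event` right away could hit an `AttributeError`. A minimal sketch of that initialization-order hazard, using a hypothetical `Worker` class rather than the SDK's `EventProcessor`:

```python
import threading
import time


class Worker:
    # Hypothetical example; not the SDK's EventProcessor.
    def __init__(self, create_event_first: bool = True):
        if create_event_first:
            # Safe ordering (what the fix does): the event exists
            # before the thread can read it.
            self.stop_event = threading.Event()
        self.thread = threading.Thread(target=self._run, daemon=True)
        self.thread.start()
        if not create_event_first:
            # Racy ordering (the old code): the thread may reach
            # self.stop_event before this assignment runs.
            self.stop_event = threading.Event()

    def _run(self):
        # Raises AttributeError if stop_event is assigned too late.
        while not self.stop_event.is_set():
            time.sleep(0.01)


w = Worker(create_event_first=True)
w.stop_event.set()
w.thread.join()
```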

literalai/version.py

Lines changed: 1 addition & 1 deletion
@@ -1 +1 @@
-__version__ = "0.0.628"
+__version__ = "0.0.629"

setup.py

Lines changed: 1 addition & 1 deletion
@@ -2,7 +2,7 @@
 
 setup(
     name="literalai",
-    version="0.0.628", # update version in literalai/version.py
+    version="0.0.629", # update version in literalai/version.py
     description="An SDK for observability in Python applications",
     long_description=open("README.md").read(),
     long_description_content_type="text/markdown",
