Skip to content
This repository was archived by the owner on Aug 5, 2025. It is now read-only.

Commit b1cb490

Browse files
committed
feat: add reasoning token support to lc
1 parent 0efad4e commit b1cb490

File tree

1 file changed

+64
-7
lines changed

1 file changed

+64
-7
lines changed

literalai/callback/langchain_callback.py

Lines changed: 64 additions & 7 deletions
Original file line number | Diff line number | Diff line change
@@ -92,7 +92,33 @@ def _convert_message_dict(
9292
if function_call:
9393
msg["function_call"] = function_call
9494
else:
95-
msg["content"] = kwargs.get("content", "")
95+
content = kwargs.get("content")
96+
if isinstance(content, list):
97+
tool_calls = []
98+
content_parts = []
99+
for item in content:
100+
if item.get("type") == "tool_use":
101+
tool_calls.append(
102+
{
103+
"id": item.get("id"),
104+
"type": "function",
105+
"function": {
106+
"name": item.get("name"),
107+
"arguments": item.get("input"),
108+
},
109+
}
110+
)
111+
elif item.get("type") == "text":
112+
content_parts.append(
113+
{"type": "text", "text": item.get("text")}
114+
)
115+
116+
if tool_calls:
117+
msg["tool_calls"] = tool_calls
118+
if content_parts:
119+
msg["content"] = content_parts # type: ignore
120+
else:
121+
msg["content"] = content # type: ignore
96122

97123
if tool_calls:
98124
msg["tool_calls"] = tool_calls
@@ -123,7 +149,32 @@ def _convert_message(
123149
if function_call:
124150
msg["function_call"] = function_call
125151
else:
126-
msg["content"] = message.content # type: ignore
152+
if isinstance(message.content, list):
153+
tool_calls = []
154+
content_parts = []
155+
for item in message.content:
156+
if item.get("type") == "tool_use":
157+
tool_calls.append(
158+
{
159+
"id": item.get("id"),
160+
"type": "function",
161+
"function": {
162+
"name": item.get("name"),
163+
"arguments": item.get("input"),
164+
},
165+
}
166+
)
167+
elif item.get("type") == "text":
168+
content_parts.append(
169+
{"type": "text", "text": item.get("text")}
170+
)
171+
172+
if tool_calls:
173+
msg["tool_calls"] = tool_calls
174+
if content_parts:
175+
msg["content"] = content_parts # type: ignore
176+
else:
177+
msg["content"] = message.content # type: ignore
127178

128179
if tool_calls:
129180
msg["tool_calls"] = tool_calls
@@ -201,7 +252,12 @@ def _build_llm_settings(
201252
{"type": "function", "function": f} for f in settings["functions"]
202253
]
203254
if "tools" in settings:
204-
tools = settings["tools"]
255+
tools = [
256+
{"type": "function", "function": t}
257+
if t.get("type") != "function"
258+
else t
259+
for t in settings["tools"]
260+
]
205261
return provider, model, tools, settings
206262

207263
DEFAULT_TO_IGNORE = [
@@ -411,7 +467,9 @@ def _start_trace(self, run: Run) -> None:
411467
)
412468
step.tags = run.tags
413469
step.metadata = run.metadata
414-
step.input = self.process_content(run.inputs)
470+
471+
if step.type != "llm":
472+
step.input = self.process_content(run.inputs)
415473

416474
self.steps[str(run.id)] = step
417475

@@ -484,7 +542,6 @@ def _on_run_update(self, run: Run) -> None:
484542
if v is not None
485543
}
486544

487-
current_step.output = message_completion
488545
else:
489546
completion_start = self.completion_generations[str(run.id)]
490547
duration = time.time() - completion_start["start"]
@@ -509,7 +566,6 @@ def _on_run_update(self, run: Run) -> None:
509566
output_token_count=usage_metadata.get("output_tokens"),
510567
token_count=usage_metadata.get("total_tokens"),
511568
)
512-
current_step.output = {"content": completion}
513569

514570
if current_step:
515571
if current_step.metadata is None:
@@ -521,7 +577,8 @@ def _on_run_update(self, run: Run) -> None:
521577
outputs = run.outputs or {}
522578

523579
if current_step:
524-
current_step.output = self.process_content(outputs)
580+
if current_step.type != "llm":
581+
current_step.output = self.process_content(outputs)
525582
current_step.end()
526583

527584
def _on_error(self, error: BaseException, *, run_id: "UUID", **kwargs: Any):

0 commit comments

Comments (0)