
Commit 4c04d1f

Merge pull request #446 from Scale3-Labs/ali/fix-llm-config-autogen
handle `llm_config` different cases
2 parents: 660de3e + 2b723a5

File tree

2 files changed: +33 -15 lines


src/langtrace_python_sdk/instrumentation/autogen/patch.py

Lines changed: 32 additions & 14 deletions
@@ -56,12 +56,8 @@ def traced_method(wrapped, instance, args, kwargs):
 def patch_generate_reply(name, version, tracer: Tracer):
 
     def traced_method(wrapped, instance, args, kwargs):
-
         llm_config = instance.llm_config
-        kwargs = {
-            **kwargs,
-            **llm_config.get("config_list")[0],
-        }
+        kwargs = parse_kwargs(kwargs, llm_config)
         service_provider = SERVICE_PROVIDERS["AUTOGEN"]
 
         span_attributes = {
@@ -84,20 +80,25 @@ def traced_method(wrapped, instance, args, kwargs):
         try:
 
             result = wrapped(*args, **kwargs)
-
+
             # if caching is disabled, return result as langtrace will instrument the rest.
-            if "cache_seed" in llm_config and llm_config.get("cache_seed") is None:
+            if (
+                llm_config
+                and "cache_seed" in llm_config
+                and llm_config.get("cache_seed") is None
+            ):
                 return result
 
             set_span_attributes(span, attributes)
             set_event_completion(span, [{"role": "assistant", "content": result}])
-            total_cost, response_model = list(instance.get_total_usage().keys())
-            set_span_attribute(
-                span, SpanAttributes.LLM_RESPONSE_MODEL, response_model
-            )
-            set_usage_attributes(
-                span, instance.get_total_usage().get(response_model)
-            )
+            if llm_config:
+                total_cost, response_model = list(instance.get_total_usage().keys())
+                set_span_attribute(
+                    span, SpanAttributes.LLM_RESPONSE_MODEL, response_model
+                )
+                set_usage_attributes(
+                    span, instance.get_total_usage().get(response_model)
+                )
 
             return result
 
@@ -130,3 +131,20 @@ def parse_agent(agent):
         "llm_config": str(getattr(agent, "llm_config", None)),
         "human_input_mode": getattr(agent, "human_input_mode", None),
     }
+
+
+def parse_kwargs(kwargs, llm_config):
+    # Handle cases where llm_config is False or None
+    if not llm_config:
+        return kwargs
+
+    if isinstance(llm_config, dict) and "config_list" in llm_config:
+        return {
+            **kwargs,
+            **llm_config.get("config_list")[0],
+        }
+    else:
+        return {
+            **kwargs,
+            **llm_config,
+        }
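
For reference, the new `parse_kwargs` helper covers the three shapes an AutoGen agent's `llm_config` can take: disabled (`False` or `None`), a dict carrying a `config_list`, and a plain dict of model settings. Below is a minimal, self-contained sketch of that behaviour; the helper body is copied from the diff above, while the sample kwargs and configs are made-up illustration values, not part of the commit.

# Standalone sketch: same helper as in the diff, exercised with hypothetical configs.

def parse_kwargs(kwargs, llm_config):
    # Handle cases where llm_config is False or None
    if not llm_config:
        return kwargs

    if isinstance(llm_config, dict) and "config_list" in llm_config:
        return {
            **kwargs,
            **llm_config.get("config_list")[0],
        }
    else:
        return {
            **kwargs,
            **llm_config,
        }


base_kwargs = {"messages": [{"role": "user", "content": "hi"}]}

# Case 1: llm_config disabled -> kwargs pass through unchanged.
print(parse_kwargs(base_kwargs, False))

# Case 2: dict with a "config_list" -> the first entry is merged into kwargs.
print(parse_kwargs(base_kwargs, {"config_list": [{"model": "gpt-4o-mini"}], "cache_seed": None}))

# Case 3: plain dict without "config_list" -> the dict itself is merged.
print(parse_kwargs(base_kwargs, {"model": "gpt-4o-mini", "temperature": 0.2}))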
Lines changed: 1 addition & 1 deletion
@@ -1 +1 @@
-__version__ = "3.3.18"
+__version__ = "3.3.19"
