Skip to content

Commit fd6268b

Browse files
fix: guard against None model_name in OpenAI endpoint (#1700) (#2358)
Fixes `AttributeError: 'NoneType' object has no attribute 'lower'`, which occurs when `model_name` is `None` (common with Azure OpenAI deployments). The issue manifests when a `None` model_name propagates to langchain's `standardize_model_name()`, which calls `.lower()` on it without null-checking.

Changes:
- `handle_wrapped_call`: check `bindings.arguments` as a fallback; fall back to `response.model` when the model is `None`.
- `_handle_response`: guard `model_name` so it is always a string.
- `handle_generation_chunk`: guard `response.model` with an `or ""` fallback.
- `OpenAICostComputer.handle_response`: guard `first_chunk.model`.

Adds 11 unit tests covering both the non-OTEL and OTEL code paths.

Co-authored-by: sfc-gh-jreini <sfc-gh-jreini@users.noreply.github.com>
1 parent 0e2964b commit fd6268b

File tree

2 files changed

+491
-2
lines changed

2 files changed

+491
-2
lines changed

src/providers/openai/trulens/providers/openai/endpoint.py

Lines changed: 17 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -129,7 +129,7 @@ def handle_response(response: Any) -> Dict[str, Any]:
129129
if hasattr(response, "__iter__") and not hasattr(response, "model"):
130130
try:
131131
first_chunk = next(response)
132-
model_name = first_chunk.model
132+
model_name = first_chunk.model or ""
133133
response = prepend_first_chunk(response, first_chunk)
134134
except Exception:
135135
logger.exception(
@@ -331,7 +331,8 @@ def handle_generation_chunk(self, response: Any) -> None:
331331
if choice.finish_reason == "stop":
332332
llm_result = LLMResult(
333333
llm_output=dict(
334-
token_usage={}, model_name=response.model
334+
token_usage={},
335+
model_name=response.model or "",
335336
),
336337
generations=[self.chunks],
337338
)
@@ -455,6 +456,15 @@ def handle_wrapped_call(
455456
model_name = ""
456457
if "model" in bindings.kwargs:
457458
model_name = bindings.kwargs["model"]
459+
elif "model" in bindings.arguments:
460+
model_name = bindings.arguments["model"]
461+
462+
# Guard against None model_name (e.g. Azure OpenAI
463+
# deployments may not set a model parameter). Fall back to
464+
# response.model if available, otherwise default to "".
465+
if model_name is None:
466+
model_name = getattr(response, "model", None) or ""
467+
458468
callbacks = [self.global_callback]
459469
if callback is not None:
460470
callbacks.append(callback)
@@ -474,6 +484,11 @@ def _handle_response(
474484
# types of calls being handled here, we need to make various checks to
475485
# see what sort of data to process based on the call made.
476486

487+
# Ensure model_name is always a string to prevent
488+
# downstream crashes in langchain's standardize_model_name.
489+
if model_name is None:
490+
model_name = ""
491+
477492
# Generic lazy value should have already been taken care of by base Endpoint class.
478493
assert not python_utils.is_lazy(response)
479494

0 commit comments

Comments
 (0)