logfire/_internal/integrations/llm_providers (1 file changed, +4 -2 lines)

 from openai.types.create_embedding_response import CreateEmbeddingResponse
 from openai.types.images_response import ImagesResponse
 
+from ...utils import handle_internal_errors
 from .types import EndpointConfig, StreamState
 
 if TYPE_CHECKING:
@@ -113,18 +114,19 @@ def get_response_data(self) -> Any:
     OpenaiChatCompletionStreamState = OpenaiCompletionStreamState  # type: ignore
 
 
+@handle_internal_errors
 def on_response(response: ResponseT, span: LogfireSpan) -> ResponseT:
     """Updates the span based on the type of response."""
     if isinstance(response, LegacyAPIResponse):  # pragma: no cover
         on_response(response.parse(), span)  # type: ignore
         return cast('ResponseT', response)
 
-    if isinstance(response, ChatCompletion):
+    if isinstance(response, ChatCompletion) and response.choices:
         span.set_attribute(
             'response_data',
             {'message': response.choices[0].message, 'usage': response.usage},
         )
-    elif isinstance(response, Completion):
+    elif isinstance(response, Completion) and response.choices:
         first_choice = response.choices[0]
         span.set_attribute(
             'response_data',
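
The change combines two defensive measures: the response handler is wrapped so that errors inside the instrumentation are logged rather than raised into the caller, and `response.choices` is checked before indexing so an empty list no longer triggers an IndexError. Below is a minimal, self-contained sketch of that pattern; `swallow_internal_errors` and `record_first_choice` are illustrative names and assumed behaviour, not logfire's actual `handle_internal_errors` or `on_response` implementation.

# Sketch of the defensive pattern above (illustrative names, assumed behaviour).
import functools
import logging

logger = logging.getLogger(__name__)


def swallow_internal_errors(func):
    """Hypothetical stand-in for handle_internal_errors: never let instrumentation errors reach user code."""
    @functools.wraps(func)
    def wrapper(*args, **kwargs):
        try:
            return func(*args, **kwargs)
        except Exception:
            # Log for the library maintainers, but do not propagate into the user's OpenAI call.
            logger.exception('error while instrumenting %s', func.__name__)
            return None
    return wrapper


@swallow_internal_errors
def record_first_choice(response, span):
    # Mirrors the `and response.choices` guard from the diff: a response can
    # arrive with an empty `choices` list, in which case `response.choices[0]`
    # would raise IndexError before any attribute is recorded.
    if response.choices:
        span.set_attribute(
            'response_data',
            {'message': response.choices[0].message, 'usage': response.usage},
        )
    return response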