@@ -384,6 +384,42 @@ async def _build_content_and_config(
         )
         return contents, config
 
+    @staticmethod
+    def _map_finish_reason_to_otel(raw: str | None) -> str | None:
+        """Map provider-specific finish reasons to OpenTelemetry gen_ai.response.finish_reasons values.
+
+        Only returns a value if it matches a known OTEL value; otherwise returns None.
+        """
+        if raw is None:
+            return None
+        upper = raw.upper()
+        # Known mappings for Google Gemini
+        if upper == 'STOP':
+            return 'stop'
+        if upper in {'MAX_TOKENS', 'MAX_OUTPUT_TOKENS'}:
+            return 'length'
+        if upper in {'SAFETY', 'BLOCKLIST', 'PROHIBITED_CONTENT', 'SPII'}:
+            return 'content_filter'
+        # Unknown or provider-specific value - do not set
+        return None
+
+    def _finish_reason_details(
+        self, finish_reason: Any, vendor_id: str | None
+    ) -> tuple[dict[str, Any] | None, str | None]:
+        """Build provider_details and mapped OTEL finish_reason from a provider finish reason.
+
+        Returns a tuple of (provider_details, mapped_finish_reason).
+        """
+        details: dict[str, Any] = {}
+        mapped_finish_reason: str | None = None
+        if finish_reason is not None:
+            raw_finish_reason = getattr(finish_reason, 'value', str(finish_reason))
+            details['finish_reason'] = raw_finish_reason
+            mapped_finish_reason = self._map_finish_reason_to_otel(raw_finish_reason)
+        if vendor_id:
+            details['provider_response_id'] = vendor_id
+        return (details or None), mapped_finish_reason
+
     def _process_response(self, response: GenerateContentResponse) -> ModelResponse:
         if not response.candidates or len(response.candidates) != 1:
             raise UnexpectedModelBehavior('Expected exactly one candidate in Gemini response')  # pragma: no cover
@@ -397,10 +433,7 @@ def _process_response(self, response: GenerateContentResponse) -> ModelResponse:
                 )  # pragma: no cover
         parts = candidate.content.parts or []
         vendor_id = response.response_id or None
-        vendor_details: dict[str, Any] | None = None
-        finish_reason = candidate.finish_reason
-        if finish_reason:  # pragma: no branch
-            vendor_details = {'finish_reason': finish_reason.value}
+        vendor_details, mapped_finish_reason = self._finish_reason_details(candidate.finish_reason, vendor_id)
         usage = _metadata_as_usage(response)
         return _process_response_from_parts(
             parts,
@@ -409,6 +442,7 @@ def _process_response(self, response: GenerateContentResponse) -> ModelResponse:
             usage,
             vendor_id=vendor_id,
             vendor_details=vendor_details,
+            finish_reason=mapped_finish_reason,
         )
 
     async def _process_streamed_response(
@@ -543,6 +577,11 @@ async def _get_event_iterator(self) -> AsyncIterator[ModelResponseStreamEvent]:
 
             assert chunk.candidates is not None
             candidate = chunk.candidates[0]
+
+            # Capture mapped finish_reason if provided by the candidate
+            if self.finish_reason is None and candidate.finish_reason is not None:
+                raw_fr = getattr(candidate.finish_reason, 'value', str(candidate.finish_reason))
+                self.finish_reason = GoogleModel._map_finish_reason_to_otel(raw_fr)
             if candidate.content is None or candidate.content.parts is None:
                 if candidate.finish_reason == 'STOP':  # pragma: no cover
                     # Normal completion - skip this chunk
@@ -625,6 +664,7 @@ def _process_response_from_parts(
     usage: usage.RequestUsage,
     vendor_id: str | None,
     vendor_details: dict[str, Any] | None = None,
+    finish_reason: str | None = None,
 ) -> ModelResponse:
     items: list[ModelResponsePart] = []
     for part in parts:
@@ -665,6 +705,7 @@ def _process_response_from_parts(
         provider_response_id=vendor_id,
         provider_details=vendor_details,
         provider_name=provider_name,
+        finish_reason=finish_reason,
     )
 
 
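For context, here is a minimal standalone sketch of how the mapping added above behaves. It is an illustration only, not part of the diff: the real helper is the GoogleModel._map_finish_reason_to_otel staticmethod shown in the first hunk, and the standalone name below is hypothetical.

# Illustrative restatement of the mapping in the diff; assumes only the
# finish-reason strings listed there plus RECITATION as an example of an
# unmapped, provider-specific value.
def map_finish_reason_to_otel(raw: str | None) -> str | None:
    if raw is None:
        return None
    upper = raw.upper()
    if upper == 'STOP':
        return 'stop'
    if upper in {'MAX_TOKENS', 'MAX_OUTPUT_TOKENS'}:
        return 'length'
    if upper in {'SAFETY', 'BLOCKLIST', 'PROHIBITED_CONTENT', 'SPII'}:
        return 'content_filter'
    return None  # unknown values are left unset rather than guessed

assert map_finish_reason_to_otel('STOP') == 'stop'
assert map_finish_reason_to_otel('MAX_TOKENS') == 'length'
assert map_finish_reason_to_otel('RECITATION') is None  # not a known OTEL value, so not set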