@@ -1,5 +1,4 @@
 from functools import wraps
-import json

 import sentry_sdk
 from sentry_sdk import consts
@@ -223,6 +222,16 @@ def _set_response_data(span, response, kwargs, integration):
                 safe_serialize(response_text),
             )

+    elif hasattr(response, "output"):
+        if should_send_default_pii() and integration.include_prompts:
+            response_text = [item.to_dict() for item in response.output]
+            if len(response_text) > 0:
+                set_data_normalized(
+                    span,
+                    SPANDATA.GEN_AI_RESPONSE_TEXT,
+                    safe_serialize(response_text),
+                )
+
     elif hasattr(response, "_iterator"):
         data_buf: list[list[str]] = []  # one for each choice

@@ -499,39 +508,20 @@ def _new_responses_create_common(f, *args, **kwargs):
         return f(*args, **kwargs)

     model = kwargs.get("model")
-    input = kwargs.get("input")
+    operation = "responses"

-    span = sentry_sdk.start_span(
+    with sentry_sdk.start_span(
         op=consts.OP.GEN_AI_RESPONSES,
-        name=f"responses {model}",
+        name=f"{operation} {model}",
         origin=OpenAIIntegration.origin,
-    )
-    span.__enter__()
-
-    set_data_normalized(span, SPANDATA.GEN_AI_SYSTEM, "openai")
-    set_data_normalized(span, SPANDATA.GEN_AI_REQUEST_MODEL, model)
-    set_data_normalized(span, SPANDATA.GEN_AI_OPERATION_NAME, "responses")
-
-    if should_send_default_pii() and integration.include_prompts:
-        set_data_normalized(span, SPANDATA.GEN_AI_REQUEST_MESSAGES, input)
-
-    res = yield f, args, kwargs
-
-    if hasattr(res, "output"):
-        if should_send_default_pii() and integration.include_prompts:
-            set_data_normalized(
-                span,
-                SPANDATA.GEN_AI_RESPONSE_TEXT,
-                json.dumps([item.to_dict() for item in res.output]),
-            )
-        _calculate_token_usage([], res, span, None, integration.count_tokens)
+    ) as span:
+        _set_request_data(span, kwargs, operation, integration)

-    else:
-        set_data_normalized(span, "unknown_response", True)
+        response = yield f, args, kwargs

-    span.__exit__(None, None, None)
+        _set_response_data(span, response, kwargs, integration)

-    return res
+        return response


 def _wrap_responses_create(f):
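For context: the prompt and response capture added above is gated on both should_send_default_pii() and integration.include_prompts. A minimal configuration sketch that satisfies both gates (the DSN is a placeholder, not a real project key):

import sentry_sdk
from sentry_sdk.integrations.openai import OpenAIIntegration

sentry_sdk.init(
    dsn="https://examplePublicKey@o0.ingest.sentry.io/0",  # placeholder DSN
    send_default_pii=True,  # makes should_send_default_pii() evaluate truthy
    integrations=[OpenAIIntegration(include_prompts=True)],
)

With this setup, the responses span records GEN_AI_REQUEST_MESSAGES and GEN_AI_RESPONSE_TEXT; with either flag off, those attributes are skipped.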