@@ -4,7 +4,12 @@
 
 import sentry_sdk
 from sentry_sdk.ai.monitoring import set_ai_pipeline_name
-from sentry_sdk.ai.utils import set_data_normalized, get_start_span_function
+from sentry_sdk.ai.utils import (
+    GEN_AI_ALLOWED_MESSAGE_ROLES,
+    normalize_message_roles,
+    set_data_normalized,
+    get_start_span_function,
+)
 from sentry_sdk.consts import OP, SPANDATA
 from sentry_sdk.integrations import DidNotEnable, Integration
 from sentry_sdk.scope import should_send_default_pii
@@ -209,8 +214,18 @@ def on_llm_start(
             _set_tools_on_span(span, all_params.get("tools"))
 
             if should_send_default_pii() and self.include_prompts:
+                normalized_messages = [
+                    {
+                        "role": GEN_AI_ALLOWED_MESSAGE_ROLES.USER,
+                        "content": {"type": "text", "text": prompt},
+                    }
+                    for prompt in prompts
+                ]
                 set_data_normalized(
-                    span, SPANDATA.GEN_AI_REQUEST_MESSAGES, prompts, unpack=False
+                    span,
+                    SPANDATA.GEN_AI_REQUEST_MESSAGES,
+                    normalized_messages,
+                    unpack=False,
                 )
 
     def on_chat_model_start(self, serialized, messages, *, run_id, **kwargs):
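For reference, here is a minimal sketch of the message shape the `on_llm_start` hunk above produces. It assumes `GEN_AI_ALLOWED_MESSAGE_ROLES.USER` resolves to the string `"user"`, and the `build_prompt_messages` helper is illustrative only, not part of the SDK:

```python
# Illustrative stand-in for the list comprehension added in on_llm_start above.
# Assumption: GEN_AI_ALLOWED_MESSAGE_ROLES.USER == "user".
def build_prompt_messages(prompts):
    return [
        {"role": "user", "content": {"type": "text", "text": prompt}}
        for prompt in prompts
    ]


print(build_prompt_messages(["What is the capital of France?"]))
# [{'role': 'user', 'content': {'type': 'text', 'text': 'What is the capital of France?'}}]
```

Plain string prompts thus land on the span in the same role/content structure as chat messages, rather than as a bare list of strings.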
@@ -262,6 +277,8 @@ def on_chat_model_start(self, serialized, messages, *, run_id, **kwargs):
                     normalized_messages.append(
                         self._normalize_langchain_message(message)
                     )
+                normalized_messages = normalize_message_roles(normalized_messages)
+
                 set_data_normalized(
                     span,
                     SPANDATA.GEN_AI_REQUEST_MESSAGES,
@@ -740,8 +757,12 @@ def new_invoke(self, *args, **kwargs):
                 and should_send_default_pii()
                 and integration.include_prompts
             ):
+                normalized_messages = normalize_message_roles([input])
                 set_data_normalized(
-                    span, SPANDATA.GEN_AI_REQUEST_MESSAGES, [input], unpack=False
+                    span,
+                    SPANDATA.GEN_AI_REQUEST_MESSAGES,
+                    normalized_messages,
+                    unpack=False,
                 )
 
             output = result.get("output")
@@ -791,8 +812,12 @@ def new_stream(self, *args, **kwargs):
                 and should_send_default_pii()
                 and integration.include_prompts
             ):
+                normalized_messages = normalize_message_roles([input])
                 set_data_normalized(
-                    span, SPANDATA.GEN_AI_REQUEST_MESSAGES, [input], unpack=False
+                    span,
+                    SPANDATA.GEN_AI_REQUEST_MESSAGES,
+                    normalized_messages,
+                    unpack=False,
                 )
 
             # Run the agent
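In the `new_invoke` and `new_stream` wrappers above, the raw agent `input` is wrapped in a list and passed through `normalize_message_roles` before being attached to the span. The sketch below only illustrates the general idea of role normalization, coercing LangChain-style roles such as `"human"` and `"ai"` to the allowed `"user"`/`"assistant"` values; the actual mapping in `sentry_sdk.ai.utils` is an assumption here and may differ:

```python
# Hypothetical sketch of role normalization; not the SDK's implementation.
# Assumption: unknown roles and non-dict entries pass through unchanged.
ASSUMED_ROLE_MAP = {"human": "user", "ai": "assistant"}


def normalize_message_roles_sketch(messages):
    normalized = []
    for message in messages:
        if isinstance(message, dict) and "role" in message:
            role = message["role"]
            message = {**message, "role": ASSUMED_ROLE_MAP.get(role, role)}
        normalized.append(message)
    return normalized


print(normalize_message_roles_sketch(
    [{"role": "human", "content": "hi"}, {"role": "ai", "content": "hello"}]
))
# [{'role': 'user', 'content': 'hi'}, {'role': 'assistant', 'content': 'hello'}]
```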