|
5 | 5 | import sentry_sdk |
6 | 6 | from sentry_sdk.ai.monitoring import set_ai_pipeline_name |
7 | 7 | from sentry_sdk.ai.utils import set_data_normalized, get_start_span_function |
| 8 | +from sentry_sdk.ai.message_utils import truncate_and_serialize_messages |
8 | 9 | from sentry_sdk.consts import OP, SPANDATA |
9 | 10 | from sentry_sdk.integrations import DidNotEnable, Integration |
10 | 11 | from sentry_sdk.scope import should_send_default_pii |
@@ -209,9 +210,11 @@ def on_llm_start( |
209 | 210 | _set_tools_on_span(span, all_params.get("tools")) |
210 | 211 |
|
211 | 212 | if should_send_default_pii() and self.include_prompts: |
212 | | - set_data_normalized( |
213 | | - span, SPANDATA.GEN_AI_REQUEST_MESSAGES, prompts, unpack=False |
214 | | - ) |
| 213 | + result = truncate_and_serialize_messages(prompts) |
| 214 | + if result["serialized_data"]: |
| 215 | + span.set_data( |
| 216 | + SPANDATA.GEN_AI_REQUEST_MESSAGES, result["serialized_data"] |
| 217 | + ) |
215 | 218 |
|
216 | 219 | def on_chat_model_start(self, serialized, messages, *, run_id, **kwargs): |
217 | 220 | # type: (SentryLangchainCallback, Dict[str, Any], List[List[BaseMessage]], UUID, Any) -> Any |
@@ -262,12 +265,11 @@ def on_chat_model_start(self, serialized, messages, *, run_id, **kwargs): |
262 | 265 | normalized_messages.append( |
263 | 266 | self._normalize_langchain_message(message) |
264 | 267 | ) |
265 | | - set_data_normalized( |
266 | | - span, |
267 | | - SPANDATA.GEN_AI_REQUEST_MESSAGES, |
268 | | - normalized_messages, |
269 | | - unpack=False, |
270 | | - ) |
| 268 | + result = truncate_and_serialize_messages(normalized_messages) |
| 269 | + if result["serialized_data"]: |
| 270 | + span.set_data( |
| 271 | + SPANDATA.GEN_AI_REQUEST_MESSAGES, result["serialized_data"] |
| 272 | + ) |
271 | 273 |
|
272 | 274 | def on_chat_model_end(self, response, *, run_id, **kwargs): |
273 | 275 | # type: (SentryLangchainCallback, LLMResult, UUID, Any) -> Any |
@@ -740,9 +742,11 @@ def new_invoke(self, *args, **kwargs): |
740 | 742 | and should_send_default_pii() |
741 | 743 | and integration.include_prompts |
742 | 744 | ): |
743 | | - set_data_normalized( |
744 | | - span, SPANDATA.GEN_AI_REQUEST_MESSAGES, [input], unpack=False |
745 | | - ) |
| 745 | +    truncation_result = truncate_and_serialize_messages([input]) |
| 746 | +    if truncation_result["serialized_data"]: |
| 747 | +        span.set_data( |
| 748 | +            SPANDATA.GEN_AI_REQUEST_MESSAGES, truncation_result["serialized_data"] |
| 749 | +        ) |
746 | 750 |
|
747 | 751 | output = result.get("output") |
748 | 752 | if ( |
@@ -791,9 +795,11 @@ def new_stream(self, *args, **kwargs): |
791 | 795 | and should_send_default_pii() |
792 | 796 | and integration.include_prompts |
793 | 797 | ): |
794 | | - set_data_normalized( |
795 | | - span, SPANDATA.GEN_AI_REQUEST_MESSAGES, [input], unpack=False |
796 | | - ) |
| 798 | +    truncation_result = truncate_and_serialize_messages([input]) |
| 799 | +    if truncation_result["serialized_data"]: |
| 800 | +        span.set_data( |
| 801 | +            SPANDATA.GEN_AI_REQUEST_MESSAGES, truncation_result["serialized_data"] |
| 802 | +        ) |
797 | 803 |
|
798 | 804 | # Run the agent |
799 | 805 | result = f(self, *args, **kwargs) |
|
0 commit comments