@@ -1,15 +1,14 @@
 from functools import wraps
 
-from sentry_sdk import consts
-from sentry_sdk.ai.monitoring import record_token_usage
-from sentry_sdk.ai.utils import set_data_normalized
-from sentry_sdk.consts import SPANDATA
 
 from typing import Any, Iterable, Callable
 
 import sentry_sdk
-from sentry_sdk.scope import should_send_default_pii
+from sentry_sdk.ai.monitoring import record_token_usage
+from sentry_sdk.ai.utils import set_data_normalized
+from sentry_sdk.consts import OP, SPANDATA
 from sentry_sdk.integrations import DidNotEnable, Integration
+from sentry_sdk.scope import should_send_default_pii
 from sentry_sdk.utils import (
     capture_internal_exceptions,
     event_from_exception,
@@ -34,6 +33,8 @@ def __init__(self, include_prompts=True):
     @staticmethod
     def setup_once():
         # type: () -> None
+
+        # Other tasks that can be called: https://huggingface.co/docs/huggingface_hub/guides/inference#supported-providers-and-tasks
         huggingface_hub.inference._client.InferenceClient.text_generation = (
             _wrap_text_generation(
                 huggingface_hub.inference._client.InferenceClient.text_generation
@@ -70,15 +71,22 @@ def new_text_generation(*args, **kwargs):
             # invalid call, let it return error
             return f(*args, **kwargs)
 
-        model = kwargs.get("model")
+        client = args[0]
+        model = client.model or kwargs.get("model") or ""
         streaming = kwargs.get("stream")
 
         span = sentry_sdk.start_span(
-            op=consts.OP.HUGGINGFACE_HUB_CHAT_COMPLETIONS_CREATE,
-            name="Text Generation",
+            op=OP.GEN_AI_GENERATE_TEXT,
+            name=f"generate_text {model}",
             origin=HuggingfaceHubIntegration.origin,
         )
         span.__enter__()
+
+        span.set_data(SPANDATA.GEN_AI_OPERATION_NAME, "generate_text")
+        if model:
+            span.set_data(SPANDATA.GEN_AI_REQUEST_MODEL, model)
+        span.set_data(SPANDATA.GEN_AI_SYSTEM, "TODO!!!!!")
+
         try:
             res = f(*args, **kwargs)
         except Exception as e:
@@ -88,16 +96,15 @@ def new_text_generation(*args, **kwargs):
 
         with capture_internal_exceptions():
             if should_send_default_pii() and integration.include_prompts:
-                set_data_normalized(span, SPANDATA.AI_INPUT_MESSAGES, prompt)
+                set_data_normalized(span, SPANDATA.GEN_AI_REQUEST_MESSAGES, prompt)
 
-            set_data_normalized(span, SPANDATA.AI_MODEL_ID, model)
-            set_data_normalized(span, SPANDATA.AI_STREAMING, streaming)
+            span.set_data(SPANDATA.GEN_AI_RESPONSE_STREAMING, streaming)
 
             if isinstance(res, str):
                 if should_send_default_pii() and integration.include_prompts:
                     set_data_normalized(
                         span,
-                        SPANDATA.AI_RESPONSES,
+                        SPANDATA.GEN_AI_RESPONSE_TEXT,
                         [res],
                     )
                 span.__exit__(None, None, None)
@@ -107,7 +114,7 @@ def new_text_generation(*args, **kwargs):
                 if should_send_default_pii() and integration.include_prompts:
                     set_data_normalized(
                         span,
-                        SPANDATA.AI_RESPONSES,
+                        SPANDATA.GEN_AI_RESPONSE_TEXT,
                         [res.generated_text],
                     )
                 if res.details is not None and res.details.generated_tokens > 0:
@@ -120,7 +127,6 @@ def new_text_generation(*args, **kwargs):
 
         if not isinstance(res, Iterable):
             # we only know how to deal with strings and iterables, ignore
-            set_data_normalized(span, "unknown_response", True)
             span.__exit__(None, None, None)
             return res
 
@@ -145,7 +151,7 @@ def new_details_iterator():
                         and integration.include_prompts
                     ):
                         set_data_normalized(
-                            span, SPANDATA.AI_RESPONSES, "".join(data_buf)
+                            span, SPANDATA.GEN_AI_RESPONSE_TEXT, "".join(data_buf)
                         )
                     if tokens_used > 0:
                         record_token_usage(
@@ -172,7 +178,7 @@ def new_iterator():
                         and integration.include_prompts
                     ):
                         set_data_normalized(
-                            span, SPANDATA.AI_RESPONSES, "".join(data_buf)
+                            span, SPANDATA.GEN_AI_RESPONSE_TEXT, "".join(data_buf)
                        )
                     span.__exit__(None, None, None)
 
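For context, a minimal sketch (not part of this PR) of how the patched text_generation call might be exercised end to end. The DSN and model name are placeholders; send_default_pii and include_prompts are the flags that gate whether prompts and response text are attached to the span, as in the code above.

# Sketch only: assumes the standard sentry_sdk.init pattern and a placeholder DSN/model.
import sentry_sdk
from sentry_sdk.integrations.huggingface_hub import HuggingfaceHubIntegration
from huggingface_hub import InferenceClient

sentry_sdk.init(
    dsn="<your-dsn>",                # placeholder
    traces_sample_rate=1.0,          # record transactions so the generate_text spans are sent
    send_default_pii=True,           # required for should_send_default_pii() to be true
    integrations=[HuggingfaceHubIntegration(include_prompts=True)],
)

client = InferenceClient(model="HuggingFaceH4/zephyr-7b-beta")
with sentry_sdk.start_transaction(name="hf-demo"):
    # text_generation() is patched by setup_once(), so this call produces a
    # "generate_text <model>" span carrying the new GEN_AI_* data fields.
    text = client.text_generation("What is the capital of France?")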