@@ -12,8 +12,8 @@ describe('Anthropic integration', () => {
       // First span - basic message completion without PII
       expect.objectContaining({
         data: {
-          'gen_ai.operation.name': 'messages.create',
-          'sentry.op': 'gen_ai.messages.create',
+          'gen_ai.operation.name': 'messages',
+          'sentry.op': 'gen_ai.messages',
           'sentry.origin': 'auto.ai.anthropic',
           'gen_ai.system': 'anthropic',
           'gen_ai.request.model': 'claude-3-haiku-20240307',
@@ -25,53 +25,53 @@ describe('Anthropic integration', () => {
           'gen_ai.usage.output_tokens': 15,
           'gen_ai.usage.total_tokens': 25,
         },
-        description: 'messages.create claude-3-haiku-20240307',
-        op: 'gen_ai.messages.create',
+        description: 'messages claude-3-haiku-20240307',
+        op: 'gen_ai.messages',
         origin: 'auto.ai.anthropic',
         status: 'ok',
       }),
       // Second span - error handling
       expect.objectContaining({
         data: {
-          'gen_ai.operation.name': 'messages.create',
-          'sentry.op': 'gen_ai.messages.create',
+          'gen_ai.operation.name': 'messages',
+          'sentry.op': 'gen_ai.messages',
           'sentry.origin': 'auto.ai.anthropic',
           'gen_ai.system': 'anthropic',
           'gen_ai.request.model': 'error-model',
         },
-        description: 'messages.create error-model',
-        op: 'gen_ai.messages.create',
+        description: 'messages error-model',
+        op: 'gen_ai.messages',
         origin: 'auto.ai.anthropic',
         status: 'unknown_error',
       }),
       // Third span - token counting (no response.text because recordOutputs=false by default)
       expect.objectContaining({
         data: {
-          'gen_ai.operation.name': 'messages.countTokens',
-          'sentry.op': 'gen_ai.messages.countTokens',
+          'gen_ai.operation.name': 'messages',
+          'sentry.op': 'gen_ai.messages',
           'sentry.origin': 'auto.ai.anthropic',
           'gen_ai.system': 'anthropic',
           'gen_ai.request.model': 'claude-3-haiku-20240307',
         },
-        description: 'messages.countTokens claude-3-haiku-20240307',
-        op: 'gen_ai.messages.countTokens',
+        description: 'messages claude-3-haiku-20240307',
+        op: 'gen_ai.messages',
         origin: 'auto.ai.anthropic',
         status: 'ok',
       }),
       // Fourth span - models.retrieve
       expect.objectContaining({
         data: {
           'anthropic.response.timestamp': '2024-05-08T05:20:00.000Z',
-          'gen_ai.operation.name': 'retrieve',
-          'sentry.op': 'gen_ai.retrieve',
+          'gen_ai.operation.name': 'models',
+          'sentry.op': 'gen_ai.models',
           'sentry.origin': 'auto.ai.anthropic',
           'gen_ai.system': 'anthropic',
           'gen_ai.request.model': 'claude-3-haiku-20240307',
           'gen_ai.response.id': 'claude-3-haiku-20240307',
           'gen_ai.response.model': 'claude-3-haiku-20240307',
         },
-        description: 'retrieve claude-3-haiku-20240307',
-        op: 'gen_ai.retrieve',
+        description: 'models claude-3-haiku-20240307',
+        op: 'gen_ai.models',
         origin: 'auto.ai.anthropic',
         status: 'ok',
       }),
@@ -84,8 +84,8 @@ describe('Anthropic integration', () => {
       // First span - basic message completion with PII
       expect.objectContaining({
         data: {
-          'gen_ai.operation.name': 'messages.create',
-          'sentry.op': 'gen_ai.messages.create',
+          'gen_ai.operation.name': 'messages',
+          'sentry.op': 'gen_ai.messages',
           'sentry.origin': 'auto.ai.anthropic',
           'gen_ai.system': 'anthropic',
           'gen_ai.request.model': 'claude-3-haiku-20240307',
@@ -99,57 +99,56 @@ describe('Anthropic integration', () => {
           'gen_ai.usage.output_tokens': 15,
           'gen_ai.usage.total_tokens': 25,
         },
-        description: 'messages.create claude-3-haiku-20240307',
-        op: 'gen_ai.messages.create',
+        description: 'messages claude-3-haiku-20240307',
+        op: 'gen_ai.messages',
         origin: 'auto.ai.anthropic',
         status: 'ok',
       }),
       // Second span - error handling with PII
       expect.objectContaining({
         data: {
-          'gen_ai.operation.name': 'messages.create',
-          'sentry.op': 'gen_ai.messages.create',
+          'gen_ai.operation.name': 'messages',
+          'sentry.op': 'gen_ai.messages',
           'sentry.origin': 'auto.ai.anthropic',
           'gen_ai.system': 'anthropic',
           'gen_ai.request.model': 'error-model',
           'gen_ai.request.messages': '[{"role":"user","content":"This will fail"}]',
         },
-        description: 'messages.create error-model',
-        op: 'gen_ai.messages.create',
-
+        description: 'messages error-model',
+        op: 'gen_ai.messages',
         origin: 'auto.ai.anthropic',
         status: 'unknown_error',
       }),
       // Third span - token counting with PII (response.text is present because sendDefaultPii=true enables recordOutputs)
       expect.objectContaining({
         data: {
-          'gen_ai.operation.name': 'messages.countTokens',
-          'sentry.op': 'gen_ai.messages.countTokens',
+          'gen_ai.operation.name': 'messages',
+          'sentry.op': 'gen_ai.messages',
           'sentry.origin': 'auto.ai.anthropic',
           'gen_ai.system': 'anthropic',
           'gen_ai.request.model': 'claude-3-haiku-20240307',
           'gen_ai.request.messages': '[{"role":"user","content":"What is the capital of France?"}]',
           'gen_ai.response.text': '15', // Only present because recordOutputs=true when sendDefaultPii=true
         },
-        description: 'messages.countTokens claude-3-haiku-20240307',
-        op: 'gen_ai.messages.countTokens',
+        description: 'messages claude-3-haiku-20240307',
+        op: 'gen_ai.messages',
         origin: 'auto.ai.anthropic',
         status: 'ok',
       }),
       // Fourth span - models.retrieve with PII
       expect.objectContaining({
         data: {
           'anthropic.response.timestamp': '2024-05-08T05:20:00.000Z',
-          'gen_ai.operation.name': 'retrieve',
-          'sentry.op': 'gen_ai.retrieve',
+          'gen_ai.operation.name': 'models',
+          'sentry.op': 'gen_ai.models',
           'sentry.origin': 'auto.ai.anthropic',
           'gen_ai.system': 'anthropic',
           'gen_ai.request.model': 'claude-3-haiku-20240307',
           'gen_ai.response.id': 'claude-3-haiku-20240307',
           'gen_ai.response.model': 'claude-3-haiku-20240307',
         },
-        description: 'retrieve claude-3-haiku-20240307',
-        op: 'gen_ai.retrieve',
+        description: 'models claude-3-haiku-20240307',
+        op: 'gen_ai.models',
         origin: 'auto.ai.anthropic',
         status: 'ok',
       }),
@@ -169,23 +168,23 @@ describe('Anthropic integration', () => {
       // Check token counting with options
       expect.objectContaining({
         data: expect.objectContaining({
-          'gen_ai.operation.name': 'messages.countTokens',
+          'gen_ai.operation.name': 'messages',
           'gen_ai.request.messages': expect.any(String), // Should include messages when recordInputs: true
           'gen_ai.response.text': '15', // Present because recordOutputs=true is set in options
         }),
-        op: 'gen_ai.messages.countTokens',
+        op: 'gen_ai.messages',
       }),
       // Check models.retrieve with options
       expect.objectContaining({
         data: expect.objectContaining({
-          'gen_ai.operation.name': 'retrieve',
+          'gen_ai.operation.name': 'models',
           'gen_ai.system': 'anthropic',
           'gen_ai.request.model': 'claude-3-haiku-20240307',
           'gen_ai.response.id': 'claude-3-haiku-20240307',
           'gen_ai.response.model': 'claude-3-haiku-20240307',
         }),
-        op: 'gen_ai.retrieve',
-        description: 'retrieve claude-3-haiku-20240307',
+        op: 'gen_ai.models',
+        description: 'models claude-3-haiku-20240307',
       }),
     ]),
   };
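
For context, below is a minimal sketch of the instrumented calls these assertions cover. The Anthropic SDK methods (messages.create, messages.countTokens, models.retrieve) are real, but the Sentry setup shown here, in particular an anthropicAIIntegration with recordInputs/recordOutputs options, is an assumption inferred from the test comments, not something this diff confirms.

// Sketch only: calls that would map to the spans asserted above.
// Assumption: the Sentry Node SDK exposes an Anthropic integration with
// recordInputs/recordOutputs options, as the test comments suggest.
import * as Sentry from '@sentry/node';
import Anthropic from '@anthropic-ai/sdk';

Sentry.init({
  dsn: '__DSN__',
  tracesSampleRate: 1.0,
  sendDefaultPii: true, // with PII: request messages and response text end up on the spans
  // Assumed integration name/options; without sendDefaultPii, recordOutputs defaults to false.
  integrations: [Sentry.anthropicAIIntegration({ recordInputs: true, recordOutputs: true })],
});

const client = new Anthropic({ apiKey: process.env.ANTHROPIC_API_KEY });

await Sentry.startSpan({ name: 'main' }, async () => {
  // First span: op 'gen_ai.messages', description 'messages claude-3-haiku-20240307'
  await client.messages.create({
    model: 'claude-3-haiku-20240307',
    max_tokens: 100,
    messages: [{ role: 'user', content: 'What is the capital of France?' }],
  });

  // Third span: also 'gen_ai.messages' after this change (was 'gen_ai.messages.countTokens')
  await client.messages.countTokens({
    model: 'claude-3-haiku-20240307',
    messages: [{ role: 'user', content: 'What is the capital of France?' }],
  });

  // Fourth span: op 'gen_ai.models' (was 'gen_ai.retrieve')
  await client.models.retrieve('claude-3-haiku-20240307');
});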