@@ -31,6 +31,18 @@ import {
   CONTEXT_KEY_ALLOW_TRACE_CONTENT,
   SpanAttributes,
 } from "@traceloop/ai-semantic-conventions";
+import {
+  ATTR_GEN_AI_COMPLETION,
+  ATTR_GEN_AI_PROMPT,
+  ATTR_GEN_AI_REQUEST_MAX_TOKENS,
+  ATTR_GEN_AI_REQUEST_MODEL,
+  ATTR_GEN_AI_REQUEST_TEMPERATURE,
+  ATTR_GEN_AI_REQUEST_TOP_P,
+  ATTR_GEN_AI_RESPONSE_MODEL,
+  ATTR_GEN_AI_SYSTEM,
+  ATTR_GEN_AI_USAGE_COMPLETION_TOKENS,
+  ATTR_GEN_AI_USAGE_PROMPT_TOKENS,
+} from "@opentelemetry/semantic-conventions/incubating";
 import { AnthropicInstrumentationConfig } from "./types";
 import { version } from "../package.json";
 import type * as anthropic from "@anthropic-ai/sdk";
@@ -204,15 +216,15 @@ export class AnthropicInstrumentation extends InstrumentationBase {
     };
   }): Span {
     const attributes: Attributes = {
-      [SpanAttributes.ATTR_GEN_AI_SYSTEM]: "Anthropic",
+      [ATTR_GEN_AI_SYSTEM]: "Anthropic",
       [SpanAttributes.LLM_REQUEST_TYPE]: type,
     };
 
     try {
-      attributes[SpanAttributes.ATTR_GEN_AI_REQUEST_MODEL] = params.model;
-      attributes[SpanAttributes.ATTR_GEN_AI_REQUEST_TEMPERATURE] =
+      attributes[ATTR_GEN_AI_REQUEST_MODEL] = params.model;
+      attributes[ATTR_GEN_AI_REQUEST_TEMPERATURE] =
         params.temperature;
-      attributes[SpanAttributes.ATTR_GEN_AI_REQUEST_TOP_P] = params.top_p;
+      attributes[ATTR_GEN_AI_REQUEST_TOP_P] = params.top_p;
       attributes[SpanAttributes.LLM_TOP_K] = params.top_k;
 
       // Handle thinking parameters (for beta messages)
@@ -224,10 +236,10 @@ export class AnthropicInstrumentation extends InstrumentationBase {
       }
 
       if (type === "completion") {
-        attributes[SpanAttributes.ATTR_GEN_AI_REQUEST_MAX_TOKENS] =
+        attributes[ATTR_GEN_AI_REQUEST_MAX_TOKENS] =
           params.max_tokens_to_sample;
       } else {
-        attributes[SpanAttributes.ATTR_GEN_AI_REQUEST_MAX_TOKENS] =
+        attributes[ATTR_GEN_AI_REQUEST_MAX_TOKENS] =
           params.max_tokens;
       }
 
@@ -246,9 +258,9 @@ export class AnthropicInstrumentation extends InstrumentationBase {
 
         // If a system prompt is provided, it should always be first
         if ("system" in params && params.system !== undefined) {
-          attributes[`${SpanAttributes.ATTR_GEN_AI_PROMPT}.0.role`] =
+          attributes[`${ATTR_GEN_AI_PROMPT}.0.role`] =
             "system";
-          attributes[`${SpanAttributes.ATTR_GEN_AI_PROMPT}.0.content`] =
+          attributes[`${ATTR_GEN_AI_PROMPT}.0.content`] =
             typeof params.system === "string"
               ? params.system
               : JSON.stringify(params.system);
@@ -258,21 +270,21 @@ export class AnthropicInstrumentation extends InstrumentationBase {
           params.messages.forEach((message, index) => {
             const currentIndex = index + promptIndex;
             attributes[
-              `${SpanAttributes.ATTR_GEN_AI_PROMPT}.${currentIndex}.role`
+              `${ATTR_GEN_AI_PROMPT}.${currentIndex}.role`
             ] = message.role;
             if (typeof message.content === "string") {
               attributes[
-                `${SpanAttributes.ATTR_GEN_AI_PROMPT}.${currentIndex}.content`
+                `${ATTR_GEN_AI_PROMPT}.${currentIndex}.content`
               ] = (message.content as string) || "";
             } else {
               attributes[
-                `${SpanAttributes.ATTR_GEN_AI_PROMPT}.${currentIndex}.content`
+                `${ATTR_GEN_AI_PROMPT}.${currentIndex}.content`
               ] = JSON.stringify(message.content);
             }
           });
         } else {
-          attributes[`${SpanAttributes.ATTR_GEN_AI_PROMPT}.0.role`] = "user";
-          attributes[`${SpanAttributes.ATTR_GEN_AI_PROMPT}.0.content`] =
+          attributes[`${ATTR_GEN_AI_PROMPT}.0.role`] = "user";
+          attributes[`${ATTR_GEN_AI_PROMPT}.0.content`] =
             params.prompt;
         }
       }
@@ -483,7 +495,7 @@ export class AnthropicInstrumentation extends InstrumentationBase {
   }) {
     try {
       span.setAttribute(
-        SpanAttributes.ATTR_GEN_AI_RESPONSE_MODEL,
+        ATTR_GEN_AI_RESPONSE_MODEL,
         result.model,
       );
       if (type === "chat" && result.usage) {
@@ -492,39 +504,39 @@ export class AnthropicInstrumentation extends InstrumentationBase {
           result.usage?.input_tokens + result.usage?.output_tokens,
         );
         span.setAttribute(
-          SpanAttributes.ATTR_GEN_AI_USAGE_COMPLETION_TOKENS,
+          ATTR_GEN_AI_USAGE_COMPLETION_TOKENS,
           result.usage?.output_tokens,
         );
         span.setAttribute(
-          SpanAttributes.ATTR_GEN_AI_USAGE_PROMPT_TOKENS,
+          ATTR_GEN_AI_USAGE_PROMPT_TOKENS,
           result.usage?.input_tokens,
         );
       }
 
       if (result.stop_reason) {
         span.setAttribute(
-          `${SpanAttributes.ATTR_GEN_AI_COMPLETION}.0.finish_reason`,
+          `${ATTR_GEN_AI_COMPLETION}.0.finish_reason`,
           result.stop_reason,
         );
       }
 
       if (this._shouldSendPrompts()) {
         if (type === "chat") {
           span.setAttribute(
-            `${SpanAttributes.ATTR_GEN_AI_COMPLETION}.0.role`,
+            `${ATTR_GEN_AI_COMPLETION}.0.role`,
             "assistant",
           );
           span.setAttribute(
-            `${SpanAttributes.ATTR_GEN_AI_COMPLETION}.0.content`,
+            `${ATTR_GEN_AI_COMPLETION}.0.content`,
             JSON.stringify(result.content),
           );
         } else {
           span.setAttribute(
-            `${SpanAttributes.ATTR_GEN_AI_COMPLETION}.0.role`,
+            `${ATTR_GEN_AI_COMPLETION}.0.role`,
             "assistant",
           );
           span.setAttribute(
-            `${SpanAttributes.ATTR_GEN_AI_COMPLETION}.0.content`,
+            `${ATTR_GEN_AI_COMPLETION}.0.content`,
             result.completion,
           );
         }
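
As a rough usage sketch (not part of this diff): the `ATTR_GEN_AI_*` exports from `@opentelemetry/semantic-conventions/incubating` are plain string attribute keys, so they can be set on any span created through `@opentelemetry/api`. The tracer name, span name, model string, and token count below are placeholders.

```ts
import { trace } from "@opentelemetry/api";
import {
  ATTR_GEN_AI_REQUEST_MODEL,
  ATTR_GEN_AI_SYSTEM,
  ATTR_GEN_AI_USAGE_PROMPT_TOKENS,
} from "@opentelemetry/semantic-conventions/incubating";

// Create a span by hand and annotate it with the same gen_ai.* keys
// the instrumentation uses; values here are illustrative only.
const tracer = trace.getTracer("example-tracer");
const span = tracer.startSpan("anthropic.chat");

span.setAttribute(ATTR_GEN_AI_SYSTEM, "Anthropic");
span.setAttribute(ATTR_GEN_AI_REQUEST_MODEL, "placeholder-model");
span.setAttribute(ATTR_GEN_AI_USAGE_PROMPT_TOKENS, 128);

span.end();
```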