Commit 93009b1

use from conventions
1 parent 0d82d9e commit 93009b1

File tree

63 files changed: +1194, -936 lines

packages/ai-semantic-conventions/src/SemanticAttributes.ts

Lines changed: 0 additions & 52 deletions

@@ -14,60 +14,8 @@
  * limitations under the License.
  */
 
-import {
-  ATTR_GEN_AI_AGENT_NAME,
-  ATTR_GEN_AI_COMPLETION,
-  ATTR_GEN_AI_CONVERSATION_ID,
-  ATTR_GEN_AI_INPUT_MESSAGES,
-  ATTR_GEN_AI_OPERATION_NAME,
-  ATTR_GEN_AI_OUTPUT_MESSAGES,
-  ATTR_GEN_AI_PROMPT,
-  ATTR_GEN_AI_PROVIDER_NAME,
-  ATTR_GEN_AI_REQUEST_MAX_TOKENS,
-  ATTR_GEN_AI_REQUEST_MODEL,
-  ATTR_GEN_AI_REQUEST_TEMPERATURE,
-  ATTR_GEN_AI_REQUEST_TOP_P,
-  ATTR_GEN_AI_RESPONSE_FINISH_REASONS,
-  ATTR_GEN_AI_RESPONSE_ID,
-  ATTR_GEN_AI_RESPONSE_MODEL,
-  ATTR_GEN_AI_SYSTEM,
-  ATTR_GEN_AI_TOOL_CALL_ARGUMENTS,
-  ATTR_GEN_AI_TOOL_CALL_ID,
-  ATTR_GEN_AI_TOOL_CALL_RESULT,
-  ATTR_GEN_AI_TOOL_NAME,
-  ATTR_GEN_AI_USAGE_COMPLETION_TOKENS,
-  ATTR_GEN_AI_USAGE_INPUT_TOKENS,
-  ATTR_GEN_AI_USAGE_OUTPUT_TOKENS,
-  ATTR_GEN_AI_USAGE_PROMPT_TOKENS,
-  // @ts-expect-error - Using exports path that TypeScript doesn't recognize but works at runtime
-} from "@opentelemetry/semantic-conventions/incubating";
 
 export const SpanAttributes = {
-  ATTR_GEN_AI_SYSTEM,
-  ATTR_GEN_AI_REQUEST_MODEL,
-  ATTR_GEN_AI_REQUEST_MAX_TOKENS,
-  ATTR_GEN_AI_REQUEST_TEMPERATURE,
-  ATTR_GEN_AI_REQUEST_TOP_P,
-  ATTR_GEN_AI_PROMPT,
-  ATTR_GEN_AI_COMPLETION,
-  ATTR_GEN_AI_INPUT_MESSAGES,
-  ATTR_GEN_AI_OUTPUT_MESSAGES,
-  ATTR_GEN_AI_RESPONSE_MODEL,
-  ATTR_GEN_AI_USAGE_PROMPT_TOKENS,
-  ATTR_GEN_AI_USAGE_COMPLETION_TOKENS,
-  ATTR_GEN_AI_USAGE_INPUT_TOKENS,
-  ATTR_GEN_AI_USAGE_OUTPUT_TOKENS,
-  ATTR_GEN_AI_OPERATION_NAME,
-  ATTR_GEN_AI_PROVIDER_NAME,
-  ATTR_GEN_AI_TOOL_NAME,
-  ATTR_GEN_AI_TOOL_CALL_ID,
-  ATTR_GEN_AI_TOOL_CALL_ARGUMENTS,
-  ATTR_GEN_AI_TOOL_CALL_RESULT,
-  ATTR_GEN_AI_RESPONSE_ID,
-  ATTR_GEN_AI_RESPONSE_FINISH_REASONS,
-  ATTR_GEN_AI_CONVERSATION_ID,
-  ATTR_GEN_AI_AGENT_NAME,
-
   // Attributes not yet in @opentelemetry/semantic-conventions
   GEN_AI_USAGE_CACHE_CREATION_INPUT_TOKENS:
     "gen_ai.usage.cache_creation_input_tokens",

packages/ai-semantic-conventions/tsconfig.json

Lines changed: 3 additions & 1 deletion

@@ -2,7 +2,9 @@
   "extends": "../../tsconfig.base.json",
   "compilerOptions": {
     "outDir": "dist",
-    "rootDir": "."
+    "rootDir": ".",
+    "moduleResolution": "node16",
+    "module": "node16"
   },
   "files": [],
   "include": ["src/**/*.ts", "test/**/*.ts"],

packages/instrumentation-anthropic/package.json

Lines changed: 1 addition & 1 deletion

@@ -41,7 +41,7 @@
     "@opentelemetry/api": "^1.9.0",
     "@opentelemetry/core": "^2.0.1",
     "@opentelemetry/instrumentation": "^0.203.0",
-    "@opentelemetry/semantic-conventions": "^1.36.0",
+    "@opentelemetry/semantic-conventions": "^1.38.0",
     "@traceloop/ai-semantic-conventions": "workspace:*",
     "tslib": "^2.8.1"
   },

packages/instrumentation-anthropic/src/instrumentation.ts

Lines changed: 33 additions & 21 deletions

@@ -31,6 +31,18 @@ import {
   CONTEXT_KEY_ALLOW_TRACE_CONTENT,
   SpanAttributes,
 } from "@traceloop/ai-semantic-conventions";
+import {
+  ATTR_GEN_AI_COMPLETION,
+  ATTR_GEN_AI_PROMPT,
+  ATTR_GEN_AI_REQUEST_MAX_TOKENS,
+  ATTR_GEN_AI_REQUEST_MODEL,
+  ATTR_GEN_AI_REQUEST_TEMPERATURE,
+  ATTR_GEN_AI_REQUEST_TOP_P,
+  ATTR_GEN_AI_RESPONSE_MODEL,
+  ATTR_GEN_AI_SYSTEM,
+  ATTR_GEN_AI_USAGE_COMPLETION_TOKENS,
+  ATTR_GEN_AI_USAGE_PROMPT_TOKENS,
+} from "@opentelemetry/semantic-conventions/incubating";
 import { AnthropicInstrumentationConfig } from "./types";
 import { version } from "../package.json";
 import type * as anthropic from "@anthropic-ai/sdk";
@@ -204,15 +216,15 @@ export class AnthropicInstrumentation extends InstrumentationBase {
     };
   }): Span {
     const attributes: Attributes = {
-      [SpanAttributes.ATTR_GEN_AI_SYSTEM]: "Anthropic",
+      [ATTR_GEN_AI_SYSTEM]: "Anthropic",
       [SpanAttributes.LLM_REQUEST_TYPE]: type,
     };
 
     try {
-      attributes[SpanAttributes.ATTR_GEN_AI_REQUEST_MODEL] = params.model;
-      attributes[SpanAttributes.ATTR_GEN_AI_REQUEST_TEMPERATURE] =
+      attributes[ATTR_GEN_AI_REQUEST_MODEL] = params.model;
+      attributes[ATTR_GEN_AI_REQUEST_TEMPERATURE] =
        params.temperature;
-      attributes[SpanAttributes.ATTR_GEN_AI_REQUEST_TOP_P] = params.top_p;
+      attributes[ATTR_GEN_AI_REQUEST_TOP_P] = params.top_p;
       attributes[SpanAttributes.LLM_TOP_K] = params.top_k;
 
       // Handle thinking parameters (for beta messages)
@@ -224,10 +236,10 @@ export class AnthropicInstrumentation extends InstrumentationBase {
       }
 
       if (type === "completion") {
-        attributes[SpanAttributes.ATTR_GEN_AI_REQUEST_MAX_TOKENS] =
+        attributes[ATTR_GEN_AI_REQUEST_MAX_TOKENS] =
          params.max_tokens_to_sample;
       } else {
-        attributes[SpanAttributes.ATTR_GEN_AI_REQUEST_MAX_TOKENS] =
+        attributes[ATTR_GEN_AI_REQUEST_MAX_TOKENS] =
          params.max_tokens;
       }
 
@@ -246,9 +258,9 @@ export class AnthropicInstrumentation extends InstrumentationBase {
 
         // If a system prompt is provided, it should always be first
         if ("system" in params && params.system !== undefined) {
-          attributes[`${SpanAttributes.ATTR_GEN_AI_PROMPT}.0.role`] =
+          attributes[`${ATTR_GEN_AI_PROMPT}.0.role`] =
            "system";
-          attributes[`${SpanAttributes.ATTR_GEN_AI_PROMPT}.0.content`] =
+          attributes[`${ATTR_GEN_AI_PROMPT}.0.content`] =
            typeof params.system === "string"
              ? params.system
              : JSON.stringify(params.system);
@@ -258,21 +270,21 @@ export class AnthropicInstrumentation extends InstrumentationBase {
         params.messages.forEach((message, index) => {
           const currentIndex = index + promptIndex;
           attributes[
-            `${SpanAttributes.ATTR_GEN_AI_PROMPT}.${currentIndex}.role`
+            `${ATTR_GEN_AI_PROMPT}.${currentIndex}.role`
           ] = message.role;
           if (typeof message.content === "string") {
             attributes[
-              `${SpanAttributes.ATTR_GEN_AI_PROMPT}.${currentIndex}.content`
+              `${ATTR_GEN_AI_PROMPT}.${currentIndex}.content`
             ] = (message.content as string) || "";
           } else {
             attributes[
-              `${SpanAttributes.ATTR_GEN_AI_PROMPT}.${currentIndex}.content`
+              `${ATTR_GEN_AI_PROMPT}.${currentIndex}.content`
             ] = JSON.stringify(message.content);
           }
         });
       } else {
-        attributes[`${SpanAttributes.ATTR_GEN_AI_PROMPT}.0.role`] = "user";
-        attributes[`${SpanAttributes.ATTR_GEN_AI_PROMPT}.0.content`] =
+        attributes[`${ATTR_GEN_AI_PROMPT}.0.role`] = "user";
+        attributes[`${ATTR_GEN_AI_PROMPT}.0.content`] =
          params.prompt;
       }
     }
@@ -483,7 +495,7 @@ export class AnthropicInstrumentation extends InstrumentationBase {
   }) {
     try {
       span.setAttribute(
-        SpanAttributes.ATTR_GEN_AI_RESPONSE_MODEL,
+        ATTR_GEN_AI_RESPONSE_MODEL,
        result.model,
       );
       if (type === "chat" && result.usage) {
@@ -492,39 +504,39 @@ export class AnthropicInstrumentation extends InstrumentationBase {
          result.usage?.input_tokens + result.usage?.output_tokens,
        );
        span.setAttribute(
-          SpanAttributes.ATTR_GEN_AI_USAGE_COMPLETION_TOKENS,
+          ATTR_GEN_AI_USAGE_COMPLETION_TOKENS,
          result.usage?.output_tokens,
        );
        span.setAttribute(
-          SpanAttributes.ATTR_GEN_AI_USAGE_PROMPT_TOKENS,
+          ATTR_GEN_AI_USAGE_PROMPT_TOKENS,
          result.usage?.input_tokens,
        );
      }
 
      if (result.stop_reason) {
        span.setAttribute(
-          `${SpanAttributes.ATTR_GEN_AI_COMPLETION}.0.finish_reason`,
+          `${ATTR_GEN_AI_COMPLETION}.0.finish_reason`,
          result.stop_reason,
        );
      }
 
      if (this._shouldSendPrompts()) {
        if (type === "chat") {
          span.setAttribute(
-            `${SpanAttributes.ATTR_GEN_AI_COMPLETION}.0.role`,
+            `${ATTR_GEN_AI_COMPLETION}.0.role`,
            "assistant",
          );
          span.setAttribute(
-            `${SpanAttributes.ATTR_GEN_AI_COMPLETION}.0.content`,
+            `${ATTR_GEN_AI_COMPLETION}.0.content`,
            JSON.stringify(result.content),
          );
        } else {
          span.setAttribute(
-            `${SpanAttributes.ATTR_GEN_AI_COMPLETION}.0.role`,
+            `${ATTR_GEN_AI_COMPLETION}.0.role`,
            "assistant",
          );
          span.setAttribute(
-            `${SpanAttributes.ATTR_GEN_AI_COMPLETION}.0.content`,
+            `${ATTR_GEN_AI_COMPLETION}.0.content`,
            result.completion,
          );
        }
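
Note that the refactor only changes where the constants come from; the attribute keys written to spans should remain the same gen_ai.* strings. An illustrative check, not part of this commit:

// Illustrative only: the upstream constants resolve to the familiar gen_ai.* keys.
import {
  ATTR_GEN_AI_REQUEST_MODEL,
  ATTR_GEN_AI_SYSTEM,
  ATTR_GEN_AI_USAGE_PROMPT_TOKENS,
} from "@opentelemetry/semantic-conventions/incubating";

// Expected values per the GenAI semantic conventions:
//   ATTR_GEN_AI_SYSTEM              === "gen_ai.system"
//   ATTR_GEN_AI_REQUEST_MODEL       === "gen_ai.request.model"
//   ATTR_GEN_AI_USAGE_PROMPT_TOKENS === "gen_ai.usage.prompt_tokens"
console.log(ATTR_GEN_AI_SYSTEM, ATTR_GEN_AI_REQUEST_MODEL, ATTR_GEN_AI_USAGE_PROMPT_TOKENS);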
