Skip to content

Commit 7cfb531

Browse files
chore(ai): deprecate rest of ai.* attributes (#264)
- chore: marking the rest of the `ai.*` attributes as deprecated
- fix: adding reverse deprecation aliases
- chore: running yarn generate && yarn format
1 parent 4160099 commit 7cfb531

19 files changed

+340
-16
lines changed

javascript/sentry-conventions/src/attributes.ts

Lines changed: 50 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -11,6 +11,7 @@
1111
*
1212
* Attribute defined in OTEL: No
1313
*
14+
* @deprecated
1415
* @example ["Citation 1","Citation 2"]
1516
*/
1617
export const AI_CITATIONS = 'ai.citations';
@@ -54,6 +55,7 @@ export type AI_COMPLETION_TOKENS_USED_TYPE = number;
5455
*
5556
* Attribute defined in OTEL: No
5657
*
58+
* @deprecated
5759
* @example ["document1.txt","document2.pdf"]
5860
*/
5961
export const AI_DOCUMENTS = 'ai.documents';
@@ -189,6 +191,7 @@ export type AI_INPUT_MESSAGES_TYPE = string;
189191
*
190192
* Attribute defined in OTEL: No
191193
*
194+
* @deprecated
192195
* @example false
193196
*/
194197
export const AI_IS_SEARCH_REQUIRED = 'ai.is_search_required';
@@ -209,6 +212,7 @@ export type AI_IS_SEARCH_REQUIRED_TYPE = boolean;
209212
*
210213
* Attribute defined in OTEL: No
211214
*
215+
* @deprecated
212216
* @example "{\"user_id\": 123, \"session_id\": \"abc123\"}"
213217
*/
214218
export const AI_METADATA = 'ai.metadata';
@@ -298,6 +302,9 @@ export type AI_PIPELINE_NAME_TYPE = string;
298302
*
299303
* Attribute defined in OTEL: No
300304
*
305+
* Aliases: {@link GEN_AI_SYSTEM_INSTRUCTIONS} `gen_ai.system_instructions`
306+
*
307+
* @deprecated Use {@link GEN_AI_SYSTEM_INSTRUCTIONS} (gen_ai.system_instructions) instead
301308
* @example "You are now a clown."
302309
*/
303310
export const AI_PREAMBLE = 'ai.preamble';
@@ -364,6 +371,7 @@ export type AI_PROMPT_TOKENS_USED_TYPE = number;
364371
*
365372
* Attribute defined in OTEL: No
366373
*
374+
* @deprecated
367375
* @example true
368376
*/
369377
export const AI_RAW_PROMPTING = 'ai.raw_prompting';
@@ -405,6 +413,7 @@ export type AI_RESPONSES_TYPE = Array<string>;
405413
*
406414
* Attribute defined in OTEL: No
407415
*
416+
* @deprecated
408417
* @example "json_object"
409418
*/
410419
export const AI_RESPONSE_FORMAT = 'ai.response_format';
@@ -425,6 +434,7 @@ export type AI_RESPONSE_FORMAT_TYPE = string;
425434
*
426435
* Attribute defined in OTEL: No
427436
*
437+
* @deprecated
428438
* @example ["climate change effects","renewable energy"]
429439
*/
430440
export const AI_SEARCH_QUERIES = 'ai.search_queries';
@@ -445,6 +455,7 @@ export type AI_SEARCH_QUERIES_TYPE = Array<string>;
445455
*
446456
* Attribute defined in OTEL: No
447457
*
458+
* @deprecated
448459
* @example ["search_result_1, search_result_2"]
449460
*/
450461
export const AI_SEARCH_RESULTS = 'ai.search_results';
@@ -511,6 +522,7 @@ export type AI_STREAMING_TYPE = boolean;
511522
*
512523
* Attribute defined in OTEL: No
513524
*
525+
* @deprecated
514526
* @example "{\"executed_function\": \"add_integers\"}"
515527
*/
516528
export const AI_TAGS = 'ai.tags';
@@ -554,6 +566,9 @@ export type AI_TEMPERATURE_TYPE = number;
554566
*
555567
* Attribute defined in OTEL: No
556568
*
569+
* Aliases: {@link GEN_AI_INPUT_MESSAGES} `gen_ai.input.messages`
570+
*
571+
* @deprecated Use {@link GEN_AI_INPUT_MESSAGES} (gen_ai.input.messages) instead
557572
* @example ["Hello, how are you?","What is the capital of France?"]
558573
*/
559574
export const AI_TEXTS = 'ai.texts';
@@ -662,6 +677,9 @@ export type AI_TOP_P_TYPE = number;
662677
*
663678
* Attribute defined in OTEL: No
664679
*
680+
* Aliases: {@link GEN_AI_COST_TOTAL_TOKENS} `gen_ai.cost.total_tokens`
681+
*
682+
* @deprecated Use {@link GEN_AI_COST_TOTAL_TOKENS} (gen_ai.cost.total_tokens) instead
665683
* @example 12.34
666684
*/
667685
export const AI_TOTAL_COST = 'ai.total_cost';
@@ -705,6 +723,7 @@ export type AI_TOTAL_TOKENS_USED_TYPE = number;
705723
*
706724
* Attribute defined in OTEL: No
707725
*
726+
* @deprecated
708727
* @example ["Token limit exceeded"]
709728
*/
710729
export const AI_WARNINGS = 'ai.warnings';
@@ -2196,6 +2215,8 @@ export type GEN_AI_COST_OUTPUT_TOKENS_TYPE = number;
21962215
*
21972216
* Attribute defined in OTEL: No
21982217
*
2218+
* Aliases: {@link AI_TOTAL_COST} `ai.total_cost`
2219+
*
21992220
* @example 12.34
22002221
*/
22012222
export const GEN_AI_COST_TOTAL_TOKENS = 'gen_ai.cost.total_tokens';
@@ -2236,6 +2257,8 @@ export type GEN_AI_EMBEDDINGS_INPUT_TYPE = string;
22362257
*
22372258
* Attribute defined in OTEL: Yes
22382259
*
2260+
* Aliases: {@link AI_TEXTS} `ai.texts`
2261+
*
22392262
* @example "[{\"role\": \"user\", \"parts\": [{\"type\": \"text\", \"content\": \"Weather in Paris?\"}]}, {\"role\": \"assistant\", \"parts\": [{\"type\": \"tool_call\", \"id\": \"call_VSPygqKTWdrhaFErNvMV18Yl\", \"name\": \"get_weather\", \"arguments\": {\"location\": \"Paris\"}}]}, {\"role\": \"tool\", \"parts\": [{\"type\": \"tool_call_response\", \"id\": \"call_VSPygqKTWdrhaFErNvMV18Yl\", \"result\": \"rainy, 57°F\"}]}]"
22402263
*/
22412264
export const GEN_AI_INPUT_MESSAGES = 'gen_ai.input.messages';
@@ -2790,6 +2813,8 @@ export type GEN_AI_SYSTEM_TYPE = string;
27902813
*
27912814
* Attribute defined in OTEL: Yes
27922815
*
2816+
* Aliases: {@link AI_PREAMBLE} `ai.preamble`
2817+
*
27932818
* @example "You are a helpful assistant"
27942819
*/
27952820
export const GEN_AI_SYSTEM_INSTRUCTIONS = 'gen_ai.system_instructions';
@@ -9773,6 +9798,7 @@ export const ATTRIBUTE_METADATA: Record<AttributeName, AttributeMetadata> = {
97739798
},
97749799
isInOtel: false,
97759800
example: ['Citation 1', 'Citation 2'],
9801+
deprecation: {},
97769802
},
97779803
[AI_COMPLETION_TOKENS_USED]: {
97789804
brief: 'The number of tokens used to respond to the message.',
@@ -9796,6 +9822,7 @@ export const ATTRIBUTE_METADATA: Record<AttributeName, AttributeMetadata> = {
97969822
},
97979823
isInOtel: false,
97989824
example: ['document1.txt', 'document2.pdf'],
9825+
deprecation: {},
97999826
},
98009827
[AI_FINISH_REASON]: {
98019828
brief: 'The reason why the model stopped generating.',
@@ -9873,6 +9900,7 @@ export const ATTRIBUTE_METADATA: Record<AttributeName, AttributeMetadata> = {
98739900
},
98749901
isInOtel: false,
98759902
example: false,
9903+
deprecation: {},
98769904
},
98779905
[AI_METADATA]: {
98789906
brief: 'Extra metadata passed to an AI pipeline step.',
@@ -9882,6 +9910,7 @@ export const ATTRIBUTE_METADATA: Record<AttributeName, AttributeMetadata> = {
98829910
},
98839911
isInOtel: false,
98849912
example: '{"user_id": 123, "session_id": "abc123"}',
9913+
deprecation: {},
98859914
},
98869915
[AI_MODEL_ID]: {
98879916
brief: 'The vendor-specific ID of the model used.',
@@ -9932,6 +9961,10 @@ export const ATTRIBUTE_METADATA: Record<AttributeName, AttributeMetadata> = {
99329961
},
99339962
isInOtel: false,
99349963
example: 'You are now a clown.',
9964+
deprecation: {
9965+
replacement: 'gen_ai.system_instructions',
9966+
},
9967+
aliases: [GEN_AI_SYSTEM_INSTRUCTIONS],
99359968
},
99369969
[AI_PRESENCE_PENALTY]: {
99379970
brief:
@@ -9969,6 +10002,7 @@ export const ATTRIBUTE_METADATA: Record<AttributeName, AttributeMetadata> = {
996910002
},
997010003
isInOtel: false,
997110004
example: true,
10005+
deprecation: {},
997210006
},
997310007
[AI_RESPONSES]: {
997410008
brief: 'The response messages sent back by the AI model.',
@@ -9991,6 +10025,7 @@ export const ATTRIBUTE_METADATA: Record<AttributeName, AttributeMetadata> = {
999110025
},
999210026
isInOtel: false,
999310027
example: 'json_object',
10028+
deprecation: {},
999410029
},
999510030
[AI_SEARCH_QUERIES]: {
999610031
brief: 'Queries used to search for relevant context or documents.',
@@ -10000,6 +10035,7 @@ export const ATTRIBUTE_METADATA: Record<AttributeName, AttributeMetadata> = {
1000010035
},
1000110036
isInOtel: false,
1000210037
example: ['climate change effects', 'renewable energy'],
10038+
deprecation: {},
1000310039
},
1000410040
[AI_SEARCH_RESULTS]: {
1000510041
brief: 'Results returned from search queries for context.',
@@ -10009,6 +10045,7 @@ export const ATTRIBUTE_METADATA: Record<AttributeName, AttributeMetadata> = {
1000910045
},
1001010046
isInOtel: false,
1001110047
example: ['search_result_1, search_result_2'],
10048+
deprecation: {},
1001210049
},
1001310050
[AI_SEED]: {
1001410051
brief: 'The seed, ideally models given the same seed and same other parameters will produce the exact same output.',
@@ -10045,6 +10082,7 @@ export const ATTRIBUTE_METADATA: Record<AttributeName, AttributeMetadata> = {
1004510082
},
1004610083
isInOtel: false,
1004710084
example: '{"executed_function": "add_integers"}',
10085+
deprecation: {},
1004810086
},
1004910087
[AI_TEMPERATURE]: {
1005010088
brief:
@@ -10068,6 +10106,10 @@ export const ATTRIBUTE_METADATA: Record<AttributeName, AttributeMetadata> = {
1006810106
},
1006910107
isInOtel: false,
1007010108
example: ['Hello, how are you?', 'What is the capital of France?'],
10109+
deprecation: {
10110+
replacement: 'gen_ai.input.messages',
10111+
},
10112+
aliases: [GEN_AI_INPUT_MESSAGES],
1007110113
},
1007210114
[AI_TOOLS]: {
1007310115
brief: 'For an AI model call, the functions that are available',
@@ -10129,6 +10171,10 @@ export const ATTRIBUTE_METADATA: Record<AttributeName, AttributeMetadata> = {
1012910171
},
1013010172
isInOtel: false,
1013110173
example: 12.34,
10174+
deprecation: {
10175+
replacement: 'gen_ai.cost.total_tokens',
10176+
},
10177+
aliases: [GEN_AI_COST_TOTAL_TOKENS],
1013210178
},
1013310179
[AI_TOTAL_TOKENS_USED]: {
1013410180
brief: 'The total number of tokens used to process the prompt.',
@@ -10152,6 +10198,7 @@ export const ATTRIBUTE_METADATA: Record<AttributeName, AttributeMetadata> = {
1015210198
},
1015310199
isInOtel: false,
1015410200
example: ['Token limit exceeded'],
10201+
deprecation: {},
1015510202
},
1015610203
[APP_START_TYPE]: {
1015710204
brief: 'Mobile app start variant. Either cold or warm.',
@@ -10890,6 +10937,7 @@ export const ATTRIBUTE_METADATA: Record<AttributeName, AttributeMetadata> = {
1089010937
},
1089110938
isInOtel: false,
1089210939
example: 12.34,
10940+
aliases: [AI_TOTAL_COST],
1089310941
},
1089410942
[GEN_AI_EMBEDDINGS_INPUT]: {
1089510943
brief: 'The input to the embeddings model.',
@@ -10910,6 +10958,7 @@ export const ATTRIBUTE_METADATA: Record<AttributeName, AttributeMetadata> = {
1091010958
isInOtel: true,
1091110959
example:
1091210960
'[{"role": "user", "parts": [{"type": "text", "content": "Weather in Paris?"}]}, {"role": "assistant", "parts": [{"type": "tool_call", "id": "call_VSPygqKTWdrhaFErNvMV18Yl", "name": "get_weather", "arguments": {"location": "Paris"}}]}, {"role": "tool", "parts": [{"type": "tool_call_response", "id": "call_VSPygqKTWdrhaFErNvMV18Yl", "result": "rainy, 57°F"}]}]',
10961+
aliases: [AI_TEXTS],
1091310962
},
1091410963
[GEN_AI_OPERATION_NAME]: {
1091510964
brief:
@@ -11190,6 +11239,7 @@ export const ATTRIBUTE_METADATA: Record<AttributeName, AttributeMetadata> = {
1119011239
},
1119111240
isInOtel: true,
1119211241
example: 'You are a helpful assistant',
11242+
aliases: [AI_PREAMBLE],
1119311243
},
1119411244
[GEN_AI_SYSTEM_MESSAGE]: {
1119511245
brief: 'The system instructions passed to the model.',

model/attributes/ai/ai__citations.json

Lines changed: 4 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -6,5 +6,8 @@
66
"key": "true"
77
},
88
"is_in_otel": false,
9-
"example": ["Citation 1", "Citation 2"]
9+
"example": ["Citation 1", "Citation 2"],
10+
"deprecation": {
11+
"_status": null
12+
}
1013
}

model/attributes/ai/ai__documents.json

Lines changed: 4 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -6,5 +6,8 @@
66
"key": "true"
77
},
88
"is_in_otel": false,
9-
"example": ["document1.txt", "document2.pdf"]
9+
"example": ["document1.txt", "document2.pdf"],
10+
"deprecation": {
11+
"_status": null
12+
}
1013
}

model/attributes/ai/ai__is_search_required.json

Lines changed: 4 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -6,5 +6,8 @@
66
"key": "false"
77
},
88
"is_in_otel": false,
9-
"example": false
9+
"example": false,
10+
"deprecation": {
11+
"_status": null
12+
}
1013
}

model/attributes/ai/ai__metadata.json

Lines changed: 4 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -6,5 +6,8 @@
66
"key": "maybe"
77
},
88
"is_in_otel": false,
9-
"example": "{\"user_id\": 123, \"session_id\": \"abc123\"}"
9+
"example": "{\"user_id\": 123, \"session_id\": \"abc123\"}",
10+
"deprecation": {
11+
"_status": null
12+
}
1013
}

model/attributes/ai/ai__preamble.json

Lines changed: 6 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -6,5 +6,10 @@
66
"key": "true"
77
},
88
"is_in_otel": false,
9-
"example": "You are now a clown."
9+
"example": "You are now a clown.",
10+
"deprecation": {
11+
"_status": null,
12+
"replacement": "gen_ai.system_instructions"
13+
},
14+
"alias": ["gen_ai.system_instructions"]
1015
}

model/attributes/ai/ai__raw_prompting.json

Lines changed: 4 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -6,5 +6,8 @@
66
"key": "false"
77
},
88
"is_in_otel": false,
9-
"example": true
9+
"example": true,
10+
"deprecation": {
11+
"_status": null
12+
}
1013
}

model/attributes/ai/ai__response_format.json

Lines changed: 4 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -6,5 +6,8 @@
66
"key": "maybe"
77
},
88
"is_in_otel": false,
9-
"example": "json_object"
9+
"example": "json_object",
10+
"deprecation": {
11+
"_status": null
12+
}
1013
}

model/attributes/ai/ai__search_queries.json

Lines changed: 4 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -6,5 +6,8 @@
66
"key": "true"
77
},
88
"is_in_otel": false,
9-
"example": ["climate change effects", "renewable energy"]
9+
"example": ["climate change effects", "renewable energy"],
10+
"deprecation": {
11+
"_status": null
12+
}
1013
}

model/attributes/ai/ai__search_results.json

Lines changed: 4 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -6,5 +6,8 @@
66
"key": "true"
77
},
88
"is_in_otel": false,
9-
"example": ["search_result_1, search_result_2"]
9+
"example": ["search_result_1, search_result_2"],
10+
"deprecation": {
11+
"_status": null
12+
}
1013
}

0 commit comments

Comments (0)