
Commit 5bf7306

git add
1 parent fc8fd69 commit 5bf7306

2 files changed: +182 -0 lines changed
Lines changed: 33 additions & 0 deletions
@@ -0,0 +1,33 @@
/*
 * Copyright The OpenTelemetry Authors
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

// Gen AI conventions

export const ATTR_GEN_AI_SYSTEM = 'gen_ai.system';
export const ATTR_GEN_AI_OPERATION_NAME = 'gen_ai.operation.name';
export const ATTR_GEN_AI_REQUEST_MODEL = 'gen_ai.request.model';
export const ATTR_GEN_AI_REQUEST_MAX_TOKENS = 'gen_ai.request.max_tokens';
export const ATTR_GEN_AI_REQUEST_TEMPERATURE = 'gen_ai.request.temperature';
export const ATTR_GEN_AI_REQUEST_TOP_P = 'gen_ai.request.top_p';
export const ATTR_GEN_AI_REQUEST_STOP_SEQUENCES =
  'gen_ai.request.stop_sequences';
export const ATTR_GEN_AI_USAGE_INPUT_TOKENS = 'gen_ai.usage.input_tokens';
export const ATTR_GEN_AI_USAGE_OUTPUT_TOKENS = 'gen_ai.usage.output_tokens';
export const ATTR_GEN_AI_RESPONSE_FINISH_REASONS =
  'gen_ai.response.finish_reasons';

export const GEN_AI_SYSTEM_VALUE_AWS_BEDROCK = 'aws.bedrock';
export const GEN_AI_OPERATION_NAME_VALUE_CHAT = 'chat';
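
For context on how these keys are meant to be consumed, the short sketch below sets a few of them on a span via the standard OpenTelemetry API. It is only an illustration, not code from this commit: the recordConverseCall helper, the tracer name, and the usage shape are hypothetical.

import { trace } from '@opentelemetry/api';
import {
  ATTR_GEN_AI_SYSTEM,
  ATTR_GEN_AI_OPERATION_NAME,
  ATTR_GEN_AI_REQUEST_MODEL,
  ATTR_GEN_AI_USAGE_INPUT_TOKENS,
  ATTR_GEN_AI_USAGE_OUTPUT_TOKENS,
  GEN_AI_SYSTEM_VALUE_AWS_BEDROCK,
  GEN_AI_OPERATION_NAME_VALUE_CHAT,
} from './semconv';

// Hypothetical helper: record one chat call as a span named "<operation> <model>".
function recordConverseCall(
  modelId: string,
  usage: { inputTokens?: number; outputTokens?: number }
): void {
  const span = trace.getTracer('example-tracer').startSpan(`chat ${modelId}`);
  span.setAttributes({
    [ATTR_GEN_AI_SYSTEM]: GEN_AI_SYSTEM_VALUE_AWS_BEDROCK,
    [ATTR_GEN_AI_OPERATION_NAME]: GEN_AI_OPERATION_NAME_VALUE_CHAT,
    [ATTR_GEN_AI_REQUEST_MODEL]: modelId,
  });
  // Token usage is only known after the response arrives, so set it conditionally.
  if (usage.inputTokens !== undefined) {
    span.setAttribute(ATTR_GEN_AI_USAGE_INPUT_TOKENS, usage.inputTokens);
  }
  if (usage.outputTokens !== undefined) {
    span.setAttribute(ATTR_GEN_AI_USAGE_OUTPUT_TOKENS, usage.outputTokens);
  }
  span.end();
}

The test added in the second file asserts exactly this span-name and attribute pattern against a recorded Converse response.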
Lines changed: 149 additions & 0 deletions
@@ -0,0 +1,149 @@
/*
 * Copyright The OpenTelemetry Authors
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

/**
 * These tests verify telemetry created against actual API responses,
 * which can be difficult to mock for LLMs. The responses are recorded
 * automatically using nock's nock-back feature. Responses are recorded
 * to the mock-responses directory under the name of the test - by default,
 * if a response is available for the current test it is used; otherwise
 * a real request is made and the response is recorded.
 * To re-record all responses, set the NOCK_BACK_MODE environment variable
 * to 'update' - when recording responses, valid AWS credentials for
 * accessing Bedrock are also required.
 */

import {
  getTestSpans,
  registerInstrumentationTesting,
} from '@opentelemetry/contrib-test-utils';
import { AwsInstrumentation } from '../src';
registerInstrumentationTesting(new AwsInstrumentation());

import {
  BedrockRuntimeClient,
  ConverseCommand,
  ConversationRole,
} from '@aws-sdk/client-bedrock-runtime';
import * as path from 'path';
import { Definition, back as nockBack } from 'nock';

import { ReadableSpan } from '@opentelemetry/sdk-trace-base';
import {
  ATTR_GEN_AI_SYSTEM,
  ATTR_GEN_AI_OPERATION_NAME,
  ATTR_GEN_AI_REQUEST_MODEL,
  ATTR_GEN_AI_REQUEST_MAX_TOKENS,
  ATTR_GEN_AI_REQUEST_TEMPERATURE,
  ATTR_GEN_AI_REQUEST_TOP_P,
  ATTR_GEN_AI_REQUEST_STOP_SEQUENCES,
  ATTR_GEN_AI_USAGE_INPUT_TOKENS,
  ATTR_GEN_AI_USAGE_OUTPUT_TOKENS,
  ATTR_GEN_AI_RESPONSE_FINISH_REASONS,
  GEN_AI_SYSTEM_VALUE_AWS_BEDROCK,
  GEN_AI_OPERATION_NAME_VALUE_CHAT,
} from '../src/semconv';
import { expect } from 'expect';

const region = 'us-east-1';

// Remove any data from recorded responses that could contain sensitive data
// and that we don't need for testing.
const sanitizeRecordings = (scopes: Definition[]) => {
  for (const scope of scopes) {
    // The type definition for headers seems to be incorrect.
    const headers: string[] = (scope as any).rawHeaders;
    for (let i = 0; i < headers.length; i += 2) {
      if (headers[i].toLowerCase() === 'set-cookie') {
        headers.splice(i, 2);
      }
    }
  }
  return scopes;
};

describe('Bedrock', () => {
  const client = new BedrockRuntimeClient({ region });

  nockBack.fixtures = path.join(__dirname, 'mock-responses');
  if (!process.env.NOCK_BACK_MODE) {
    nockBack.setMode('record');
  }

  let nockDone: () => void;
  beforeEach(async function () {
    const filename = `${this.currentTest
      ?.fullTitle()
      .toLowerCase()
      .replace(/\s/g, '-')}.json`;
    const { nockDone: nd } = await nockBack(filename, {
      afterRecord: sanitizeRecordings,
    });
    nockDone = nd;
  });

  afterEach(async function () {
    nockDone();
  });

  describe('Converse', () => {
    it('adds genai conventions', async () => {
      const modelId = 'amazon.titan-text-lite-v1';
      const messages = [
        {
          role: ConversationRole.USER,
          content: [{ text: 'Say this is a test' }],
        },
      ];
      const inferenceConfig = {
        maxTokens: 10,
        temperature: 0.8,
        topP: 1,
        stopSequences: ['|'],
      };

      const command = new ConverseCommand({
        modelId,
        messages,
        inferenceConfig,
      });
      const response = await client.send(command);
      expect(response.output?.message?.content?.[0].text).toBe(
        "Hi. I'm not sure what"
      );

      const testSpans: ReadableSpan[] = getTestSpans();
      const converseSpans: ReadableSpan[] = testSpans.filter(
        (s: ReadableSpan) => {
          return s.name === 'chat amazon.titan-text-lite-v1';
        }
      );
      expect(converseSpans.length).toBe(1);
      expect(converseSpans[0].attributes).toMatchObject({
        [ATTR_GEN_AI_SYSTEM]: GEN_AI_SYSTEM_VALUE_AWS_BEDROCK,
        [ATTR_GEN_AI_OPERATION_NAME]: GEN_AI_OPERATION_NAME_VALUE_CHAT,
        [ATTR_GEN_AI_REQUEST_MODEL]: modelId,
        [ATTR_GEN_AI_REQUEST_MAX_TOKENS]: 10,
        [ATTR_GEN_AI_REQUEST_TEMPERATURE]: 0.8,
        [ATTR_GEN_AI_REQUEST_TOP_P]: 1,
        [ATTR_GEN_AI_REQUEST_STOP_SEQUENCES]: ['|'],
        [ATTR_GEN_AI_USAGE_INPUT_TOKENS]: 8,
        [ATTR_GEN_AI_USAGE_OUTPUT_TOKENS]: 10,
        [ATTR_GEN_AI_RESPONSE_FINISH_REASONS]: ['max_tokens'],
      });
    });
  });
});
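
The header comment of the test file above describes the nock-back record/replay flow. As a standalone illustration of that flow, here is a minimal sketch outside of the mocha hooks used in this commit; the fixture name is hypothetical and the mode handling simply mirrors the test above.

import * as path from 'path';
import { back as nockBack } from 'nock';

async function withRecordedResponses(): Promise<void> {
  // Fixtures are JSON recordings, one file per test, kept under mock-responses.
  nockBack.fixtures = path.join(__dirname, 'mock-responses');

  // Default to 'record': replay an existing fixture, or make a real request
  // and record it when no fixture exists yet. If NOCK_BACK_MODE is set
  // (e.g. to 'update' for re-recording), that mode is left in effect instead.
  if (!process.env.NOCK_BACK_MODE) {
    nockBack.setMode('record');
  }

  // Load (or start recording) the fixture for this block of HTTP activity.
  const { nockDone } = await nockBack('example-request.json');

  // ... perform the HTTP calls under test here ...

  // Signal that this fixture's HTTP activity is done; a new recording, if
  // one was made, is persisted at this point.
  nockDone();
}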
