Commit 6d92358

karthikscale3, darshit-s3, obinnaokafor, obinnascale3, and rohit-kadhe authored
Release (#185)
* feat: adding gemini support (#162)
* chore: update package lock
* docs: update changelog for 5.2.0 release
* feat: adding vertexai instrumentation support (#165)
* style: remove commented import
* chore: create types for vertexai
* docs: revert vercel ai py check
* docs: add mistral check
* style: remove new lines
* docs: add vertexai to changelog (#166)
* fix: allow manual patch for Gemini and VertexAI (#169)
* star github link
* feat: adding vertexai tools tracing support (#172)
* add mistral instrumentation
* support otel env vars
* bump version and add chat stream method
* add examples
* update readme
* update package-lock
* Add sentry integration
* merge with dev
* llama index enhancements
* sentry to filter non langtrace exceptions
* Fix ingestion url (#181)
* Obinna/s3 en 2724 add bedrock (#184)
* add aws bedrock converse implementation
* bump version
* cleanup and update readme
* update service type
* fixes

---------

Co-authored-by: Obinna Okafor <[email protected]>
Co-authored-by: Karthik Kalyanaraman <[email protected]>

---------

Co-authored-by: darshit-s3 <[email protected]>
Co-authored-by: Darshit Suratwala <[email protected]>
Co-authored-by: Obinna Okafor <[email protected]>
Co-authored-by: obinnascale3 <[email protected]>
Co-authored-by: Rohit Kadhe <[email protected]>
Co-authored-by: Rohit Kadhe <[email protected]>
1 parent a66180c commit 6d92358

File tree

9 files changed: +1111 −446 lines changed


README.md

Lines changed: 1 addition & 0 deletions
@@ -158,6 +158,7 @@ Langtrace automatically captures traces from the following vendors:
 | Langchain | Framework | :x: | :white_check_mark: |
 | LlamaIndex | Framework | :white_check_mark: | :white_check_mark: |
 | Langgraph | Framework | :x: | :white_check_mark: |
+| AWS Bedrock | Framework | :white_check_mark: | :x: |
 | DSPy | Framework | :x: | :white_check_mark: |
 | CrewAI | Framework | :x: | :white_check_mark: |
 | Ollama | Framework | :white_check_mark: | :white_check_mark: |

package-lock.json

Lines changed: 814 additions & 439 deletions
Some generated files are not rendered by default.

package.json

Lines changed: 3 additions & 2 deletions
@@ -1,6 +1,6 @@
 {
   "name": "@langtrase/typescript-sdk",
-  "version": "6.0.0",
+  "version": "6.1.0",
   "description": "A typescript SDK for Langtrace",
   "main": "dist/index.js",
   "types": "dist/index.d.ts",
@@ -21,7 +21,8 @@
   "author": "Scale3 Labs",
   "license": "Apache-2.0",
   "dependencies": {
-    "@langtrase/trace-attributes": "7.4.0",
+    "@aws-sdk/client-bedrock-runtime": "^3.670.0",
+    "@langtrase/trace-attributes": "7.5.0",
     "@opentelemetry/api": "^1.7.0",
     "@opentelemetry/instrumentation": "^0.49.1",
     "@opentelemetry/sdk-trace-base": "^1.22.0",
src/examples/awsbedrock/converse-anthropic.ts

Lines changed: 61 additions & 0 deletions

@@ -0,0 +1,61 @@
+/*
+ * Copyright (c) 2024 Scale3 Labs
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+import { init } from '@langtrace-init/init'
+import {
+  BedrockRuntimeClient,
+  ConverseCommand,
+  Message
+} from '@aws-sdk/client-bedrock-runtime'
+import dotenv from 'dotenv'
+
+dotenv.config()
+
+init({ batch: false, write_spans_to_console: false, disable_instrumentations: { all_except: ['anthropic'] } })
+
+const client = new BedrockRuntimeClient({ region: 'us-east-1' })
+
+const userMessage =
+  'Describe the purpose of a hello world program in one line.'
+const conversation: Message[] = [
+  {
+    role: 'user',
+    content: [{ text: userMessage }]
+  }
+]
+
+const modelId = 'mistral.mistral-large-2402-v1:0'
+
+export async function basic (): Promise<void> {
+  // Create a command with the model ID, the message, and a basic configuration.
+  const command = new ConverseCommand({
+    modelId,
+    messages: conversation,
+    inferenceConfig: { maxTokens: 512, temperature: 0.5, topP: 0.9 }
+  })
+
+  try {
+    // Send the command to the model and wait for the response.
+    const response = await client.send(command)
+
+    // Extract and print the response text.
+    const responseText = response.output?.message?.content?.[0]?.text
+    console.log(responseText)
+  } catch (err) {
+    console.log(`ERROR using '${modelId}'. Reason: ${err}`)
+    process.exit(1)
+  }
+}
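The new example exercises only the non-streaming Converse path, while the patch added below also traces streamed responses. A minimal streaming variant is sketched here, assuming the same `client`, `conversation`, and `modelId` as in the file above; it is an illustration, not part of the commit:

```typescript
import { ConverseStreamCommand } from '@aws-sdk/client-bedrock-runtime'

export async function basicStream (): Promise<void> {
  const command = new ConverseStreamCommand({
    modelId,
    messages: conversation,
    inferenceConfig: { maxTokens: 512, temperature: 0.5, topP: 0.9 }
  })
  const response = await client.send(command)
  // The instrumented send() returns the stream as-is; consume it normally.
  for await (const chunk of response.stream ?? []) {
    const text = chunk.contentBlockDelta?.delta?.text
    if (text !== undefined) process.stdout.write(text)
  }
}
```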

src/examples/entrypoint.ts

Lines changed: 1 addition & 1 deletion
@@ -1,3 +1,3 @@
-import { basic } from '@langtrace-examples/llamaindex/basic'
+import { basic } from '@langtrace-examples/awsbedrock/converse-anthropic'
 
 void basic()

src/examples/vercel/basic.ts

Lines changed: 4 additions & 4 deletions
@@ -1,5 +1,5 @@
+import { openai } from '@ai-sdk/openai'
 import { init } from '@langtrace-init/init'
-import { openai } from '@ai-sdk/openai' // Ensure OPENAI_API_KEY environment variable is set
 import ai from '@langtrace-module-wrappers/ai'
 import { withAdditionalAttributes } from '@langtrace-utils/instrumentation'
 
@@ -9,7 +9,7 @@ init({
 })
 export async function basic (): Promise<void> {
   await withAdditionalAttributes(async () => {
-    const resp = await ai.generateText({
+    await ai.generateText({
       model: openai('gpt-4-turbo', { user: 'abc' }),
       system: 'You are a friendly assistant!',
       // prompt: 'Why is the sky blue?',
@@ -39,11 +39,11 @@ export async function basicStream (): Promise<void> {
 }
 
 export const basicEmbed = async (): Promise<void> => {
-  const resp = await ai.embed({ model: openai.embedding('text-embedding-3-large', { user: 'abc', dimensions: 10, maxEmbeddingsPerCall: 2 }), value: 'hey there its a very nice day out today', maxRetries: 3 })
+  await ai.embed({ model: openai.embedding('text-embedding-3-large', { user: 'abc', dimensions: 10, maxEmbeddingsPerCall: 2 }), value: 'hey there its a very nice day out today', maxRetries: 3 })
   // console.info(resp)
 }
 
 export const basicEmbedMany = async (): Promise<void> => {
-  const resp = await ai.embedMany({ model: openai.embedding('text-embedding-3-large', { user: 'abc', dimensions: 10, maxEmbeddingsPerCall: 2 }), values: ['hey there its a very nice day out today'], maxRetries: 3 })
+  await ai.embedMany({ model: openai.embedding('text-embedding-3-large', { user: 'abc', dimensions: 10, maxEmbeddingsPerCall: 2 }), values: ['hey there its a very nice day out today'], maxRetries: 3 })
   // console.info(resp)
 }

src/init/init.ts

Lines changed: 2 additions & 0 deletions
@@ -46,6 +46,7 @@ import * as Sentry from '@sentry/node'
 import { SENTRY_DSN } from '@langtrace-constants/common'
 import { nodeProfilingIntegration } from '@sentry/profiling-node'
 import { mistralInstrumentation } from '@langtrace-instrumentation/mistral/instrumentation'
+import { awsbedrockInstrumentation } from '@langtrace-instrumentation/awsbedrock/instrumentation'
 import { LangtraceSdkError } from 'errors/sdk_error'
 
 /**
@@ -169,6 +170,7 @@ export const init: LangTraceInit = ({
   anthropic: anthropicInstrumentation,
   gemini: geminiInstrumentation,
   mistral: mistralInstrumentation,
+  awsbedrock: awsbedrockInstrumentation,
   groq: groqInstrumentation,
   pinecone: pineconeInstrumentation,
   llamaindex: llamaIndexInstrumentation,
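With the instrumentation registered under the `awsbedrock` key, Bedrock participates in the same enable/disable switches as the other vendors. A minimal sketch, reusing the `disable_instrumentations` shape from the example file in this commit (treat the exact option names as an assumption):

```typescript
import { init } from '@langtrace-init/init'

// Sketch: trace only AWS Bedrock calls; every other registered
// instrumentation (openai, anthropic, mistral, ...) stays disabled.
init({ batch: false, disable_instrumentations: { all_except: ['awsbedrock'] } })
```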
src/instrumentation/awsbedrock/instrumentation.ts

Lines changed: 75 additions & 0 deletions

@@ -0,0 +1,75 @@
+/*
+ * Copyright (c) 2024 Scale3 Labs
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+import { diag } from '@opentelemetry/api'
+import {
+  InstrumentationBase,
+  InstrumentationNodeModuleDefinition,
+  isWrapped
+} from '@opentelemetry/instrumentation'
+import { sendCommand } from '@langtrace-instrumentation/awsbedrock/patch'
+// eslint-disable-next-line no-restricted-imports
+import { version, name } from '../../../package.json'
+class AWSBedrockInstrumentation extends InstrumentationBase<any> {
+  constructor () {
+    super(name, version)
+  }
+
+  public manualPatch (awsbedrock: any): void {
+    diag.debug('Manually patching awsbedrock')
+    this._patch(awsbedrock)
+  }
+
+  init (): Array<InstrumentationNodeModuleDefinition<any>> {
+    const module = new InstrumentationNodeModuleDefinition<any>(
+      '@aws-sdk/client-bedrock-runtime',
+      ['>=3.668.0'],
+      (moduleExports, moduleVersion) => {
+        diag.debug(`Patching AWS Bedrock SDK version ${moduleVersion}`)
+        this._patch(moduleExports, moduleVersion as string)
+        return moduleExports
+      },
+      (moduleExports, moduleVersion) => {
+        diag.debug(`Unpatching AWS Bedrock SDK version ${moduleVersion}`)
+        if (moduleExports !== undefined) {
+          this._unpatch(moduleExports)
+        }
+      }
+    )
+
+    return [module]
+  }
+
+  private _patch (awsbedrock: any, moduleVersion?: string): void {
+    const wrapped = isWrapped(awsbedrock.BedrockRuntimeClient.prototype)
+    if (wrapped) {
+      this._unpatch(awsbedrock)
+    }
+    this._wrap(
+      awsbedrock.BedrockRuntimeClient.prototype,
+      'send',
+      (originalMethod: (...args: any[]) => any) => {
+        return sendCommand(originalMethod, this.tracer, this.instrumentationVersion, moduleVersion)
+      }
+    )
+  }
+
+  private _unpatch (awsbedrock: any): void {
+    this._unwrap(awsbedrock.BedrockRuntimeClient.prototype, 'send')
+  }
+}
+
+export const awsbedrockInstrumentation = new AWSBedrockInstrumentation()
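Automatic patching relies on OpenTelemetry's module-load hook, which bundled or pre-loaded code can bypass; `manualPatch` exists for those cases. A rough sketch of calling it directly — the import-and-call pattern shown here is an assumption, not usage documented in this commit:

```typescript
import * as bedrock from '@aws-sdk/client-bedrock-runtime'
import { init } from '@langtrace-init/init'
import { awsbedrockInstrumentation } from '@langtrace-instrumentation/awsbedrock/instrumentation'

init({ batch: false })
// Hand the already-imported module to the instrumentation so _patch can
// wrap BedrockRuntimeClient.prototype.send with the tracing logic above.
awsbedrockInstrumentation.manualPatch(bedrock)
```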
src/instrumentation/awsbedrock/patch.ts

Lines changed: 150 additions & 0 deletions

@@ -0,0 +1,150 @@
+/*
+ * Copyright (c) 2024 Scale3 Labs
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+import { LANGTRACE_ADDITIONAL_SPAN_ATTRIBUTES_KEY } from '@langtrace-constants/common'
+import { addSpanEvent } from '@langtrace-utils/misc'
+import { APIS, LLMSpanAttributes, Event, Vendors } from '@langtrase/trace-attributes'
+import {
+  Exception,
+  Span,
+  SpanKind,
+  SpanStatusCode,
+  Tracer,
+  context,
+  trace
+} from '@opentelemetry/api'
+import { LangtraceSdkError } from 'errors/sdk_error'
+
+export function sendCommand (
+  originalMethod: (...args: any[]) => any,
+  tracer: Tracer,
+  langtraceVersion: string,
+  version?: string,
+  stream = false
+): (...args: any[]) => any {
+  return async function (this: any, ...args: any[]) {
+    // eslint-disable-next-line @typescript-eslint/no-this-alias
+    const originalContext = this
+    const customAttributes = context.active().getValue(LANGTRACE_ADDITIONAL_SPAN_ATTRIBUTES_KEY) ?? {}
+    // Determine the service provider
+    const serviceProvider: string = Vendors.AWSBEDROCK
+    const attributes: LLMSpanAttributes = {
+      'langtrace.sdk.name': '@langtrase/typescript-sdk',
+      'gen_ai.operation.name': 'chat',
+      'langtrace.service.name': serviceProvider,
+      'langtrace.service.type': 'framework',
+      'langtrace.service.version': version,
+      'langtrace.version': langtraceVersion,
+      'gen_ai.request.model': args[0]?.input.modelId,
+      'url.full': originalContext?._client?.baseURL,
+      'url.path': APIS.awsbedrock.CONVERSE.ENDPOINT,
+      'http.max.retries': originalContext?._client?.maxRetries,
+      'http.timeout': originalContext?._client?.timeout,
+      'gen_ai.request.temperature': args[0]?.input?.inferenceConfig?.temperature,
+      'gen_ai.request.top_p': args[0]?.input?.inferenceConfig?.topP,
+      'gen_ai.user': args[0]?.user,
+      'gen_ai.request.max_tokens': args[0]?.input?.inferenceConfig?.maxTokens,
+      'gen_ai.request.tools': JSON.stringify(args[0]?.input?.toolConfig?.tools),
+      ...customAttributes
+    }
+    /* eslint-disable no-console */
+    const spanName = customAttributes['langtrace.span.name' as keyof typeof customAttributes] ?? APIS.awsbedrock.CONVERSE.METHOD
+    const span = tracer.startSpan(spanName, { kind: SpanKind.CLIENT, attributes }, context.active())
+    return await context.with(
+      trace.setSpan(context.active(), span),
+      async () => {
+        try {
+          const resp = await originalMethod.apply(this, args)
+          const message = args[0]?.input?.messages[0]
+          const message_content = message?.content?.map((content: any) => ({ content: content.text, role: message.role }))
+          addSpanEvent(span, Event.GEN_AI_PROMPT, { 'gen_ai.prompt': JSON.stringify(message_content) })
+          if (resp.stream === undefined) {
+            const responses = resp?.output?.message?.content?.map((content: any) => {
+              const result = {
+                role: resp?.output?.message?.role,
+                content: content?.text !== undefined && content?.text !== null
+                  ? content?.text
+                  : content?.toolUse !== undefined
+                    ? JSON.stringify(content?.toolUse)
+                    : JSON.stringify(content?.toolResult)
+              }
+              return result
+            })
+            addSpanEvent(span, Event.GEN_AI_COMPLETION, { 'gen_ai.completion': JSON.stringify(responses) })
+            const responseAttributes: Partial<LLMSpanAttributes> = {
+              'gen_ai.response.model': args[0]?.input.modelId,
+              'gen_ai.usage.input_tokens': resp.usage.inputTokens,
+              'gen_ai.usage.output_tokens': resp.usage.outputTokens,
+              'gen_ai.usage.total_tokens': resp.usage.totalTokens
+            }
+            span.setAttributes({ ...attributes, ...responseAttributes })
+            span.setStatus({ code: SpanStatusCode.OK })
+            return resp
+          } else {
+            await processConverseStream(resp.stream, span, attributes)
+            return resp
+          }
+        } catch (error: any) {
+          span.recordException(error as Exception)
+          span.setStatus({ code: SpanStatusCode.ERROR })
+          throw new LangtraceSdkError(error.message as string, error.stack as string)
+        } finally {
+          span.end()
+        }
+      }
+    )
+  }
+}
+
+async function * processConverseStream (stream: any, span: Span, inputAttributes: Partial<LLMSpanAttributes>): any {
+  const customAttributes = context.active().getValue(LANGTRACE_ADDITIONAL_SPAN_ATTRIBUTES_KEY) ?? {}
+  addSpanEvent(span, Event.STREAM_START)
+
+  const result: string[] = []
+  let completionTokens = 0
+  let promptTokens = 0
+
+  try {
+    for await (const chunk of stream) {
+      const deserializedChunk = await stream.options.deserializer(chunk)
+      // eslint-disable-next-line @typescript-eslint/strict-boolean-expressions
+      const content = deserializedChunk.contentBlockDelta?.delta?.text || ''
+      promptTokens = deserializedChunk.metadata?.usage?.inputTokens ?? 0
+      completionTokens = deserializedChunk.metadata?.usage?.outputTokens ?? 0
+      result.push(content as string)
+
+      yield deserializedChunk
+    }
+
+    addSpanEvent(span, Event.GEN_AI_COMPLETION, { 'gen_ai.completion': result.length > 0 ? JSON.stringify([{ role: 'assistant', content: result.join('') }]) : undefined })
+    span.setStatus({ code: SpanStatusCode.OK })
+    const stream_attributes: Partial<LLMSpanAttributes> = {
+      'gen_ai.usage.input_tokens': promptTokens,
+      'gen_ai.usage.output_tokens': completionTokens,
+      'gen_ai.usage.total_tokens': promptTokens + completionTokens,
+      'gen_ai.request.stream': true,
+      ...customAttributes
+    }
+    span.setAttributes({ ...inputAttributes, ...stream_attributes })
+    addSpanEvent(span, Event.STREAM_END)
+  } catch (error: any) {
+    span.recordException(error as Exception)
+    span.setStatus({ code: SpanStatusCode.ERROR })
+    throw new LangtraceSdkError(error.message as string, error.stack as string)
+  } finally {
+    span.end()
+  }
+}
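Because `sendCommand` spreads whatever sits under `LANGTRACE_ADDITIONAL_SPAN_ATTRIBUTES_KEY` into the span attributes (and honors a `langtrace.span.name` override), callers can decorate Bedrock spans with `withAdditionalAttributes`, as the vercel example in this commit does for the AI SDK. A sketch, assuming the helper takes the attribute map as its second argument and that `client` and `command` come from the converse example:

```typescript
import { withAdditionalAttributes } from '@langtrace-utils/instrumentation'

export async function converseWithAttributes (): Promise<void> {
  // Hypothetical usage: attributes set here ride along on the active
  // context and land on the Bedrock Converse span created by sendCommand.
  const response = await withAdditionalAttributes(
    async () => await client.send(command),
    { 'langtrace.span.name': 'bedrock-converse-demo', 'user.id': 'abc' }
  )
  console.log(response.output?.message?.content?.[0]?.text)
}
```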
