Skip to content

Commit fc898fb

Browse files
committed
apply lint
1 parent 2061da2 commit fc898fb

File tree

2 files changed

+37
-31
lines changed

2 files changed

+37
-31
lines changed

aws-distro-opentelemetry-node-autoinstrumentation/src/aws-span-processing-util.ts

Lines changed: 6 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -47,12 +47,12 @@ export class AwsSpanProcessingUtil {
4747
// TODO: Use Semantic Conventions once upgraded
4848
static GEN_AI_REQUEST_MODEL: string = 'gen_ai.request.model';
4949
static GEN_AI_SYSTEM: string = 'gen_ai.system';
50-
static GEN_AI_REQUEST_MAX_TOKENS = "gen_ai.request.max_tokens"
51-
static GEN_AI_REQUEST_TEMPERATURE = "gen_ai.request.temperature"
52-
static GEN_AI_REQUEST_TOP_P = "gen_ai.request.top_p"
53-
static GEN_AI_RESPONSE_FINISH_REASONS = "gen_ai.response.finish_reasons"
54-
static GEN_AI_USAGE_INPUT_TOKENS = "gen_ai.usage.input_tokens"
55-
static GEN_AI_USAGE_OUTPUT_TOKENS = "gen_ai.usage.output_tokens"
50+
static GEN_AI_REQUEST_MAX_TOKENS: string = 'gen_ai.request.max_tokens';
51+
static GEN_AI_REQUEST_TEMPERATURE: string = 'gen_ai.request.temperature';
52+
static GEN_AI_REQUEST_TOP_P: string = 'gen_ai.request.top_p';
53+
static GEN_AI_RESPONSE_FINISH_REASONS: string = 'gen_ai.response.finish_reasons';
54+
static GEN_AI_USAGE_INPUT_TOKENS: string = 'gen_ai.usage.input_tokens';
55+
static GEN_AI_USAGE_OUTPUT_TOKENS: string = 'gen_ai.usage.output_tokens';
5656

5757
static getDialectKeywords(): string[] {
5858
return SQL_DIALECT_KEYWORDS_JSON.keywords;

aws-distro-opentelemetry-node-autoinstrumentation/src/patches/aws/services/bedrock.ts

Lines changed: 31 additions & 25 deletions
Original file line numberDiff line numberDiff line change
@@ -213,14 +213,14 @@ export class BedrockRuntimeServiceExtension implements ServiceExtension {
213213

214214
if (request.commandInput?.body) {
215215
const requestBody = JSON.parse(request.commandInput.body);
216-
if (modelId.includes("amazon.titan")) {
216+
if (modelId.includes('amazon.titan')) {
217217
if (requestBody.textGenerationConfig) {
218218
const config = requestBody.textGenerationConfig;
219219
spanAttributes[AwsSpanProcessingUtil.GEN_AI_REQUEST_TEMPERATURE] = config.temperature;
220220
spanAttributes[AwsSpanProcessingUtil.GEN_AI_REQUEST_TOP_P] = config.topP;
221221
spanAttributes[AwsSpanProcessingUtil.GEN_AI_REQUEST_MAX_TOKENS] = config.maxTokenCount;
222222
}
223-
} else if (modelId.includes("anthropic.claude")) {
223+
} else if (modelId.includes('anthropic.claude')) {
224224
if (requestBody.max_tokens) {
225225
spanAttributes[AwsSpanProcessingUtil.GEN_AI_REQUEST_MAX_TOKENS] = requestBody.max_tokens;
226226
}
@@ -230,7 +230,7 @@ export class BedrockRuntimeServiceExtension implements ServiceExtension {
230230
if (requestBody.top_p) {
231231
spanAttributes[AwsSpanProcessingUtil.GEN_AI_REQUEST_TOP_P] = requestBody.top_p;
232232
}
233-
} else if (modelId.includes("meta.llama")) {
233+
} else if (modelId.includes('meta.llama')) {
234234
if (requestBody.max_gen_len) {
235235
spanAttributes[AwsSpanProcessingUtil.GEN_AI_REQUEST_MAX_TOKENS] = requestBody.max_gen_len;
236236
}
@@ -240,7 +240,7 @@ export class BedrockRuntimeServiceExtension implements ServiceExtension {
240240
if (requestBody.top_p) {
241241
spanAttributes[AwsSpanProcessingUtil.GEN_AI_REQUEST_TOP_P] = requestBody.top_p;
242242
}
243-
} else if (modelId.includes("cohere.command")) {
243+
} else if (modelId.includes('cohere.command')) {
244244
if (requestBody.max_tokens) {
245245
spanAttributes[AwsSpanProcessingUtil.GEN_AI_REQUEST_MAX_TOKENS] = requestBody.max_tokens;
246246
}
@@ -249,8 +249,8 @@ export class BedrockRuntimeServiceExtension implements ServiceExtension {
249249
}
250250
if (requestBody.p) {
251251
spanAttributes[AwsSpanProcessingUtil.GEN_AI_REQUEST_TOP_P] = requestBody.p;
252-
}
253-
} else if (modelId.includes("ai21.jamba")) {
252+
}
253+
} else if (modelId.includes('ai21.jamba')) {
254254
if (requestBody.max_tokens) {
255255
spanAttributes[AwsSpanProcessingUtil.GEN_AI_REQUEST_MAX_TOKENS] = requestBody.max_tokens;
256256
}
@@ -260,7 +260,7 @@ export class BedrockRuntimeServiceExtension implements ServiceExtension {
260260
if (requestBody.top_p) {
261261
spanAttributes[AwsSpanProcessingUtil.GEN_AI_REQUEST_TOP_P] = requestBody.top_p;
262262
}
263-
} else if (modelId.includes("mistral.mistral")) {
263+
} else if (modelId.includes('mistral.mistral')) {
264264
if (requestBody.prompt) {
265265
spanAttributes[AwsSpanProcessingUtil.GEN_AI_USAGE_INPUT_TOKENS] = Math.ceil(requestBody.prompt.length / 6);
266266
}
@@ -274,7 +274,7 @@ export class BedrockRuntimeServiceExtension implements ServiceExtension {
274274
spanAttributes[AwsSpanProcessingUtil.GEN_AI_REQUEST_TOP_P] = requestBody.top_p;
275275
}
276276
}
277-
}
277+
}
278278

279279
return {
280280
isIncoming,
@@ -289,17 +289,19 @@ export class BedrockRuntimeServiceExtension implements ServiceExtension {
289289
if (response.data?.body) {
290290
const decodedResponseBody = new TextDecoder().decode(response.data.body);
291291
const responseBody = JSON.parse(decodedResponseBody);
292-
if (currentModelId.includes("amazon.titan")) {
292+
if (currentModelId.includes('amazon.titan')) {
293293
if (responseBody.inputTextTokenCount) {
294294
span.setAttribute(AwsSpanProcessingUtil.GEN_AI_USAGE_INPUT_TOKENS, responseBody.inputTextTokenCount);
295295
}
296296
if (responseBody.results?.[0]?.tokenCount) {
297297
span.setAttribute(AwsSpanProcessingUtil.GEN_AI_USAGE_OUTPUT_TOKENS, responseBody.results[0].tokenCount);
298298
}
299299
if (responseBody.results?.[0]?.completionReason) {
300-
span.setAttribute(AwsSpanProcessingUtil.GEN_AI_RESPONSE_FINISH_REASONS, [responseBody.results[0].completionReason]);
300+
span.setAttribute(AwsSpanProcessingUtil.GEN_AI_RESPONSE_FINISH_REASONS, [
301+
responseBody.results[0].completionReason,
302+
]);
301303
}
302-
} else if (currentModelId.includes("anthropic.claude")) {
304+
} else if (currentModelId.includes('anthropic.claude')) {
303305
if (responseBody.usage?.input_tokens) {
304306
span.setAttribute(AwsSpanProcessingUtil.GEN_AI_USAGE_INPUT_TOKENS, responseBody.usage.input_tokens);
305307
}
@@ -309,7 +311,7 @@ export class BedrockRuntimeServiceExtension implements ServiceExtension {
309311
if (responseBody.stop_reason) {
310312
span.setAttribute(AwsSpanProcessingUtil.GEN_AI_RESPONSE_FINISH_REASONS, [responseBody.stop_reason]);
311313
}
312-
} else if (currentModelId.includes("meta.llama")) {
314+
} else if (currentModelId.includes('meta.llama')) {
313315
if (responseBody.prompt_token_count) {
314316
span.setAttribute(AwsSpanProcessingUtil.GEN_AI_USAGE_INPUT_TOKENS, responseBody.prompt_token_count);
315317
}
@@ -319,40 +321,44 @@ export class BedrockRuntimeServiceExtension implements ServiceExtension {
319321
if (responseBody.stop_reason) {
320322
span.setAttribute(AwsSpanProcessingUtil.GEN_AI_RESPONSE_FINISH_REASONS, [responseBody.stop_reason]);
321323
}
322-
} else if (currentModelId.includes("cohere.command")) {
324+
} else if (currentModelId.includes('cohere.command')) {
323325
if (responseBody.prompt) {
324326
span.setAttribute(AwsSpanProcessingUtil.GEN_AI_USAGE_INPUT_TOKENS, Math.ceil(responseBody.prompt.length / 6));
325327
}
326328
if (responseBody.generations?.[0]?.text) {
327-
span.setAttribute(AwsSpanProcessingUtil.GEN_AI_USAGE_OUTPUT_TOKENS, Math.ceil(responseBody.generations[0].text.length / 6));
329+
span.setAttribute(
330+
AwsSpanProcessingUtil.GEN_AI_USAGE_OUTPUT_TOKENS,
331+
Math.ceil(responseBody.generations[0].text.length / 6)
332+
);
328333
}
329334
if (responseBody.generations?.[0]?.finish_reason) {
330-
span.setAttribute(AwsSpanProcessingUtil.GEN_AI_RESPONSE_FINISH_REASONS, [responseBody.generations[0].finish_reason]);
335+
span.setAttribute(AwsSpanProcessingUtil.GEN_AI_RESPONSE_FINISH_REASONS, [
336+
responseBody.generations[0].finish_reason,
337+
]);
331338
}
332-
} else if (currentModelId.includes("ai21.jamba")) {
339+
} else if (currentModelId.includes('ai21.jamba')) {
333340
if (responseBody.usage?.prompt_tokens) {
334341
span.setAttribute(AwsSpanProcessingUtil.GEN_AI_USAGE_INPUT_TOKENS, responseBody.usage.prompt_tokens);
335342
}
336343
if (responseBody.usage?.completion_tokens) {
337344
span.setAttribute(AwsSpanProcessingUtil.GEN_AI_USAGE_OUTPUT_TOKENS, responseBody.usage.completion_tokens);
338345
}
339346
if (responseBody.choices?.[0]?.finish_reason) {
340-
span.setAttribute(AwsSpanProcessingUtil.GEN_AI_RESPONSE_FINISH_REASONS, [responseBody.choices[0].finish_reason]);
347+
span.setAttribute(AwsSpanProcessingUtil.GEN_AI_RESPONSE_FINISH_REASONS, [
348+
responseBody.choices[0].finish_reason,
349+
]);
341350
}
342-
} else if (currentModelId.includes("mistral.mistral")) {
351+
} else if (currentModelId.includes('mistral.mistral')) {
343352
if (responseBody.outputs?.[0]?.text) {
344353
span.setAttribute(
345-
AwsSpanProcessingUtil.GEN_AI_USAGE_OUTPUT_TOKENS,
354+
AwsSpanProcessingUtil.GEN_AI_USAGE_OUTPUT_TOKENS,
346355
Math.ceil(responseBody.outputs[0].text.length / 6)
347356
);
348357
}
349358
if (responseBody.outputs?.[0]?.stop_reason) {
350-
span.setAttribute(
351-
AwsSpanProcessingUtil.GEN_AI_RESPONSE_FINISH_REASONS,
352-
responseBody.outputs[0].stop_reason
353-
);
359+
span.setAttribute(AwsSpanProcessingUtil.GEN_AI_RESPONSE_FINISH_REASONS, responseBody.outputs[0].stop_reason);
354360
}
355361
}
356-
}
357-
}
362+
}
363+
}
358364
}

0 commit comments

Comments (0)