Commit c25d7de

fix: mistral ai model conditional
1 parent a3355ea commit c25d7de

File tree

  • aws-distro-opentelemetry-node-autoinstrumentation/src/patches/aws/services/bedrock.ts

1 file changed: +2 −2 lines changed

aws-distro-opentelemetry-node-autoinstrumentation/src/patches/aws/services/bedrock.ts

Lines changed: 2 additions & 2 deletions
@@ -284,7 +284,7 @@ export class BedrockRuntimeServiceExtension implements ServiceExtension {
       if (requestBody.top_p !== undefined) {
         spanAttributes[AwsSpanProcessingUtil.GEN_AI_REQUEST_TOP_P] = requestBody.top_p;
       }
-    } else if (modelId.includes('mistral.mistral')) {
+    } else if (modelId.includes('mistral')) {
       if (requestBody.prompt !== undefined) {
         // NOTE: We approximate the token count since this value is not directly available in the body
         // According to Bedrock docs they use (total_chars / 6) to approximate token count for pricing.
@@ -386,7 +386,7 @@ export class BedrockRuntimeServiceExtension implements ServiceExtension {
           responseBody.choices[0].finish_reason,
         ]);
       }
-    } else if (currentModelId.includes('mistral.mistral')) {
+    } else if (currentModelId.includes('mistral')) {
       if (responseBody.outputs?.[0]?.text !== undefined) {
         span.setAttribute(
           AwsSpanProcessingUtil.GEN_AI_USAGE_OUTPUT_TOKENS,
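
The commit message does not spell out the motivation, but the practical effect of the looser substring check is that it also matches Mistral model IDs that do not start with the mistral.mistral prefix (for example Mixtral-style IDs). Below is a minimal TypeScript sketch of the before/after behavior; the sample model IDs are illustrative Bedrock-style identifiers assumed for this example, not taken from the commit itself.

// Sketch of the conditional change. The model IDs below are illustrative
// Bedrock-style identifiers (assumed for the example), not an exhaustive list.
const sampleModelIds: string[] = [
  'mistral.mistral-7b-instruct-v0:2',        // matched by both the old and new checks
  'mistral.mixtral-8x7b-instruct-v0:1',      // matched only by the new check
  'anthropic.claude-3-sonnet-20240229-v1:0', // matched by neither check
];

for (const modelId of sampleModelIds) {
  const oldCheck = modelId.includes('mistral.mistral'); // condition before this commit
  const newCheck = modelId.includes('mistral');         // condition after this commit
  console.log(`${modelId}: old=${oldCheck} new=${newCheck}`);
}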
