
Commit 3a57302

fix: add strict undefined checks

1 parent: c730602
File tree: 1 file changed (+41 -38)
aws-distro-opentelemetry-node-autoinstrumentation/src/patches/aws/services/bedrock.ts

Lines changed: 41 additions & 38 deletions
@@ -214,63 +214,66 @@ export class BedrockRuntimeServiceExtension implements ServiceExtension {
     if (request.commandInput?.body) {
       const requestBody = JSON.parse(request.commandInput.body);
       if (modelId.includes('amazon.titan')) {
-        if (requestBody.textGenerationConfig) {
-          const config = requestBody.textGenerationConfig;
-          spanAttributes[AwsSpanProcessingUtil.GEN_AI_REQUEST_TEMPERATURE] = config.temperature;
-          spanAttributes[AwsSpanProcessingUtil.GEN_AI_REQUEST_TOP_P] = config.topP;
-          spanAttributes[AwsSpanProcessingUtil.GEN_AI_REQUEST_MAX_TOKENS] = config.maxTokenCount;
+        if (requestBody.textGenerationConfig?.temperature !== undefined) {
+          spanAttributes[AwsSpanProcessingUtil.GEN_AI_REQUEST_TEMPERATURE] = requestBody.textGenerationConfig.temperature;
+        }
+        if (requestBody.textGenerationConfig?.topP !== undefined) {
+          spanAttributes[AwsSpanProcessingUtil.GEN_AI_REQUEST_TOP_P] = requestBody.textGenerationConfig.topP;
+        }
+        if (requestBody.textGenerationConfig?.maxTokenCount !== undefined) {
+          spanAttributes[AwsSpanProcessingUtil.GEN_AI_REQUEST_MAX_TOKENS] = requestBody.textGenerationConfig.maxTokenCount;
         }
       } else if (modelId.includes('anthropic.claude')) {
-        if (requestBody.max_tokens) {
+        if (requestBody.max_tokens !== undefined) {
           spanAttributes[AwsSpanProcessingUtil.GEN_AI_REQUEST_MAX_TOKENS] = requestBody.max_tokens;
         }
-        if (requestBody.temperature) {
+        if (requestBody.temperature !== undefined) {
           spanAttributes[AwsSpanProcessingUtil.GEN_AI_REQUEST_TEMPERATURE] = requestBody.temperature;
         }
-        if (requestBody.top_p) {
+        if (requestBody.top_p !== undefined) {
           spanAttributes[AwsSpanProcessingUtil.GEN_AI_REQUEST_TOP_P] = requestBody.top_p;
         }
       } else if (modelId.includes('meta.llama')) {
-        if (requestBody.max_gen_len) {
+        if (requestBody.max_gen_len !== undefined) {
           spanAttributes[AwsSpanProcessingUtil.GEN_AI_REQUEST_MAX_TOKENS] = requestBody.max_gen_len;
         }
-        if (requestBody.temperature) {
+        if (requestBody.temperature !== undefined) {
           spanAttributes[AwsSpanProcessingUtil.GEN_AI_REQUEST_TEMPERATURE] = requestBody.temperature;
         }
-        if (requestBody.top_p) {
+        if (requestBody.top_p !== undefined) {
           spanAttributes[AwsSpanProcessingUtil.GEN_AI_REQUEST_TOP_P] = requestBody.top_p;
         }
       } else if (modelId.includes('cohere.command')) {
-        if (requestBody.max_tokens) {
+        if (requestBody.max_tokens !== undefined) {
           spanAttributes[AwsSpanProcessingUtil.GEN_AI_REQUEST_MAX_TOKENS] = requestBody.max_tokens;
         }
-        if (requestBody.temperature) {
+        if (requestBody.temperature !== undefined) {
           spanAttributes[AwsSpanProcessingUtil.GEN_AI_REQUEST_TEMPERATURE] = requestBody.temperature;
         }
-        if (requestBody.p) {
+        if (requestBody.p !== undefined) {
           spanAttributes[AwsSpanProcessingUtil.GEN_AI_REQUEST_TOP_P] = requestBody.p;
         }
       } else if (modelId.includes('ai21.jamba')) {
-        if (requestBody.max_tokens) {
+        if (requestBody.max_tokens !== undefined) {
           spanAttributes[AwsSpanProcessingUtil.GEN_AI_REQUEST_MAX_TOKENS] = requestBody.max_tokens;
         }
-        if (requestBody.temperature) {
+        if (requestBody.temperature !== undefined) {
           spanAttributes[AwsSpanProcessingUtil.GEN_AI_REQUEST_TEMPERATURE] = requestBody.temperature;
         }
-        if (requestBody.top_p) {
+        if (requestBody.top_p !== undefined) {
           spanAttributes[AwsSpanProcessingUtil.GEN_AI_REQUEST_TOP_P] = requestBody.top_p;
         }
       } else if (modelId.includes('mistral.mistral')) {
-        if (requestBody.prompt) {
+        if (requestBody.prompt !== undefined) {
           spanAttributes[AwsSpanProcessingUtil.GEN_AI_USAGE_INPUT_TOKENS] = Math.ceil(requestBody.prompt.length / 6);
         }
-        if (requestBody.max_tokens) {
+        if (requestBody.max_tokens !== undefined) {
           spanAttributes[AwsSpanProcessingUtil.GEN_AI_REQUEST_MAX_TOKENS] = requestBody.max_tokens;
         }
-        if (requestBody.temperature) {
+        if (requestBody.temperature !== undefined) {
           spanAttributes[AwsSpanProcessingUtil.GEN_AI_REQUEST_TEMPERATURE] = requestBody.temperature;
         }
-        if (requestBody.top_p) {
+        if (requestBody.top_p !== undefined) {
           spanAttributes[AwsSpanProcessingUtil.GEN_AI_REQUEST_TOP_P] = requestBody.top_p;
         }
       }
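
The request-side hunk above replaces truthy guards such as if (requestBody.temperature) with explicit !== undefined checks, so legitimate falsy values (a temperature of 0, for example) are no longer silently dropped from the span attributes. A minimal standalone TypeScript sketch of that difference, using a plain record and a literal attribute key standing in for the AwsSpanProcessingUtil constant (both are illustrative, not the committed code):

// Standalone sketch: why the strict undefined check matters when a caller passes 0.
type Attributes = Record<string, number>;

const requestBody: { temperature?: number; top_p?: number } = { temperature: 0, top_p: 0.9 };

const before: Attributes = {};
if (requestBody.temperature) {
  // old behaviour: 0 is falsy, so the attribute is silently skipped
  before['gen_ai.request.temperature'] = requestBody.temperature;
}

const after: Attributes = {};
if (requestBody.temperature !== undefined) {
  // new behaviour: 0 passes the check and is recorded
  after['gen_ai.request.temperature'] = requestBody.temperature;
}

console.log(before); // {}
console.log(after);  // { 'gen_ai.request.temperature': 0 }
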
@@ -290,72 +293,72 @@ export class BedrockRuntimeServiceExtension implements ServiceExtension {
     const decodedResponseBody = new TextDecoder().decode(response.data.body);
     const responseBody = JSON.parse(decodedResponseBody);
     if (currentModelId.includes('amazon.titan')) {
-      if (responseBody.inputTextTokenCount) {
+      if (responseBody.inputTextTokenCount !== undefined) {
        span.setAttribute(AwsSpanProcessingUtil.GEN_AI_USAGE_INPUT_TOKENS, responseBody.inputTextTokenCount);
       }
-      if (responseBody.results?.[0]?.tokenCount) {
+      if (responseBody.results?.[0]?.tokenCount !== undefined) {
        span.setAttribute(AwsSpanProcessingUtil.GEN_AI_USAGE_OUTPUT_TOKENS, responseBody.results[0].tokenCount);
       }
-      if (responseBody.results?.[0]?.completionReason) {
+      if (responseBody.results?.[0]?.completionReason !== undefined) {
        span.setAttribute(AwsSpanProcessingUtil.GEN_AI_RESPONSE_FINISH_REASONS, [
          responseBody.results[0].completionReason,
        ]);
       }
     } else if (currentModelId.includes('anthropic.claude')) {
-      if (responseBody.usage?.input_tokens) {
+      if (responseBody.usage?.input_tokens !== undefined) {
        span.setAttribute(AwsSpanProcessingUtil.GEN_AI_USAGE_INPUT_TOKENS, responseBody.usage.input_tokens);
       }
-      if (responseBody.usage?.output_tokens) {
+      if (responseBody.usage?.output_tokens !== undefined) {
        span.setAttribute(AwsSpanProcessingUtil.GEN_AI_USAGE_OUTPUT_TOKENS, responseBody.usage.output_tokens);
       }
-      if (responseBody.stop_reason) {
+      if (responseBody.stop_reason !== undefined) {
        span.setAttribute(AwsSpanProcessingUtil.GEN_AI_RESPONSE_FINISH_REASONS, [responseBody.stop_reason]);
       }
     } else if (currentModelId.includes('meta.llama')) {
-      if (responseBody.prompt_token_count) {
+      if (responseBody.prompt_token_count !== undefined) {
        span.setAttribute(AwsSpanProcessingUtil.GEN_AI_USAGE_INPUT_TOKENS, responseBody.prompt_token_count);
       }
-      if (responseBody.generation_token_count) {
+      if (responseBody.generation_token_count !== undefined) {
        span.setAttribute(AwsSpanProcessingUtil.GEN_AI_USAGE_OUTPUT_TOKENS, responseBody.generation_token_count);
       }
-      if (responseBody.stop_reason) {
+      if (responseBody.stop_reason !== undefined) {
        span.setAttribute(AwsSpanProcessingUtil.GEN_AI_RESPONSE_FINISH_REASONS, [responseBody.stop_reason]);
       }
     } else if (currentModelId.includes('cohere.command')) {
-      if (responseBody.prompt) {
+      if (responseBody.prompt !== undefined) {
        span.setAttribute(AwsSpanProcessingUtil.GEN_AI_USAGE_INPUT_TOKENS, Math.ceil(responseBody.prompt.length / 6));
       }
-      if (responseBody.generations?.[0]?.text) {
+      if (responseBody.generations?.[0]?.text !== undefined) {
        span.setAttribute(
          AwsSpanProcessingUtil.GEN_AI_USAGE_OUTPUT_TOKENS,
          Math.ceil(responseBody.generations[0].text.length / 6)
        );
       }
-      if (responseBody.generations?.[0]?.finish_reason) {
+      if (responseBody.generations?.[0]?.finish_reason !== undefined) {
        span.setAttribute(AwsSpanProcessingUtil.GEN_AI_RESPONSE_FINISH_REASONS, [
          responseBody.generations[0].finish_reason,
        ]);
       }
     } else if (currentModelId.includes('ai21.jamba')) {
-      if (responseBody.usage?.prompt_tokens) {
+      if (responseBody.usage?.prompt_tokens !== undefined) {
        span.setAttribute(AwsSpanProcessingUtil.GEN_AI_USAGE_INPUT_TOKENS, responseBody.usage.prompt_tokens);
       }
-      if (responseBody.usage?.completion_tokens) {
+      if (responseBody.usage?.completion_tokens !== undefined) {
        span.setAttribute(AwsSpanProcessingUtil.GEN_AI_USAGE_OUTPUT_TOKENS, responseBody.usage.completion_tokens);
       }
-      if (responseBody.choices?.[0]?.finish_reason) {
+      if (responseBody.choices?.[0]?.finish_reason !== undefined) {
        span.setAttribute(AwsSpanProcessingUtil.GEN_AI_RESPONSE_FINISH_REASONS, [
          responseBody.choices[0].finish_reason,
        ]);
       }
     } else if (currentModelId.includes('mistral.mistral')) {
-      if (responseBody.outputs?.[0]?.text) {
+      if (responseBody.outputs?.[0]?.text !== undefined) {
        span.setAttribute(
          AwsSpanProcessingUtil.GEN_AI_USAGE_OUTPUT_TOKENS,
          Math.ceil(responseBody.outputs[0].text.length / 6)
        );
       }
-      if (responseBody.outputs?.[0]?.stop_reason) {
+      if (responseBody.outputs?.[0]?.stop_reason !== undefined) {
        span.setAttribute(AwsSpanProcessingUtil.GEN_AI_RESPONSE_FINISH_REASONS, [
          responseBody.outputs[0].stop_reason,
        ]);
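
The response-side hunk repeats the same guard-then-set pattern for every model family. A hypothetical consolidation, not part of this commit, that expresses the guard once against the OpenTelemetry Span API; the setIfDefined name and the literal attribute keys in the usage comments are illustrative assumptions:

import { AttributeValue, Span } from '@opentelemetry/api';

// Hypothetical helper (illustrative only): set a span attribute only when the
// value extracted from the model response is actually present.
function setIfDefined(span: Span, key: string, value: AttributeValue | undefined): void {
  if (value !== undefined) {
    span.setAttribute(key, value);
  }
}

// Usage sketch against the same responseBody shape the diff parses:
//   setIfDefined(span, 'gen_ai.usage.input_tokens', responseBody.inputTextTokenCount);
//   setIfDefined(span, 'gen_ai.usage.output_tokens', responseBody.results?.[0]?.tokenCount);

Either form keeps the commit's behaviour: attributes are recorded for 0 or empty-string values but skipped when the field is absent from the parsed body.
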
