Skip to content

Commit 60eec28

Browse files
committed
adding tests for the rest of the models
1 parent b53d557 commit 60eec28

File tree

2 files changed

+136
-3
lines changed

2 files changed

+136
-3
lines changed

contract-tests/images/applications/aws-sdk/server.js

Lines changed: 47 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -577,7 +577,7 @@ async function handleBedrockRequest(req, res, path) {
577577
body = JSON.stringify({
578578
anthropic_version: 'bedrock-2023-05-31',
579579
max_tokens: 1000,
580-
temperature: 1.1,
580+
temperature: 0.99,
581581
top_p: 1,
582582
messages: [
583583
{
@@ -586,7 +586,52 @@ async function handleBedrockRequest(req, res, path) {
586586
},
587587
],
588588
});
589-
}
589+
}
590+
591+
if (path.includes('meta.llama')) {
592+
modelId = 'meta.llama2-13b-chat-v1';
593+
body = JSON.stringify({
594+
prompt,
595+
max_gen_len: 512,
596+
temperature: 0.5,
597+
top_p: 0.9
598+
});
599+
}
600+
601+
if (path.includes('cohere.command')) {
602+
modelId = 'cohere.command-light-text-v14';
603+
body = JSON.stringify({
604+
prompt,
605+
max_tokens: 512,
606+
temperature: 0.5,
607+
p: 0.65,
608+
});
609+
}
610+
611+
if (path.includes('ai21.jamba')) {
612+
modelId = 'ai21.jamba-1-5-large-v1:0';
613+
body = JSON.stringify({
614+
messages: [
615+
{
616+
role: 'user',
617+
content: prompt,
618+
},
619+
],
620+
top_p: 0.8,
621+
temperature: 0.6,
622+
max_tokens: 512,
623+
});
624+
}
625+
626+
if (path.includes('mistral.mistral')) {
627+
modelId = 'mistral.mistral-7b-instruct-v0:2';
628+
body = JSON.stringify({
629+
prompt,
630+
max_tokens: 4096,
631+
temperature: 0.75,
632+
top_p: 0.99,
633+
});
634+
}
590635

591636
await bedrockRuntimeClient.send(
592637
new InvokeModelCommand({

contract-tests/tests/test/amazon/aws-sdk/aws_sdk_test.py

Lines changed: 89 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -447,12 +447,100 @@ def test_bedrock_runtime_invoke_model_anthropic_claude(self):
447447
request_specific_attributes={
448448
_GEN_AI_REQUEST_MODEL: 'anthropic.claude-v2:1',
449449
_GEN_AI_REQUEST_MAX_TOKENS: 1000,
450-
_GEN_AI_REQUEST_TEMPERATURE: 1.1,
450+
_GEN_AI_REQUEST_TEMPERATURE: 0.99,
451451
_GEN_AI_REQUEST_TOP_P: 1
452452
},
453453
span_name="BedrockRuntime.InvokeModel"
454454
)
455455

456+
def test_bedrock_runtime_invoke_model_meta_llama(self):
    """Contract test: InvokeModel against Meta Llama 2 emits the expected span.

    Drives the sample app's bedrock route and checks that the instrumentation
    records the GenAI request attributes taken from the request body.
    """
    # Attributes the instrumentation should extract from the InvokeModel body.
    expected_request_attrs = {
        _GEN_AI_REQUEST_MODEL: 'meta.llama2-13b-chat-v1',
        _GEN_AI_REQUEST_MAX_TOKENS: 512,
        _GEN_AI_REQUEST_TEMPERATURE: 0.5,
        _GEN_AI_REQUEST_TOP_P: 0.9,
    }
    self.do_test_requests(
        "bedrock/invokemodel/invoke-model/meta.llama2-13b-chat-v1",
        "GET",
        200,
        0,
        0,
        local_operation="GET /bedrock",
        rpc_service="BedrockRuntime",
        remote_service="AWS::BedrockRuntime",
        remote_operation="InvokeModel",
        remote_resource_type="AWS::Bedrock::Model",
        remote_resource_identifier='meta.llama2-13b-chat-v1',
        request_specific_attributes=expected_request_attrs,
        span_name="BedrockRuntime.InvokeModel",
    )
477+
478+
def test_bedrock_runtime_invoke_model_cohere_command(self):
    """Contract test: InvokeModel against Cohere Command Light emits the expected span.

    Drives the sample app's bedrock route and checks that the instrumentation
    records the GenAI request attributes taken from the request body.
    """
    # Attributes the instrumentation should extract from the InvokeModel body.
    expected_request_attrs = {
        _GEN_AI_REQUEST_MODEL: 'cohere.command-light-text-v14',
        _GEN_AI_REQUEST_MAX_TOKENS: 512,
        _GEN_AI_REQUEST_TEMPERATURE: 0.5,
        _GEN_AI_REQUEST_TOP_P: 0.65,
    }
    self.do_test_requests(
        "bedrock/invokemodel/invoke-model/cohere.command-light-text-v14",
        "GET",
        200,
        0,
        0,
        local_operation="GET /bedrock",
        rpc_service="BedrockRuntime",
        remote_service="AWS::BedrockRuntime",
        remote_operation="InvokeModel",
        remote_resource_type="AWS::Bedrock::Model",
        remote_resource_identifier='cohere.command-light-text-v14',
        request_specific_attributes=expected_request_attrs,
        span_name="BedrockRuntime.InvokeModel",
    )
499+
500+
def test_bedrock_runtime_invoke_model_ai21_jamba(self):
    """Contract test: InvokeModel against AI21 Jamba 1.5 Large emits the expected span.

    Drives the sample app's bedrock route and checks that the instrumentation
    records the GenAI request attributes taken from the request body.
    """
    # Attributes the instrumentation should extract from the InvokeModel body.
    expected_request_attrs = {
        _GEN_AI_REQUEST_MODEL: 'ai21.jamba-1-5-large-v1:0',
        _GEN_AI_REQUEST_MAX_TOKENS: 512,
        _GEN_AI_REQUEST_TEMPERATURE: 0.6,
        _GEN_AI_REQUEST_TOP_P: 0.8,
    }
    self.do_test_requests(
        "bedrock/invokemodel/invoke-model/ai21.jamba-1-5-large-v1:0",
        "GET",
        200,
        0,
        0,
        local_operation="GET /bedrock",
        rpc_service="BedrockRuntime",
        remote_service="AWS::BedrockRuntime",
        remote_operation="InvokeModel",
        remote_resource_type="AWS::Bedrock::Model",
        remote_resource_identifier='ai21.jamba-1-5-large-v1:0',
        request_specific_attributes=expected_request_attrs,
        span_name="BedrockRuntime.InvokeModel",
    )
521+
522+
def test_bedrock_runtime_invoke_model_mistral_mistral(self):
    """Contract test: InvokeModel against Mistral 7B Instruct emits the expected span.

    Drives the sample app's bedrock route and checks that the instrumentation
    records the GenAI request attributes taken from the request body.
    """
    # Attributes the instrumentation should extract from the InvokeModel body.
    expected_request_attrs = {
        _GEN_AI_REQUEST_MODEL: 'mistral.mistral-7b-instruct-v0:2',
        _GEN_AI_REQUEST_MAX_TOKENS: 4096,
        _GEN_AI_REQUEST_TEMPERATURE: 0.75,
        _GEN_AI_REQUEST_TOP_P: 0.99,
    }
    self.do_test_requests(
        "bedrock/invokemodel/invoke-model/mistral.mistral-7b-instruct-v0:2",
        "GET",
        200,
        0,
        0,
        local_operation="GET /bedrock",
        rpc_service="BedrockRuntime",
        remote_service="AWS::BedrockRuntime",
        remote_operation="InvokeModel",
        remote_resource_type="AWS::Bedrock::Model",
        remote_resource_identifier='mistral.mistral-7b-instruct-v0:2',
        request_specific_attributes=expected_request_attrs,
        span_name="BedrockRuntime.InvokeModel",
    )
543+
456544
def test_bedrock_get_guardrail(self):
457545
self.do_test_requests(
458546
"bedrock/getguardrail/get-guardrail",

0 commit comments

Comments
 (0)