Skip to content

Commit 47e9c08

Browse files
committed
add AI21 Jamba and Mistral AI model test cases
1 parent fcc6280 commit 47e9c08

File tree

3 files changed

+137
-0
lines changed

3 files changed

+137
-0
lines changed

test/contract-tests/images/applications/TestSimpleApp.AWSSDK.Core/BedrockTests.cs

Lines changed: 77 additions & 0 deletions
Original file line number · Diff line number · Diff line change
@@ -160,6 +160,83 @@ public object InvokeModelCohereCommandResponse()
160160
};
161161
}
162162

163+
/// <summary>
/// Invokes the AI21 Jamba 1.5 Large model with a fixed chat-style request body
/// (temperature 0.123, top_p 0.456, max_tokens 123) so the contract test can
/// assert exact request span attributes.
/// </summary>
/// <returns>
/// The in-flight <see cref="Task"/> from <c>InvokeModelAsync</c>. The original
/// signature was <c>void</c>, which fire-and-forgot the call and left any
/// exception unobserved; returning the task lets callers (and the ASP.NET
/// minimal-API endpoint that maps this method) await completion. Existing
/// callers that ignore the result still compile unchanged.
/// </returns>
public Task InvokeModelAi21Jamba()
{
    // Serialize the fixed request payload up front so the InvokeModelRequest
    // initializer stays readable.
    var requestBody = JsonSerializer.Serialize(new
    {
        messages = new object[]
        {
            new
            {
                role = "USER",
                content = "sample input text",
            },
        },
        temperature = 0.123,
        top_p = 0.456,
        max_tokens = 123,
    });

    return bedrockRuntime.InvokeModelAsync(new InvokeModelRequest
    {
        ModelId = "ai21.jamba-1-5-large-v1:0",
        Body = new MemoryStream(Encoding.UTF8.GetBytes(requestBody)),
        ContentType = "application/json",
    });
}
186+
187+
/// <summary>
/// Canned AI21 Jamba response payload served by the fake Bedrock endpoint.
/// The fixed finish reason and token counts (prompt 456, completion 789) are
/// what the contract test asserts as span attributes.
/// </summary>
public object InvokeModelAi21JambaResponse() => new
{
    choices = new object[]
    {
        new { finish_reason = "finish_reason" },
    },
    usage = new
    {
        prompt_tokens = 456,
        completion_tokens = 789,
    },
};
205+
206+
/// <summary>
/// Invokes the Mistral 7B Instruct model with a fixed prompt-style request body
/// (temperature 0.123, top_p 0.456, max_tokens 123) so the contract test can
/// assert exact request span attributes.
/// </summary>
/// <returns>
/// The in-flight <see cref="Task"/> from <c>InvokeModelAsync</c>. The original
/// signature was <c>void</c>, which fire-and-forgot the call and left any
/// exception unobserved; returning the task lets callers await completion.
/// Existing callers that ignore the result still compile unchanged.
/// </returns>
public Task InvokeModelMistralAi()
{
    // Serialize the fixed request payload up front so the InvokeModelRequest
    // initializer stays readable.
    var requestBody = JsonSerializer.Serialize(new
    {
        // prompt is 72 chars long, input_tokens should be estimated as ceil(72/6) = 12
        prompt = "sample input text sample input text sample input text sample input text ",
        temperature = 0.123,
        top_p = 0.456,
        max_tokens = 123,
    });

    return bedrockRuntime.InvokeModelAsync(new InvokeModelRequest
    {
        ModelId = "mistral.mistral-7b-instruct-v0:2",
        Body = new MemoryStream(Encoding.UTF8.GetBytes(requestBody)),
        ContentType = "application/json",
    });
}
223+
224+
/// <summary>
/// Canned Mistral response payload served by the fake Bedrock endpoint. The
/// stop reason and output text length are fixed so the contract test can
/// assert exact span attribute values.
/// </summary>
public object InvokeModelMistralAiResponse() => new
{
    outputs = new object[]
    {
        new
        {
            // response is 56 chars long, output_tokens should be estimated as ceil(56/6) = 10
            text = "sample output text sample output text sample output text",
            stop_reason = "finish_reason",
        },
    },
};
239+
163240
public Task<GetAgentResponse> GetAgent()
164241
{
165242
return bedrockAgent.GetAgentAsync(new GetAgentRequest

test/contract-tests/images/applications/TestSimpleApp.AWSSDK.Core/Program.cs

Lines changed: 10 additions & 0 deletions
Original file line number · Diff line number · Diff line change
@@ -141,6 +141,14 @@
141141
.WithName("invoke-model-command")
142142
.WithOpenApi();
143143

144+
app.MapGet("bedrock/invokemodel/invoke-model-jamba", (BedrockTests bedrock) => bedrock.InvokeModelAi21Jamba())
145+
.WithName("invoke-model-jamba")
146+
.WithOpenApi();
147+
148+
app.MapGet("bedrock/invokemodel/invoke-model-mistral", (BedrockTests bedrock) => bedrock.InvokeModelMistralAi())
149+
.WithName("invoke-model-mistral")
150+
.WithOpenApi();
151+
144152
app.MapGet("bedrock/getagent/get-agent", (BedrockTests bedrock) => bedrock.GetAgent())
145153
.WithName("get-agent")
146154
.WithOpenApi();
@@ -168,6 +176,8 @@
168176
app.MapPost("model/anthropic.claude-v2:1/invoke", (BedrockTests bedrock) => bedrock.InvokeModelAnthropicClaudeResponse());
169177
app.MapPost("model/meta.llama3-8b-instruct-v1:0/invoke", (BedrockTests bedrock) => bedrock.InvokeModelMetaLlamaResponse());
170178
app.MapPost("model/cohere.command-r-v1:0/invoke", (BedrockTests bedrock) => bedrock.InvokeModelCohereCommandResponse());
179+
app.MapPost("model/ai21.jamba-1-5-large-v1:0/invoke", (BedrockTests bedrock) => bedrock.InvokeModelAi21JambaResponse());
180+
app.MapPost("model/mistral.mistral-7b-instruct-v0:2/invoke", (BedrockTests bedrock) => bedrock.InvokeModelMistralAiResponse());
171181
app.MapGet("agents/test-agent", (BedrockTests bedrock) => bedrock.GetAgentResponse());
172182
app.MapGet("knowledgebases/test-knowledge-base", (BedrockTests bedrock) => bedrock.GetKnowledgeBaseResponse());
173183
app.MapGet("knowledgebases/test-knowledge-base/datasources/test-data-source", (BedrockTests bedrock) => bedrock.GetDataSourceResponse());

test/contract-tests/tests/test/amazon/awssdk/awssdk_test.py

Lines changed: 50 additions & 0 deletions
Original file line number · Diff line number · Diff line change
@@ -418,6 +418,54 @@ def test_bedrock_runtime_invoke_model_command(self):
418418
},
419419
span_name="Bedrock Runtime.InvokeModel",
420420
)
421+
422+
def test_bedrock_runtime_invoke_model_jamba(self):
    """InvokeModel on AI21 Jamba: verify span attributes for the request
    parameters and the canned usage/finish-reason response fields."""
    model_id = "ai21.jamba-1-5-large-v1:0"
    # Expected values mirror the fixed request body and canned response
    # served by the TestSimpleApp Bedrock endpoints.
    expected_attributes = {
        _GEN_AI_REQUEST_MODEL: model_id,
        _GEN_AI_REQUEST_TEMPERATURE: 0.123,
        _GEN_AI_REQUEST_TOP_P: 0.456,
        _GEN_AI_REQUEST_MAX_TOKENS: 123,
        _GEN_AI_USAGE_INPUT_TOKENS: 456,
        _GEN_AI_USAGE_OUTPUT_TOKENS: 789,
        _GEN_AI_RESPONSE_FINISH_REASONS: ["finish_reason"],
    }
    self.do_test_requests(
        "bedrock/invokemodel/invoke-model-jamba",
        "GET",
        200,
        0,
        0,
        rpc_service="Bedrock Runtime",
        remote_service="AWS::BedrockRuntime",
        remote_operation="InvokeModel",
        remote_resource_type="AWS::Bedrock::Model",
        remote_resource_identifier=model_id,
        request_specific_attributes=expected_attributes,
        span_name="Bedrock Runtime.InvokeModel",
    )
445+
446+
def test_bedrock_runtime_invoke_model_mistral(self):
    """InvokeModel on Mistral 7B Instruct: verify span attributes for the
    request parameters and the token counts estimated from text length
    (input ceil(72/6) = 12, output ceil(56/6) = 10)."""
    model_id = "mistral.mistral-7b-instruct-v0:2"
    # Expected values mirror the fixed request body and canned response
    # served by the TestSimpleApp Bedrock endpoints.
    expected_attributes = {
        _GEN_AI_REQUEST_MODEL: model_id,
        _GEN_AI_REQUEST_TEMPERATURE: 0.123,
        _GEN_AI_REQUEST_TOP_P: 0.456,
        _GEN_AI_REQUEST_MAX_TOKENS: 123,
        _GEN_AI_USAGE_INPUT_TOKENS: 12,
        _GEN_AI_USAGE_OUTPUT_TOKENS: 10,
        _GEN_AI_RESPONSE_FINISH_REASONS: ["finish_reason"],
    }
    self.do_test_requests(
        "bedrock/invokemodel/invoke-model-mistral",
        "GET",
        200,
        0,
        0,
        rpc_service="Bedrock Runtime",
        remote_service="AWS::BedrockRuntime",
        remote_operation="InvokeModel",
        remote_resource_type="AWS::Bedrock::Model",
        remote_resource_identifier=model_id,
        request_specific_attributes=expected_attributes,
        span_name="Bedrock Runtime.InvokeModel",
    )
)
421469

422470
def test_bedrock_agent_runtime_invoke_agent(self):
423471
self.do_test_requests(
@@ -687,6 +735,8 @@ def _filter_bedrock_metrics(self, target_metrics: List[Metric]):
687735
"POST model/anthropic.claude-v2:1/invoke",
688736
"POST model/meta.llama3-8b-instruct-v1:0/invoke",
689737
"POST model/cohere.command-r-v1:0/invoke",
738+
"POST model/ai21.jamba-1-5-large-v1:0/invoke",
739+
"POST model/mistral.mistral-7b-instruct-v0:2/invoke",
690740
"POST knowledgebases/test-knowledge-base/retrieve"
691741
}
692742
for metric in target_metrics:

0 commit comments

Comments (0)