Skip to content

Commit d4d3af5

Browse files
author
awstools
committed
feat(client-bedrock-agent): Removing support for topK property in PromptModelInferenceConfiguration object, Making PromptTemplateConfiguration property as required, Limiting the maximum PromptVariant to 1
1 parent f4c5267 commit d4d3af5

File tree

13 files changed

+6
-32
lines changed

13 files changed

+6
-32
lines changed

clients/client-bedrock-agent/src/commands/CreateFlowCommand.ts

Lines changed: 0 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -91,7 +91,6 @@ export interface CreateFlowCommandOutput extends CreateFlowResponse, __MetadataB
9191
* text: { // PromptModelInferenceConfiguration
9292
* temperature: Number("float"),
9393
* topP: Number("float"),
94-
* topK: Number("int"),
9594
* maxTokens: Number("int"),
9695
* stopSequences: [ // StopSequences
9796
* "STRING_VALUE",
@@ -221,7 +220,6 @@ export interface CreateFlowCommandOutput extends CreateFlowResponse, __MetadataB
221220
* // text: { // PromptModelInferenceConfiguration
222221
* // temperature: Number("float"),
223222
* // topP: Number("float"),
224-
* // topK: Number("int"),
225223
* // maxTokens: Number("int"),
226224
* // stopSequences: [ // StopSequences
227225
* // "STRING_VALUE",

clients/client-bedrock-agent/src/commands/CreateFlowVersionCommand.ts

Lines changed: 0 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -102,7 +102,6 @@ export interface CreateFlowVersionCommandOutput extends CreateFlowVersionRespons
102102
* // text: { // PromptModelInferenceConfiguration
103103
* // temperature: Number("float"),
104104
* // topP: Number("float"),
105-
* // topK: Number("int"),
106105
* // maxTokens: Number("int"),
107106
* // stopSequences: [ // StopSequences
108107
* // "STRING_VALUE",

clients/client-bedrock-agent/src/commands/CreatePromptCommand.ts

Lines changed: 0 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -64,7 +64,6 @@ export interface CreatePromptCommandOutput extends CreatePromptResponse, __Metad
6464
* text: { // PromptModelInferenceConfiguration
6565
* temperature: Number("float"),
6666
* topP: Number("float"),
67-
* topK: Number("int"),
6867
* maxTokens: Number("int"),
6968
* stopSequences: [ // StopSequences
7069
* "STRING_VALUE",
@@ -110,7 +109,6 @@ export interface CreatePromptCommandOutput extends CreatePromptResponse, __Metad
110109
* // text: { // PromptModelInferenceConfiguration
111110
* // temperature: Number("float"),
112111
* // topP: Number("float"),
113-
* // topK: Number("int"),
114112
* // maxTokens: Number("int"),
115113
* // stopSequences: [ // StopSequences
116114
* // "STRING_VALUE",

clients/client-bedrock-agent/src/commands/CreatePromptVersionCommand.ts

Lines changed: 0 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -73,7 +73,6 @@ export interface CreatePromptVersionCommandOutput extends CreatePromptVersionRes
7373
* // text: { // PromptModelInferenceConfiguration
7474
* // temperature: Number("float"),
7575
* // topP: Number("float"),
76-
* // topK: Number("int"),
7776
* // maxTokens: Number("int"),
7877
* // stopSequences: [ // StopSequences
7978
* // "STRING_VALUE",

clients/client-bedrock-agent/src/commands/GetFlowCommand.ts

Lines changed: 0 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -97,7 +97,6 @@ export interface GetFlowCommandOutput extends GetFlowResponse, __MetadataBearer
9797
* // text: { // PromptModelInferenceConfiguration
9898
* // temperature: Number("float"),
9999
* // topP: Number("float"),
100-
* // topK: Number("int"),
101100
* // maxTokens: Number("int"),
102101
* // stopSequences: [ // StopSequences
103102
* // "STRING_VALUE",

clients/client-bedrock-agent/src/commands/GetFlowVersionCommand.ts

Lines changed: 0 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -101,7 +101,6 @@ export interface GetFlowVersionCommandOutput extends GetFlowVersionResponse, __M
101101
* // text: { // PromptModelInferenceConfiguration
102102
* // temperature: Number("float"),
103103
* // topP: Number("float"),
104-
* // topK: Number("int"),
105104
* // maxTokens: Number("int"),
106105
* // stopSequences: [ // StopSequences
107106
* // "STRING_VALUE",

clients/client-bedrock-agent/src/commands/GetIngestionJobCommand.ts

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -28,7 +28,7 @@ export interface GetIngestionJobCommandInput extends GetIngestionJobRequest {}
2828
export interface GetIngestionJobCommandOutput extends GetIngestionJobResponse, __MetadataBearer {}
2929

3030
/**
31-
* <p>Gets information about a data ingestion job. Data sources are ingested into your knowledge base so that Large Lanaguage Models (LLMs) can use your data.</p>
31+
* <p>Gets information about a data ingestion job. Data sources are ingested into your knowledge base so that Large Language Models (LLMs) can use your data.</p>
3232
* @example
3333
* Use a bare-bones client and the command you need to make an API call.
3434
* ```javascript

clients/client-bedrock-agent/src/commands/GetPromptCommand.ts

Lines changed: 0 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -65,7 +65,6 @@ export interface GetPromptCommandOutput extends GetPromptResponse, __MetadataBea
6565
* // text: { // PromptModelInferenceConfiguration
6666
* // temperature: Number("float"),
6767
* // topP: Number("float"),
68-
* // topK: Number("int"),
6968
* // maxTokens: Number("int"),
7069
* // stopSequences: [ // StopSequences
7170
* // "STRING_VALUE",

clients/client-bedrock-agent/src/commands/UpdateFlowCommand.ts

Lines changed: 0 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -91,7 +91,6 @@ export interface UpdateFlowCommandOutput extends UpdateFlowResponse, __MetadataB
9191
* text: { // PromptModelInferenceConfiguration
9292
* temperature: Number("float"),
9393
* topP: Number("float"),
94-
* topK: Number("int"),
9594
* maxTokens: Number("int"),
9695
* stopSequences: [ // StopSequences
9796
* "STRING_VALUE",
@@ -218,7 +217,6 @@ export interface UpdateFlowCommandOutput extends UpdateFlowResponse, __MetadataB
218217
* // text: { // PromptModelInferenceConfiguration
219218
* // temperature: Number("float"),
220219
* // topP: Number("float"),
221-
* // topK: Number("int"),
222220
* // maxTokens: Number("int"),
223221
* // stopSequences: [ // StopSequences
224222
* // "STRING_VALUE",

clients/client-bedrock-agent/src/commands/UpdatePromptCommand.ts

Lines changed: 0 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -64,7 +64,6 @@ export interface UpdatePromptCommandOutput extends UpdatePromptResponse, __Metad
6464
* text: { // PromptModelInferenceConfiguration
6565
* temperature: Number("float"),
6666
* topP: Number("float"),
67-
* topK: Number("int"),
6867
* maxTokens: Number("int"),
6968
* stopSequences: [ // StopSequences
7069
* "STRING_VALUE",
@@ -107,7 +106,6 @@ export interface UpdatePromptCommandOutput extends UpdatePromptResponse, __Metad
107106
* // text: { // PromptModelInferenceConfiguration
108107
* // temperature: Number("float"),
109108
* // topP: Number("float"),
110-
* // topK: Number("int"),
111109
* // maxTokens: Number("int"),
112110
* // stopSequences: [ // StopSequences
113111
* // "STRING_VALUE",

0 commit comments

Comments (0)