Skip to content

Commit 69dcf2d

Browse files
committed
Merge branch 'main' into backend_redis_fixes
2 parents 1d9b887 + 8412e85 commit 69dcf2d

File tree

2 files changed

+365
-366
lines changed

2 files changed

+365
-366
lines changed

backend/src/entities/ai/amazon-bedrock/amazon-bedrock.ai.provider.ts

Lines changed: 29 additions & 32 deletions
Original file line number | Diff line number | Diff line change
@@ -4,38 +4,35 @@ import { IAIProvider } from './ai-provider.interface.js';
44

55
@Injectable()
66
export class AmazonBedrockAiProvider implements IAIProvider {
7-
private readonly bedrockRuntimeClient: BedrockRuntimeClient;
8-
private readonly modelId: string = 'global.anthropic.claude-sonnet-4-5-20250929-v1:0';
9-
private readonly temperature: number = 0.7;
10-
private readonly maxTokens: number = 1024;
11-
private readonly region: string = 'us-west-2';
12-
private readonly topP: number = 0.9;
7+
private readonly bedrockRuntimeClient: BedrockRuntimeClient;
8+
private readonly modelId: string = 'global.anthropic.claude-sonnet-4-5-20250929-v1:0';
9+
private readonly maxTokens: number = 1024;
1310

14-
constructor() {
15-
this.bedrockRuntimeClient = new BedrockRuntimeClient({
16-
region: this.region,
17-
});
18-
}
19-
public async generateResponse(prompt: string): Promise<string> {
20-
const conversation = [
21-
{
22-
role: 'user' as const,
23-
content: [{ text: prompt }],
24-
},
25-
];
11+
constructor() {
12+
this.bedrockRuntimeClient = new BedrockRuntimeClient();
13+
}
14+
public async generateResponse(prompt: string): Promise<string> {
15+
const conversation = [
16+
{
17+
role: 'user' as const,
18+
content: [{ text: prompt }],
19+
},
20+
];
2621

27-
const command = new ConverseCommand({
28-
modelId: this.modelId,
29-
messages: conversation,
30-
inferenceConfig: { maxTokens: this.maxTokens, temperature: this.temperature, topP: this.topP },
31-
});
32-
try {
33-
const response = await this.bedrockRuntimeClient.send(command);
34-
const responseText = response.output.message?.content[0].text;
35-
return responseText || 'No response generated.';
36-
} catch (error) {
37-
console.error('Error generating AI response:', error);
38-
throw new Error('Failed to generate AI response.');
39-
}
40-
}
22+
const command = new ConverseCommand({
23+
modelId: this.modelId,
24+
messages: conversation,
25+
inferenceConfig: { maxTokens: this.maxTokens },
26+
});
27+
try {
28+
const response = await this.bedrockRuntimeClient.send(command);
29+
console.info('AI response received from Amazon Bedrock.');
30+
const responseText = response.output.message?.content[0].text;
31+
console.info('AI response text. ', responseText);
32+
return responseText || 'No response generated.';
33+
} catch (error) {
34+
console.error('Error generating AI response:', error);
35+
throw new Error('Failed to generate AI response.');
36+
}
37+
}
4138
}

0 commit comments

Comments (0)