Skip to content
This repository was archived by the owner on Jun 13, 2025. It is now read-only.

Commit 64ffdf0

Browse files
saimanojharshithmullapudi
authored and committed
Fix: LLM model name changes
1 parent dbb2a3f commit 64ffdf0

File tree

2 files changed

+17
-24
lines changed

2 files changed

+17
-24
lines changed

apps/server/src/modules/action-event/action-event.service.ts

Lines changed: 0 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -1,5 +1,4 @@
11
import { Injectable } from '@nestjs/common';
2-
import { ConfigService } from '@nestjs/config';
32
import {
43
ActionEntity,
54
ActionEvent,
@@ -29,7 +28,6 @@ export default class ActionEventService {
2928
private prisma: PrismaService,
3029

3130
private integrationsService: IntegrationsService,
32-
private configService: ConfigService,
3331
) {}
3432

3533
async createEvent(event: CreateActionEvent) {
@@ -60,8 +58,6 @@ export default class ActionEventService {
6058

6159
const processedIds = await this.triggerActions(actionEvent);
6260
this.updateEvent(actionEvent.id, processedIds);
63-
64-
await this.triggerWebhookAction(actionEvent, event.workspaceId);
6561
}
6662
}
6763
}

apps/server/src/modules/ai-requests/ai-requests.services.ts

Lines changed: 17 additions & 20 deletions
Original file line numberDiff line numberDiff line change
@@ -2,12 +2,7 @@ import { anthropic } from '@ai-sdk/anthropic';
22
import { openai } from '@ai-sdk/openai';
33
import { Injectable } from '@nestjs/common';
44
import { ConfigService } from '@nestjs/config';
5-
import {
6-
AIStreamResponse,
7-
GetAIRequestDTO,
8-
LLMMappings,
9-
LLMModelEnum,
10-
} from '@tegonhq/types';
5+
import { AIStreamResponse, GetAIRequestDTO } from '@tegonhq/types';
116
import {
127
CoreMessage,
138
CoreUserMessage,
@@ -74,7 +69,7 @@ export default class AIRequestsService {
7469
try {
7570
return await this.makeModelCall(
7671
stream,
77-
model as LLMModelEnum,
72+
model,
7873
messages,
7974
(text: string, model: string) => {
8075
this.createRecord(
@@ -98,33 +93,35 @@ export default class AIRequestsService {
9893

9994
async makeModelCall(
10095
stream: boolean,
101-
model: LLMModelEnum,
96+
model: string,
10297
messages: CoreMessage[],
10398
onFinish: (text: string, model: string) => void,
10499
) {
105100
let modelInstance;
106101
let finalModel: string;
107-
108-
if (!this.configService.get('OPENAI_API_KEY')) {
102+
if (
103+
!this.configService.get('OPENAI_API_KEY') ||
104+
!this.configService.get('ANTHROPIC_API_KEY')
105+
) {
109106
model = null;
110107
}
111108

112109
switch (model) {
113-
case LLMModelEnum.GPT35TURBO:
114-
case LLMModelEnum.GPT4TURBO:
115-
case LLMModelEnum.GPT4O:
116-
finalModel = LLMMappings[model];
110+
case 'gpt-3.5-turbo':
111+
case 'gpt-4-turbo':
112+
case 'gpt-4o':
113+
finalModel = model;
117114
this.logger.info({
118-
message: `Sending request to OpenAI with model: ${model}`,
115+
message: `Sending request to OpenAI with model: ${finalModel}`,
119116
where: `AIRequestsService.makeModelCall`,
120117
});
121-
modelInstance = openai(model);
118+
modelInstance = openai(finalModel);
122119
break;
123120

124-
case LLMModelEnum.CLAUDEOPUS:
125-
case LLMModelEnum.CLAUDESONNET:
126-
case LLMModelEnum.CLAUDEHAIKU:
127-
finalModel = LLMMappings[model];
121+
case 'claude-3-opus-20240229':
122+
case 'claude-3-5-sonnet-20241022':
123+
case 'claude-3-haiku-20240307':
124+
finalModel = model;
128125
this.logger.info({
129126
message: `Sending request to Claude with model: ${finalModel}`,
130127
where: `AIRequestsService.makeModelCall`,

0 commit comments

Comments (0)