.env.example (4 additions, 0 deletions)
@@ -74,6 +74,10 @@ MASTODON_CLIENT_SECRET=""

 # Misc Settings
 OPENAI_API_KEY=""
+OPENAI_BASE_URL="https://api.openai.com/v1" #default, can be omitted
+OPENAI_TEXT_MODEL="gpt-4.1" #default, can be omitted
+OPENAI_TEXT_MODEL_MINI="gpt-4o-mini" #default, can be omitted
+OPENAI_IMAGE_MODEL="dall-e-3" #default, can be omitted
 NEXT_PUBLIC_DISCORD_SUPPORT=""
 NEXT_PUBLIC_POLOTNO=""
 # NOT_SECURED=false
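The defaults mirror the previous hard-coded values, so existing deployments need no changes. As a minimal sketch of the new flexibility (every value below is a placeholder, not something shipped in this PR), a self-hosted OpenAI-compatible gateway could be configured like so:

OPENAI_API_KEY="any-non-empty-value-if-your-gateway-ignores-keys"
OPENAI_BASE_URL="http://localhost:4000/v1"
OPENAI_TEXT_MODEL="my-large-text-model"
OPENAI_TEXT_MODEL_MINI="my-small-text-model"
OPENAI_IMAGE_MODEL="my-image-model"

With OPENAI_BASE_URL set, the backend no longer insists on an API key for the copilot endpoint (see the copilot.controller.ts change below).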
apps/backend/src/api/routes/copilot.controller.ts (3 additions, 4 deletions)
@@ -14,8 +14,7 @@ export class CopilotController
   @Post('/chat')
   chat(@Req() req: Request, @Res() res: Response) {
     if (
-      process.env.OPENAI_API_KEY === undefined ||
-      process.env.OPENAI_API_KEY === ''
+      !process.env.OPENAI_API_KEY && !process.env.OPENAI_BASE_URL // if using the official OpenAI API, abort if no key
     ) {
       Logger.warn('OpenAI API key not set, chat functionality will not work');
       return;
@@ -29,8 +28,8 @@ export class CopilotController {
           // @ts-ignore
           req?.body?.variables?.data?.metadata?.requestType ===
           'TextareaCompletion'
-            ? 'gpt-4o-mini'
-            : 'gpt-4.1',
+            ? (process.env.OPENAI_TEXT_MODEL_MINI || 'gpt-4o-mini')
+            : (process.env.OPENAI_TEXT_MODEL || 'gpt-4.1'),
       }),
     });

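The reworked guard only bails out when neither an API key nor a custom base URL is configured, so keyless OpenAI-compatible gateways can still serve the copilot. A rough sketch of the intended logic (simplified, with a hypothetical helper name; not the controller's exact code):

import { Logger } from '@nestjs/common';

// Returns the model to use, or undefined when chat cannot work at all.
export function resolveCopilotModel(requestType?: string): string | undefined {
  const hasApiKey = Boolean(process.env.OPENAI_API_KEY);
  const hasCustomEndpoint = Boolean(process.env.OPENAI_BASE_URL);

  // Only the official OpenAI API strictly requires a key; a custom endpoint may be keyless.
  if (!hasApiKey && !hasCustomEndpoint) {
    Logger.warn('OpenAI API key not set, chat functionality will not work');
    return undefined;
  }

  // Smaller model for inline textarea completions, larger model for everything else.
  return requestType === 'TextareaCompletion'
    ? process.env.OPENAI_TEXT_MODEL_MINI || 'gpt-4o-mini'
    : process.env.OPENAI_TEXT_MODEL || 'gpt-4.1';
}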
@@ -9,8 +9,7 @@ import { agentTopics } from '@gitroom/nestjs-libraries/agent/agent.topics';
 import { PostsService } from '@gitroom/nestjs-libraries/database/prisma/posts/posts.service';

 const model = new ChatOpenAI({
-  apiKey: process.env.OPENAI_API_KEY || 'sk-proj-',
-  model: 'gpt-4o-2024-08-06',
+  model: process.env.OPENAI_TEXT_MODEL || 'gpt-4o-2024-08-06',
   temperature: 0,
 });

libraries/nestjs-libraries/src/agent/agent.graph.service.ts (2 additions, 4 deletions)
@@ -22,14 +22,12 @@ const tools = !process.env.TAVILY_API_KEY
 const toolNode = new ToolNode(tools);

 const model = new ChatOpenAI({
-  apiKey: process.env.OPENAI_API_KEY || 'sk-proj-',
-  model: 'gpt-4.1',
+  model: process.env.OPENAI_TEXT_MODEL || 'gpt-4.1',
   temperature: 0.7,
 });

 const dalle = new DallEAPIWrapper({
-  apiKey: process.env.OPENAI_API_KEY || 'sk-proj-',
-  model: 'dall-e-3',
+  model: process.env.OPENAI_IMAGE_MODEL || 'dall-e-3',
 });

 interface WorkflowChannelsState {
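With the hard-coded 'sk-proj-' placeholder gone, the LangChain wrappers resolve credentials from the environment (ChatOpenAI reads OPENAI_API_KEY when no apiKey is passed). A minimal sketch of the resulting pattern, assuming @langchain/openai; the commented configuration.baseURL line is optional and only relevant if the underlying client's own OPENAI_BASE_URL handling is not enough:

import { ChatOpenAI, DallEAPIWrapper } from '@langchain/openai';

// Text model: configurable via env, same fallback as before.
const model = new ChatOpenAI({
  model: process.env.OPENAI_TEXT_MODEL || 'gpt-4.1',
  temperature: 0.7,
  // configuration: { baseURL: process.env.OPENAI_BASE_URL }, // for OpenAI-compatible gateways
});

// Image model: configurable via env, falling back to DALL-E 3.
const dalle = new DallEAPIWrapper({
  model: process.env.OPENAI_IMAGE_MODEL || 'dall-e-3',
});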
libraries/nestjs-libraries/src/database/prisma/autopost/autopost.service.ts (2 additions, 4 deletions)
@@ -32,14 +32,12 @@ interface WorkflowChannelsState {
 }

 const model = new ChatOpenAI({
-  apiKey: process.env.OPENAI_API_KEY || 'sk-proj-',
-  model: 'gpt-4.1',
+  model: process.env.OPENAI_TEXT_MODEL || 'gpt-4.1',
   temperature: 0.7,
 });

 const dalle = new DallEAPIWrapper({
-  apiKey: process.env.OPENAI_API_KEY || 'sk-proj-',
-  model: 'gpt-image-1',
+  model: process.env.OPENAI_IMAGE_MODEL || 'gpt-image-1',
 });
Comment on lines 34 to 41

⚠️ Potential issue

Fix invalid model names in both ChatOpenAI and DallEAPIWrapper.

Both fallback model names are invalid:

  1. 'gpt-4.1' is not a valid OpenAI text model
  2. 'gpt-image-1' is not a valid OpenAI image model and inconsistent with other files that use 'dall-e-3'

Apply this diff to use valid model names:

-  model:  process.env.OPENAI_TEXT_MODEL || 'gpt-4.1',
+  model: process.env.OPENAI_TEXT_MODEL || 'gpt-4',
-  model: process.env.OPENAI_IMAGE_MODEL || 'gpt-image-1',
+  model: process.env.OPENAI_IMAGE_MODEL || 'dall-e-3',
📝 Committable suggestion

‼️ IMPORTANT
Carefully review the code before committing. Ensure that it accurately replaces the highlighted code, contains no missing lines, and has no issues with indentation. Thoroughly test & benchmark the code to ensure it meets the requirements.

Suggested change
-const model = new ChatOpenAI({
-  model: process.env.OPENAI_TEXT_MODEL || 'gpt-4.1',
-  temperature: 0.7,
-});
-const dalle = new DallEAPIWrapper({
-  model: process.env.OPENAI_IMAGE_MODEL || 'gpt-image-1',
-});
+const model = new ChatOpenAI({
+  model: process.env.OPENAI_TEXT_MODEL || 'gpt-4',
+  temperature: 0.7,
+});
+const dalle = new DallEAPIWrapper({
+  model: process.env.OPENAI_IMAGE_MODEL || 'dall-e-3',
+});
🤖 Prompt for AI Agents
In libraries/nestjs-libraries/src/database/prisma/autopost/autopost.service.ts
between lines 34 and 41, the fallback model names for ChatOpenAI and
DallEAPIWrapper are invalid. Replace 'gpt-4.1' with a valid OpenAI text model
name such as 'gpt-4', and replace 'gpt-image-1' with the consistent and valid
image model name 'dall-e-3' to align with other files.


 const generateContent = z.object({
libraries/nestjs-libraries/src/openai/openai.service.ts (8 additions, 10 deletions)
@@ -4,9 +4,7 @@ import { shuffle } from 'lodash';
 import { zodResponseFormat } from 'openai/helpers/zod';
 import { z } from 'zod';

-const openai = new OpenAI({
-  apiKey: process.env.OPENAI_API_KEY || 'sk-proj-',
-});
+const openai = new OpenAI({});

 const PicturePrompt = z.object({
   prompt: z.string(),
@@ -23,7 +21,7 @@ export class OpenaiService {
       await openai.images.generate({
         prompt,
         response_format: isUrl ? 'url' : 'b64_json',
-        model: 'dall-e-3',
+        model: process.env.OPENAI_IMAGE_MODEL || 'dall-e-3',
       })
     ).data[0];

@@ -34,7 +32,7 @@ export class OpenaiService {
     return (
       (
         await openai.beta.chat.completions.parse({
-          model: 'gpt-4.1',
+          model: process.env.OPENAI_TEXT_MODEL || 'gpt-4.1',
           messages: [
             {
               role: 'system',
@@ -89,7 +87,7 @@ export class OpenaiService {
           ],
           n: 5,
           temperature: 1,
-          model: 'gpt-4.1',
+          model: process.env.OPENAI_TEXT_MODEL || 'gpt-4.1',
         }),
         openai.chat.completions.create({
           messages: [
@@ -105,7 +103,7 @@
           ],
           n: 5,
           temperature: 1,
-          model: 'gpt-4.1',
+          model: process.env.OPENAI_TEXT_MODEL || 'gpt-4.1',
         }),
       ])
     ).flatMap((p) => p.choices);
@@ -143,7 +141,7 @@ export class OpenaiService {
          content,
        },
      ],
-      model: 'gpt-4.1',
+      model: process.env.OPENAI_TEXT_MODEL || 'gpt-4.1',
     });

     const { content: articleContent } = websiteContent.choices[0].message;
@@ -163,7 +161,7 @@ export class OpenaiService {
     const posts =
       (
         await openai.beta.chat.completions.parse({
-          model: 'gpt-4.1',
+          model: process.env.OPENAI_TEXT_MODEL || 'gpt-4.1',
           messages: [
             {
               role: 'system',
@@ -196,7 +194,7 @@ export class OpenaiService {
     return (
       (
         await openai.beta.chat.completions.parse({
-          model: 'gpt-4.1',
+          model: process.env.OPENAI_TEXT_MODEL || 'gpt-4.1',
           messages: [
             {
               role: 'system',
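A note on the new OpenAI({}) construction at the top of this file: with no options passed, the official openai Node SDK resolves the API key from OPENAI_API_KEY and, as far as I can tell, the base URL from OPENAI_BASE_URL, which is what lets the new environment variables take effect here without further code changes. A small sketch of that assumption (the demo function is illustrative only):

import OpenAI from 'openai';

// Passing no options is effectively the same as spelling out the environment fallbacks:
const client = new OpenAI({
  apiKey: process.env.OPENAI_API_KEY,   // required by the official API; a compatible gateway may ignore it
  baseURL: process.env.OPENAI_BASE_URL, // undefined falls back to https://api.openai.com/v1
});

async function demo() {
  const completion = await client.chat.completions.create({
    model: process.env.OPENAI_TEXT_MODEL || 'gpt-4.1',
    messages: [{ role: 'user', content: 'Say hello.' }],
  });
  console.log(completion.choices[0].message.content);
}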