diff --git a/.env.example b/.env.example index 9a017b3ac..a5fcb68e3 100644 --- a/.env.example +++ b/.env.example @@ -74,6 +74,10 @@ MASTODON_CLIENT_SECRET="" # Misc Settings OPENAI_API_KEY="" +OPENAI_BASE_URL="https://api.openai.com/v1" #default, can be omitted +OPENAI_TEXT_MODEL="gpt-4.1" #default, can be omitted +OPENAI_TEXT_MODEL_MINI="gpt-4o-mini" #default, can be omitted +OPENAI_IMAGE_MODEL="dall-e-3" #default, can be omitted NEXT_PUBLIC_DISCORD_SUPPORT="" NEXT_PUBLIC_POLOTNO="" # NOT_SECURED=false diff --git a/apps/backend/src/api/routes/copilot.controller.ts b/apps/backend/src/api/routes/copilot.controller.ts index 581343fbb..97b8c76d4 100644 --- a/apps/backend/src/api/routes/copilot.controller.ts +++ b/apps/backend/src/api/routes/copilot.controller.ts @@ -14,8 +14,7 @@ export class CopilotController { @Post('/chat') chat(@Req() req: Request, @Res() res: Response) { if ( - process.env.OPENAI_API_KEY === undefined || - process.env.OPENAI_API_KEY === '' + !process.env.OPENAI_API_KEY && !process.env.OPENAI_BASE_URL // if using official OpenAI API, abort if no key ) { Logger.warn('OpenAI API key not set, chat functionality will not work'); return; @@ -29,8 +28,8 @@ export class CopilotController { // @ts-ignore req?.body?.variables?.data?.metadata?.requestType === 'TextareaCompletion' - ? 'gpt-4o-mini' - : 'gpt-4.1', + ? 
(process.env.OPENAI_TEXT_MODEL_MINI || 'gpt-4o-mini') + : (process.env.OPENAI_TEXT_MODEL || 'gpt-4.1'), }), }); diff --git a/libraries/nestjs-libraries/src/agent/agent.graph.insert.service.ts b/libraries/nestjs-libraries/src/agent/agent.graph.insert.service.ts index 1a60b7399..b263e1c90 100644 --- a/libraries/nestjs-libraries/src/agent/agent.graph.insert.service.ts +++ b/libraries/nestjs-libraries/src/agent/agent.graph.insert.service.ts @@ -9,8 +9,7 @@ import { agentTopics } from '@gitroom/nestjs-libraries/agent/agent.topics'; import { PostsService } from '@gitroom/nestjs-libraries/database/prisma/posts/posts.service'; const model = new ChatOpenAI({ - apiKey: process.env.OPENAI_API_KEY || 'sk-proj-', - model: 'gpt-4o-2024-08-06', + model: process.env.OPENAI_TEXT_MODEL || 'gpt-4o-2024-08-06', temperature: 0, }); diff --git a/libraries/nestjs-libraries/src/agent/agent.graph.service.ts b/libraries/nestjs-libraries/src/agent/agent.graph.service.ts index d4697e377..92f22449b 100644 --- a/libraries/nestjs-libraries/src/agent/agent.graph.service.ts +++ b/libraries/nestjs-libraries/src/agent/agent.graph.service.ts @@ -22,14 +22,12 @@ const tools = !process.env.TAVILY_API_KEY const toolNode = new ToolNode(tools); const model = new ChatOpenAI({ - apiKey: process.env.OPENAI_API_KEY || 'sk-proj-', - model: 'gpt-4.1', + model: process.env.OPENAI_TEXT_MODEL || 'gpt-4.1', temperature: 0.7, }); const dalle = new DallEAPIWrapper({ - apiKey: process.env.OPENAI_API_KEY || 'sk-proj-', - model: 'dall-e-3', + model: process.env.OPENAI_IMAGE_MODEL || 'dall-e-3', }); interface WorkflowChannelsState { diff --git a/libraries/nestjs-libraries/src/database/prisma/autopost/autopost.service.ts b/libraries/nestjs-libraries/src/database/prisma/autopost/autopost.service.ts index 0b3c66d25..b84bc7895 100644 --- a/libraries/nestjs-libraries/src/database/prisma/autopost/autopost.service.ts +++ b/libraries/nestjs-libraries/src/database/prisma/autopost/autopost.service.ts @@ -32,14 +32,12 @@ interface 
WorkflowChannelsState { } const model = new ChatOpenAI({ - apiKey: process.env.OPENAI_API_KEY || 'sk-proj-', - model: 'gpt-4.1', + model: process.env.OPENAI_TEXT_MODEL || 'gpt-4.1', temperature: 0.7, }); const dalle = new DallEAPIWrapper({ - apiKey: process.env.OPENAI_API_KEY || 'sk-proj-', - model: 'gpt-image-1', + model: process.env.OPENAI_IMAGE_MODEL || 'gpt-image-1', }); const generateContent = z.object({ diff --git a/libraries/nestjs-libraries/src/openai/openai.service.ts b/libraries/nestjs-libraries/src/openai/openai.service.ts index 709ce0eac..3acb144d4 100644 --- a/libraries/nestjs-libraries/src/openai/openai.service.ts +++ b/libraries/nestjs-libraries/src/openai/openai.service.ts @@ -4,9 +4,7 @@ import { shuffle } from 'lodash'; import { zodResponseFormat } from 'openai/helpers/zod'; import { z } from 'zod'; -const openai = new OpenAI({ - apiKey: process.env.OPENAI_API_KEY || 'sk-proj-', -}); +const openai = new OpenAI({}); const PicturePrompt = z.object({ prompt: z.string(), @@ -23,7 +21,7 @@ export class OpenaiService { await openai.images.generate({ prompt, response_format: isUrl ? 'url' : 'b64_json', - model: 'dall-e-3', + model: process.env.OPENAI_IMAGE_MODEL || 'dall-e-3', ...(isVertical ? 
{ size: '1024x1792' } : {}), }) ).data[0]; @@ -35,7 +33,7 @@ export class OpenaiService { return ( ( await openai.beta.chat.completions.parse({ - model: 'gpt-4.1', + model: process.env.OPENAI_TEXT_MODEL || 'gpt-4.1', messages: [ { role: 'system', @@ -90,7 +88,7 @@ export class OpenaiService { ], n: 5, temperature: 1, - model: 'gpt-4.1', + model: process.env.OPENAI_TEXT_MODEL || 'gpt-4.1', }), openai.chat.completions.create({ messages: [ @@ -106,7 +104,7 @@ export class OpenaiService { ], n: 5, temperature: 1, - model: 'gpt-4.1', + model: process.env.OPENAI_TEXT_MODEL || 'gpt-4.1', }), ]) ).flatMap((p) => p.choices); @@ -144,7 +142,7 @@ export class OpenaiService { content, }, ], - model: 'gpt-4.1', + model: process.env.OPENAI_TEXT_MODEL || 'gpt-4.1', }); const { content: articleContent } = websiteContent.choices[0].message; @@ -164,7 +162,7 @@ export class OpenaiService { const posts = ( await openai.beta.chat.completions.parse({ - model: 'gpt-4.1', + model: process.env.OPENAI_TEXT_MODEL || 'gpt-4.1', messages: [ { role: 'system', @@ -197,7 +195,7 @@ export class OpenaiService { return ( ( await openai.beta.chat.completions.parse({ - model: 'gpt-4.1', + model: process.env.OPENAI_TEXT_MODEL || 'gpt-4.1', messages: [ { role: 'system',