diff --git a/src/commands/config.ts b/src/commands/config.ts
index 644273cd..cccbe55c 100644
--- a/src/commands/config.ts
+++ b/src/commands/config.ts
@@ -34,6 +34,8 @@ export enum CONFIG_KEYS {
   OCO_TEST_MOCK_TYPE = 'OCO_TEST_MOCK_TYPE',
   OCO_API_URL = 'OCO_API_URL',
   OCO_OLLAMA_API_URL = 'OCO_OLLAMA_API_URL',
+  OCO_BACKEND_ENDPOINT = 'OCO_BACKEND_ENDPOINT',
+  OCO_BACKEND_PATH = 'OCO_BACKEND_PATH',
   OCO_FLOWISE_ENDPOINT = 'OCO_FLOWISE_ENDPOINT',
   OCO_FLOWISE_API_KEY = 'OCO_FLOWISE_API_KEY'
 }
@@ -132,8 +134,9 @@ export const configValidators = {
       config.OCO_ANTHROPIC_API_KEY ||
       config.OCO_AI_PROVIDER.startsWith('ollama') ||
       config.OCO_AZURE_API_KEY ||
-      config.OCO_AI_PROVIDER == 'test' ||
-      config.OCO_AI_PROVIDER == 'flowise',
+      config.OCO_AI_PROVIDER == 'flowise' ||
+      config.OCO_AI_PROVIDER == 'llmservice' ||
+      config.OCO_AI_PROVIDER == 'test',
       'You need to provide an OpenAI/Anthropic/Azure or other provider API key via `oco config set OCO_OPENAI_API_KEY=your_key`, for help refer to docs https://github.com/di-sukharev/opencommit'
     );
     validateConfig(
@@ -152,8 +155,9 @@ export const configValidators = {
       config.OCO_OPENAI_API_KEY ||
       config.OCO_AZURE_API_KEY ||
       config.OCO_AI_PROVIDER == 'ollama' ||
-      config.OCO_AI_PROVIDER == 'test' ||
-      config.OCO_AI_PROVIDER == 'flowise',
+      config.OCO_AI_PROVIDER == 'llmservice' ||
+      config.OCO_AI_PROVIDER == 'flowise' ||
+      config.OCO_AI_PROVIDER == 'test',
       'You need to provide an OpenAI/Anthropic/Azure API key'
     );
 
@@ -179,8 +183,9 @@ export const configValidators = {
       value ||
         config.OCO_OPENAI_API_KEY ||
         config.OCO_AI_PROVIDER == 'ollama' ||
-        config.OCO_AI_PROVIDER == 'test' ||
-        config.OCO_AI_PROVIDER == 'flowise',
+        config.OCO_AI_PROVIDER == 'llmservice' ||
+        config.OCO_AI_PROVIDER == 'flowise' ||
+        config.OCO_AI_PROVIDER == 'test',
       'You need to provide an OpenAI/Anthropic API key'
     );
 
@@ -323,9 +328,10 @@ export const configValidators = {
         'gemini',
         'azure',
         'test',
-        'flowise'
+        'flowise',
+        'llmservice'
       ].includes(value) || value.startsWith('ollama'),
-      `${value} is not supported yet, use 'ollama', 'anthropic', 'azure', 'gemini', 'flowise' or 'openai' (default)`
+      `${value} is not supported yet, use 'ollama', 'anthropic', 'azure', 'gemini', 'flowise', 'llmservice' or 'openai' (default)`
     );
     return value;
   },
@@ -379,7 +385,26 @@ export const configValidators = {
       `${value} is not a valid URL`
     );
     return value;
+  },
+
+  [CONFIG_KEYS.OCO_BACKEND_ENDPOINT](value: any) {
+    validateConfig(
+      CONFIG_KEYS.OCO_BACKEND_ENDPOINT,
+      typeof value === 'string',
+      'Must be string'
+    );
+    return value;
+  },
+
+  [CONFIG_KEYS.OCO_BACKEND_PATH](value: any) {
+    validateConfig(
+      CONFIG_KEYS.OCO_BACKEND_PATH,
+      typeof value === 'string',
+      'Must be string'
+    );
+    return value;
   }
+
 };
 
 export type ConfigType = {
@@ -423,6 +448,8 @@ export const getConfig = ({
       process.env.OCO_ONE_LINE_COMMIT === 'true' ? true : false,
     OCO_AZURE_ENDPOINT: process.env.OCO_AZURE_ENDPOINT || undefined,
     OCO_TEST_MOCK_TYPE: process.env.OCO_TEST_MOCK_TYPE || 'commit-message',
+    OCO_BACKEND_ENDPOINT: process.env.OCO_BACKEND_ENDPOINT || 'localhost:8000',
+    OCO_BACKEND_PATH: process.env.OCO_BACKEND_PATH || 'api/generate',
    OCO_FLOWISE_ENDPOINT: process.env.OCO_FLOWISE_ENDPOINT || ':',
    OCO_FLOWISE_API_KEY: process.env.OCO_FLOWISE_API_KEY || undefined,
    OCO_OLLAMA_API_URL: process.env.OCO_OLLAMA_API_URL || undefined
@@ -452,6 +479,7 @@ export const getConfig = ({
         `Manually fix the '.env' file or global '~/.opencommit' config file.`
       );
+      process.exit(1);
     }
   }
 
diff --git a/src/engine/llmservice.ts b/src/engine/llmservice.ts
new file mode 100644
index 00000000..31534804
--- /dev/null
+++ b/src/engine/llmservice.ts
@@ -0,0 +1,37 @@
+import axios, { AxiosError } from 'axios';
+import { ChatCompletionRequestMessage } from 'openai';
+import { AiEngine } from './Engine';
+import {
+  getConfig
+} from '../commands/config';
+
+const config = getConfig();
+
+
+export class LlmService implements AiEngine {
+
+  async generateCommitMessage(
+    messages: Array<ChatCompletionRequestMessage>
+  ): Promise<string | undefined> {
+
+    const gitDiff = messages[messages.length - 1]?.content;
+    const url = `http://${config?.OCO_BACKEND_ENDPOINT}/${config?.OCO_BACKEND_PATH}`;
+    const payload = {
+      user_prompt: gitDiff
+    };
+
+    try {
+      const response = await axios.post(url, payload, {
+        headers: {
+          'Content-Type': 'application/json'
+        }
+      });
+      const message = response.data;
+
+      return message;
+    } catch (err: any) {
+      const message = err.response?.data?.error ?? err.message;
+      throw new Error('Local model issues. Details: ' + message);
+    }
+  }
+}
\ No newline at end of file
diff --git a/src/utils/engine.ts b/src/utils/engine.ts
index e5845301..68e458fd 100644
--- a/src/utils/engine.ts
+++ b/src/utils/engine.ts
@@ -6,6 +6,7 @@ import { OllamaAi } from '../engine/ollama';
 import { AnthropicAi } from '../engine/anthropic'
 import { TestAi } from '../engine/testAi';
 import { Azure } from '../engine/azure';
+import { LlmService } from '../engine/llmservice';
 import { FlowiseAi } from '../engine/flowise'
 
 export function getEngine(): AiEngine {
@@ -28,6 +29,8 @@ export function getEngine(): AiEngine {
     return new Gemini();
   } else if (provider == 'azure') {
     return new Azure();
+  } else if (provider == 'llmservice') {
+    return new LlmService();
   } else if( provider == 'flowise'){
     return new FlowiseAi();
   }
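
Reviewer note: the new engine defines a very small HTTP contract. It POSTs JSON of the shape { user_prompt: "<git diff>" } to http://<OCO_BACKEND_ENDPOINT>/<OCO_BACKEND_PATH>, and whatever axios yields as response.data is returned verbatim as the commit message. The sketch below is a hypothetical stub backend for exercising that contract locally; it is not part of this PR, and everything in it beyond the request/response shape is an assumption (port 8000 and the api/generate path merely mirror the defaults wired into getConfig).

import { createServer } from 'http';

// Stub backend satisfying the llmservice contract:
// POST http://localhost:8000/api/generate with JSON { user_prompt: string },
// answered with a plain-text body that opencommit will use as the commit message.
const server = createServer((req, res) => {
  if (req.method !== 'POST' || req.url !== '/api/generate') {
    res.statusCode = 404;
    res.end();
    return;
  }

  let body = '';
  req.on('data', (chunk) => (body += chunk));
  req.on('end', () => {
    // user_prompt carries the content of the last chat message, i.e. the diff.
    const { user_prompt } = JSON.parse(body) as { user_prompt: string };

    // A real backend would prompt a local model here; this stub echoes a
    // fixed message so the round trip is easy to verify.
    res.writeHead(200, { 'Content-Type': 'text/plain' });
    res.end(`chore: stub message (${user_prompt.length} diff chars received)`);
  });
});

// 8000 matches the OCO_BACKEND_ENDPOINT default of 'localhost:8000'.
server.listen(8000, () => console.log('stub llmservice backend listening on :8000'));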
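
With a stub like that running, the provider can be exercised end to end using only settings introduced in this diff: run "oco config set OCO_AI_PROVIDER=llmservice", and optionally "oco config set OCO_BACKEND_ENDPOINT=localhost:8000" and "oco config set OCO_BACKEND_PATH=api/generate" (both already the getConfig defaults). One limitation worth noting for review: the engine hard-codes the http:// scheme when it builds the URL, so a TLS-only backend cannot be reached through OCO_BACKEND_ENDPOINT.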