Skip to content

Commit f3371ac

Browse files
di-sukharev, tumf, drewpayment, matscube, KellanStevens
authored
3.0.18 (#382)
* 📝 (README.md): add support for custom AI models and update documentation to reflect new environment variable OCO_AI_PROVIDER (#351) * feat/add gemini (#349) * fix: prompt-module/@commitlint (#336) * docs: spelling fix (#325) --------- Co-authored-by: tumf <[email protected]> Co-authored-by: Drew Payment <[email protected]> Co-authored-by: Takanori Matsumoto <[email protected]> Co-authored-by: Kellan Stevens <[email protected]> Co-authored-by: JMN09 <[email protected]> Co-authored-by: JMN09 <[email protected]>
1 parent 07f7a05 commit f3371ac

File tree

8 files changed

+959
-768
lines changed

8 files changed

+959
-768
lines changed

out/cli.cjs

Lines changed: 436 additions & 381 deletions
Large diffs are not rendered by default.

out/github-action.cjs

Lines changed: 424 additions & 372 deletions
Large diffs are not rendered by default.

package-lock.json

Lines changed: 2 additions & 2 deletions
Some generated files are not rendered by default. Learn more about customizing how changed files appear on GitHub.

package.json

Lines changed: 4 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1,6 +1,6 @@
11
{
22
"name": "opencommit",
3-
"version": "3.0.17",
3+
"version": "3.0.18",
44
"description": "Auto-generate impressive commits in 1 second. Killing lame commits with AI 🤯🔫",
55
"keywords": [
66
"git",
@@ -98,5 +98,8 @@
9898
"ini": "^3.0.1",
9999
"inquirer": "^9.1.4",
100100
"openai": "^3.2.1"
101+
},
102+
"overrides": {
103+
"whatwg-url": "13.0.0"
101104
}
102105
}

src/commands/config.ts

Lines changed: 46 additions & 11 deletions
Original file line numberDiff line numberDiff line change
@@ -33,7 +33,9 @@ export enum CONFIG_KEYS {
3333
OCO_AZURE_ENDPOINT = 'OCO_AZURE_ENDPOINT',
3434
OCO_TEST_MOCK_TYPE = 'OCO_TEST_MOCK_TYPE',
3535
OCO_API_URL = 'OCO_API_URL',
36-
OCO_OLLAMA_API_URL = 'OCO_OLLAMA_API_URL'
36+
OCO_OLLAMA_API_URL = 'OCO_OLLAMA_API_URL',
37+
OCO_FLOWISE_ENDPOINT = 'OCO_FLOWISE_ENDPOINT',
38+
OCO_FLOWISE_API_KEY = 'OCO_FLOWISE_API_KEY'
3739
}
3840

3941
export enum CONFIG_MODES {
@@ -130,7 +132,8 @@ export const configValidators = {
130132
config.OCO_ANTHROPIC_API_KEY ||
131133
config.OCO_AI_PROVIDER.startsWith('ollama') ||
132134
config.OCO_AZURE_API_KEY ||
133-
config.OCO_AI_PROVIDER == 'test',
135+
config.OCO_AI_PROVIDER == 'test' ||
136+
config.OCO_AI_PROVIDER == 'flowise',
134137
'You need to provide an OpenAI/Anthropic/Azure API key'
135138
);
136139
validateConfig(
@@ -149,7 +152,8 @@ export const configValidators = {
149152
config.OCO_OPENAI_API_KEY ||
150153
config.OCO_AZURE_API_KEY ||
151154
config.OCO_AI_PROVIDER == 'ollama' ||
152-
config.OCO_AI_PROVIDER == 'test',
155+
config.OCO_AI_PROVIDER == 'test' ||
156+
config.OCO_AI_PROVIDER == 'flowise',
153157
'You need to provide an OpenAI/Anthropic/Azure API key'
154158
);
155159

@@ -175,13 +179,24 @@ export const configValidators = {
175179
value ||
176180
config.OCO_OPENAI_API_KEY ||
177181
config.OCO_AI_PROVIDER == 'ollama' ||
178-
config.OCO_AI_PROVIDER == 'test',
182+
config.OCO_AI_PROVIDER == 'test' ||
183+
config.OCO_AI_PROVIDER == 'flowise',
179184
'You need to provide an OpenAI/Anthropic API key'
180185
);
181186

182187
return value;
183188
},
184189

190+
[CONFIG_KEYS.OCO_FLOWISE_API_KEY](value: any, config: any = {}) {
191+
validateConfig(
192+
CONFIG_KEYS.OCO_FLOWISE_API_KEY,
193+
value || config.OCO_AI_PROVIDER != 'flowise',
194+
'You need to provide a flowise API key'
195+
);
196+
197+
return value;
198+
},
199+
185200
[CONFIG_KEYS.OCO_DESCRIPTION](value: any) {
186201
validateConfig(
187202
CONFIG_KEYS.OCO_DESCRIPTION,
@@ -268,7 +283,8 @@ export const configValidators = {
268283
].includes(value) ||
269284
config.OCO_AI_PROVIDER == 'ollama' ||
270285
config.OCO_AI_PROVIDER == 'azure' ||
271-
config.OCO_AI_PROVIDER == 'test',
286+
config.OCO_AI_PROVIDER == 'test' ||
287+
config.OCO_AI_PROVIDER == 'flowise',
272288
`${value} is not supported yet, use:\n\n ${[
273289
...MODEL_LIST.openai,
274290
...MODEL_LIST.anthropic,
@@ -308,9 +324,16 @@ export const configValidators = {
308324
[CONFIG_KEYS.OCO_AI_PROVIDER](value: any) {
309325
validateConfig(
310326
CONFIG_KEYS.OCO_AI_PROVIDER,
311-
['', 'openai', 'anthropic', 'gemini', 'azure', 'test'].includes(value) ||
312-
value.startsWith('ollama'),
313-
`${value} is not supported yet, use 'ollama', 'anthropic', 'azure', 'gemini' or 'openai' (default)`
327+
[
328+
'',
329+
'openai',
330+
'anthropic',
331+
'gemini',
332+
'azure',
333+
'test',
334+
'flowise'
335+
].includes(value) || value.startsWith('ollama'),
336+
`${value} is not supported yet, use 'ollama', 'anthropic', 'azure', 'gemini', 'flowise' or 'openai' (default)`
314337
);
315338
return value;
316339
},
@@ -324,6 +347,7 @@ export const configValidators = {
324347

325348
return value;
326349
},
350+
327351
[CONFIG_KEYS.OCO_AZURE_ENDPOINT](value: any) {
328352
validateConfig(
329353
CONFIG_KEYS.OCO_AZURE_ENDPOINT,
@@ -333,6 +357,17 @@ export const configValidators = {
333357

334358
return value;
335359
},
360+
361+
[CONFIG_KEYS.OCO_FLOWISE_ENDPOINT](value: any) {
362+
validateConfig(
363+
CONFIG_KEYS.OCO_FLOWISE_ENDPOINT,
364+
typeof value === 'string' && value.includes(':'),
365+
'Value must be string and should include both I.P. and port number' // Considering the possibility of DNS lookup or feeding the I.P. explicitely, there is no pattern to verify, except a column for the port number
366+
);
367+
368+
return value;
369+
},
370+
336371
[CONFIG_KEYS.OCO_TEST_MOCK_TYPE](value: any) {
337372
validateConfig(
338373
CONFIG_KEYS.OCO_TEST_MOCK_TYPE,
@@ -361,7 +396,6 @@ export type ConfigType = {
361396

362397
const defaultConfigPath = pathJoin(homedir(), '.opencommit');
363398
const defaultEnvPath = pathResolve(process.cwd(), '.env');
364-
365399
export const getConfig = ({
366400
configPath = defaultConfigPath,
367401
envPath = defaultEnvPath
@@ -397,9 +431,10 @@ export const getConfig = ({
397431
process.env.OCO_ONE_LINE_COMMIT === 'true' ? true : false,
398432
OCO_AZURE_ENDPOINT: process.env.OCO_AZURE_ENDPOINT || undefined,
399433
OCO_TEST_MOCK_TYPE: process.env.OCO_TEST_MOCK_TYPE || 'commit-message',
400-
OCO_OLLAMA_API_URL: process.env.OCO_OLLAMA_API_URL || undefined,
434+
OCO_FLOWISE_ENDPOINT: process.env.OCO_FLOWISE_ENDPOINT || ':',
435+
OCO_FLOWISE_API_KEY: process.env.OCO_FLOWISE_API_KEY || undefined,
436+
OCO_OLLAMA_API_URL: process.env.OCO_OLLAMA_API_URL || undefined
401437
};
402-
403438
const configExists = existsSync(configPath);
404439
if (!configExists) return configFromEnv;
405440

src/engine/flowise.ts

Lines changed: 43 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,43 @@
1+
import axios, { AxiosError } from 'axios';
2+
import { ChatCompletionRequestMessage } from 'openai';
3+
import { AiEngine } from './Engine';
4+
5+
import {
6+
getConfig
7+
} from '../commands/config';
8+
9+
const config = getConfig();
10+
11+
export class FlowiseAi implements AiEngine {
12+
13+
async generateCommitMessage(
14+
messages: Array<ChatCompletionRequestMessage>
15+
): Promise<string | undefined> {
16+
17+
const gitDiff = messages[ messages.length - 1 ]?.content?.replace(/\\/g, '\\\\')
18+
.replace(/"/g, '\\"')
19+
.replace(/\n/g, '\\n')
20+
.replace(/\r/g, '\\r')
21+
.replace(/\t/g, '\\t');
22+
const url = `http://${config?.OCO_FLOWISE_ENDPOINT}/api/v1/prediction/${config?.OCO_FLOWISE_API_KEY}`;
23+
const payload = {
24+
question : gitDiff,
25+
overrideConfig : {
26+
systemMessagePrompt: messages[0]?.content,
27+
},
28+
history : messages.slice( 1, -1 )
29+
}
30+
try {
31+
const response = await axios.post(url, payload, {
32+
headers: {
33+
'Content-Type': 'application/json'
34+
}
35+
});
36+
const message = response.data;
37+
return message?.text;
38+
} catch (err: any) {
39+
const message = err.response?.data?.error ?? err.message;
40+
throw new Error('local model issues. details: ' + message);
41+
}
42+
}
43+
}

src/utils/engine.ts

Lines changed: 3 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -6,6 +6,7 @@ import { OllamaAi } from '../engine/ollama';
66
import { AnthropicAi } from '../engine/anthropic'
77
import { TestAi } from '../engine/testAi';
88
import { Azure } from '../engine/azure';
9+
import { FlowiseAi } from '../engine/flowise'
910

1011
export function getEngine(): AiEngine {
1112
const config = getConfig();
@@ -27,6 +28,8 @@ export function getEngine(): AiEngine {
2728
return new Gemini();
2829
} else if (provider == 'azure') {
2930
return new Azure();
31+
} else if( provider == 'flowise'){
32+
return new FlowiseAi();
3033
}
3134

3235
//open ai gpt by default

test/unit/config.test.ts

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -102,4 +102,4 @@ OCO_ONE_LINE_COMMIT="true"
102102

103103
await envFile.cleanup();
104104
});
105-
});
105+
});

0 commit comments

Comments
 (0)