Skip to content

Commit 673eee2

Browse files
Feat: Add Claude 3 support (#318)
* 3.0.12
* build
* feat: anthropic claude 3 support
* fix: add system prompt
* fix: type check
* fix: package version
* fix: update anthropic for dependency bug fix
* feat: update build files
* feat: update version number

---------

Co-authored-by: di-sukharev <[email protected]>
1 parent 91399a0 commit 673eee2

File tree

9 files changed

+63643
-23456
lines changed

9 files changed

+63643
-23456
lines changed

out/cli.cjs

Lines changed: 19810 additions & 6704 deletions
Large diffs are not rendered by default.

out/github-action.cjs

Lines changed: 41854 additions & 15415 deletions
Large diffs are not rendered by default.

out/tiktoken_bg.wasm

297 KB
Binary file not shown.

package-lock.json

Lines changed: 1777 additions & 1313 deletions
Some generated files are not rendered by default. Learn more about customizing how changed files appear on GitHub.

package.json

Lines changed: 2 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1,6 +1,6 @@
11
{
22
"name": "opencommit",
3-
"version": "3.0.12",
3+
"version": "3.0.13",
44
"description": "Auto-generate impressive commits in 1 second. Killing lame commits with AI 🤯🔫",
55
"keywords": [
66
"git",
@@ -73,6 +73,7 @@
7373
"@actions/core": "^1.10.0",
7474
"@actions/exec": "^1.1.1",
7575
"@actions/github": "^5.1.1",
76+
"@anthropic-ai/sdk": "^0.19.2",
7677
"@clack/prompts": "^0.6.1",
7778
"@dqbd/tiktoken": "^1.0.2",
7879
"@octokit/webhooks-schemas": "^6.11.0",

src/commands/config.ts

Lines changed: 46 additions & 15 deletions
Original file line numberDiff line numberDiff line change
@@ -15,6 +15,7 @@ dotenv.config();
1515

1616
export enum CONFIG_KEYS {
1717
OCO_OPENAI_API_KEY = 'OCO_OPENAI_API_KEY',
18+
OCO_ANTHROPIC_API_KEY = 'OCO_ANTHROPIC_API_KEY',
1819
OCO_TOKENS_MAX_INPUT = 'OCO_TOKENS_MAX_INPUT',
1920
OCO_TOKENS_MAX_OUTPUT = 'OCO_TOKENS_MAX_OUTPUT',
2021
OCO_OPENAI_BASE_PATH = 'OCO_OPENAI_BASE_PATH',
@@ -34,6 +35,31 @@ export enum CONFIG_MODES {
3435
set = 'set'
3536
}
3637

38+
export const MODEL_LIST = {
39+
openai: ['gpt-3.5-turbo',
40+
'gpt-3.5-turbo-0125',
41+
'gpt-4',
42+
'gpt-4-turbo',
43+
'gpt-4-1106-preview',
44+
'gpt-4-turbo-preview',
45+
'gpt-4-0125-preview'],
46+
47+
anthropic: ['claude-3-haiku-20240307',
48+
'claude-3-sonnet-20240229',
49+
'claude-3-opus-20240229']
50+
}
51+
52+
const getDefaultModel = (provider: string | undefined): string => {
53+
switch (provider) {
54+
case 'ollama':
55+
return '';
56+
case 'anthropic':
57+
return MODEL_LIST.anthropic[0];
58+
default:
59+
return MODEL_LIST.openai[0];
60+
}
61+
};
62+
3763
export enum DEFAULT_TOKEN_LIMITS {
3864
DEFAULT_MAX_TOKENS_INPUT = 4096,
3965
DEFAULT_MAX_TOKENS_OUTPUT = 500
@@ -57,9 +83,9 @@ export const configValidators = {
5783
[CONFIG_KEYS.OCO_OPENAI_API_KEY](value: any, config: any = {}) {
5884
//need api key unless running locally with ollama
5985
validateConfig(
60-
'API_KEY',
61-
value || config.OCO_AI_PROVIDER == 'ollama' || config.OCO_AI_PROVIDER == 'test',
62-
'You need to provide an API key'
86+
'OpenAI API_KEY',
87+
value || config.OCO_ANTHROPIC_API_KEY || config.OCO_AI_PROVIDER == 'ollama' || config.OCO_AI_PROVIDER == 'test',
88+
'You need to provide an OpenAI/Anthropic API key'
6389
);
6490
validateConfig(
6591
CONFIG_KEYS.OCO_OPENAI_API_KEY,
@@ -75,6 +101,16 @@ export const configValidators = {
75101
return value;
76102
},
77103

104+
[CONFIG_KEYS.OCO_ANTHROPIC_API_KEY](value: any, config: any = {}) {
105+
validateConfig(
106+
'ANTHROPIC_API_KEY',
107+
value || config.OCO_OPENAI_API_KEY || config.OCO_AI_PROVIDER == 'ollama' || config.OCO_AI_PROVIDER == 'test',
108+
'You need to provide an OpenAI/Anthropic API key'
109+
);
110+
111+
return value;
112+
},
113+
78114
[CONFIG_KEYS.OCO_DESCRIPTION](value: any) {
79115
validateConfig(
80116
CONFIG_KEYS.OCO_DESCRIPTION,
@@ -154,19 +190,12 @@ export const configValidators = {
154190
[CONFIG_KEYS.OCO_MODEL](value: any) {
155191
validateConfig(
156192
CONFIG_KEYS.OCO_MODEL,
157-
[
158-
'gpt-3.5-turbo',
159-
'gpt-3.5-turbo-0125',
160-
'gpt-4',
161-
'gpt-4-1106-preview',
162-
'gpt-4-0125-preview',
163-
'gpt-4-turbo',
164-
'gpt-4-turbo-preview'
165-
].includes(value),
166-
`${value} is not supported yet, use 'gpt-4', 'gpt-4-turbo', 'gpt-3.5-turbo' (default), 'gpt-3.5-turbo-0125', 'gpt-4-1106-preview', 'gpt-4-0125-preview' or 'gpt-4-turbo-preview'`
193+
[...MODEL_LIST.openai, ...MODEL_LIST.anthropic].includes(value),
194+
`${value} is not supported yet, use 'gpt-4', 'gpt-4-turbo', 'gpt-3.5-turbo' (default), 'gpt-3.5-turbo-0125', 'gpt-4-1106-preview', 'gpt-4-turbo-preview', 'gpt-4-0125-preview', 'claude-3-opus-20240229', 'claude-3-sonnet-20240229' or 'claude-3-haiku-20240307'`
167195
);
168196
return value;
169197
},
198+
170199
[CONFIG_KEYS.OCO_MESSAGE_TEMPLATE_PLACEHOLDER](value: any) {
171200
validateConfig(
172201
CONFIG_KEYS.OCO_MESSAGE_TEMPLATE_PLACEHOLDER,
@@ -200,10 +229,11 @@ export const configValidators = {
200229
[
201230
'',
202231
'openai',
232+
'anthropic',
203233
'ollama',
204234
'test'
205235
].includes(value),
206-
`${value} is not supported yet, use 'ollama' or 'openai' (default)`
236+
`${value} is not supported yet, use 'ollama' 'anthropic' or 'openai' (default)`
207237
);
208238
return value;
209239
},
@@ -228,6 +258,7 @@ const configPath = pathJoin(homedir(), '.opencommit');
228258
export const getConfig = (): ConfigType | null => {
229259
const configFromEnv = {
230260
OCO_OPENAI_API_KEY: process.env.OCO_OPENAI_API_KEY,
261+
OCO_ANTHROPIC_API_KEY: process.env.OCO_ANTHROPIC_API_KEY,
231262
OCO_TOKENS_MAX_INPUT: process.env.OCO_TOKENS_MAX_INPUT
232263
? Number(process.env.OCO_TOKENS_MAX_INPUT)
233264
: undefined,
@@ -237,7 +268,7 @@ export const getConfig = (): ConfigType | null => {
237268
OCO_OPENAI_BASE_PATH: process.env.OCO_OPENAI_BASE_PATH,
238269
OCO_DESCRIPTION: process.env.OCO_DESCRIPTION === 'true' ? true : false,
239270
OCO_EMOJI: process.env.OCO_EMOJI === 'true' ? true : false,
240-
OCO_MODEL: process.env.OCO_MODEL || 'gpt-3.5-turbo',
271+
OCO_MODEL: process.env.OCO_MODEL || getDefaultModel(process.env.OCO_AI_PROVIDER),
241272
OCO_LANGUAGE: process.env.OCO_LANGUAGE || 'en',
242273
OCO_MESSAGE_TEMPLATE_PLACEHOLDER:
243274
process.env.OCO_MESSAGE_TEMPLATE_PLACEHOLDER || '$msg',

src/engine/anthropic.ts

Lines changed: 124 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,124 @@
1+
import axios from 'axios';
2+
import chalk from 'chalk';
3+
4+
import Anthropic from '@anthropic-ai/sdk';
5+
import {ChatCompletionRequestMessage} from 'openai'
6+
import { MessageCreateParamsNonStreaming, MessageParam } from '@anthropic-ai/sdk/resources';
7+
8+
import { intro, outro } from '@clack/prompts';
9+
10+
import {
11+
CONFIG_MODES,
12+
DEFAULT_TOKEN_LIMITS,
13+
getConfig
14+
} from '../commands/config';
15+
import { GenerateCommitMessageErrorEnum } from '../generateCommitMessageFromGitDiff';
16+
import { tokenCount } from '../utils/tokenCount';
17+
import { AiEngine } from './Engine';
18+
import { MODEL_LIST } from '../commands/config';
19+
20+
const config = getConfig();
21+
22+
const MAX_TOKENS_OUTPUT =
23+
config?.OCO_TOKENS_MAX_OUTPUT ||
24+
DEFAULT_TOKEN_LIMITS.DEFAULT_MAX_TOKENS_OUTPUT;
25+
const MAX_TOKENS_INPUT =
26+
config?.OCO_TOKENS_MAX_INPUT || DEFAULT_TOKEN_LIMITS.DEFAULT_MAX_TOKENS_INPUT;
27+
28+
let provider = config?.OCO_AI_PROVIDER;
29+
let apiKey = config?.OCO_ANTHROPIC_API_KEY;
30+
const [command, mode] = process.argv.slice(2);
31+
if (
32+
provider === 'anthropic' &&
33+
!apiKey &&
34+
command !== 'config' &&
35+
mode !== CONFIG_MODES.set
36+
) {
37+
intro('opencommit');
38+
39+
outro(
40+
'OCO_ANTHROPIC_API_KEY is not set, please run `oco config set OCO_ANTHROPIC_API_KEY=<your token> . If you are using Claude, make sure you add payment details, so API works.`'
41+
);
42+
outro(
43+
'For help look into README https://github.com/di-sukharev/opencommit#setup'
44+
);
45+
46+
process.exit(1);
47+
}
48+
49+
const MODEL = config?.OCO_MODEL;
50+
if (provider === 'anthropic' &&
51+
!MODEL_LIST.anthropic.includes(MODEL) &&
52+
command !== 'config' &&
53+
mode !== CONFIG_MODES.set) {
54+
outro(
55+
`${chalk.red('✖')} Unsupported model ${MODEL} for Anthropic. Supported models are: ${MODEL_LIST.anthropic.join(
56+
', '
57+
)}`
58+
);
59+
process.exit(1);
60+
}
61+
62+
class AnthropicAi implements AiEngine {
63+
private anthropicAiApiConfiguration = {
64+
apiKey: apiKey
65+
};
66+
private anthropicAI!: Anthropic;
67+
68+
constructor() {
69+
this.anthropicAI = new Anthropic(this.anthropicAiApiConfiguration);
70+
}
71+
72+
public generateCommitMessage = async (
73+
messages: Array<ChatCompletionRequestMessage>
74+
): Promise<string | undefined> => {
75+
76+
const systemMessage = messages.find(msg => msg.role === 'system')?.content as string;
77+
const restMessages = messages.filter((msg) => msg.role !== 'system') as MessageParam[];
78+
79+
const params: MessageCreateParamsNonStreaming = {
80+
model: MODEL,
81+
system: systemMessage,
82+
messages: restMessages,
83+
temperature: 0,
84+
top_p: 0.1,
85+
max_tokens: MAX_TOKENS_OUTPUT
86+
};
87+
try {
88+
const REQUEST_TOKENS = messages
89+
.map((msg) => tokenCount(msg.content as string) + 4)
90+
.reduce((a, b) => a + b, 0);
91+
92+
if (REQUEST_TOKENS > MAX_TOKENS_INPUT - MAX_TOKENS_OUTPUT) {
93+
throw new Error(GenerateCommitMessageErrorEnum.tooMuchTokens);
94+
}
95+
96+
const data = await this.anthropicAI.messages.create(params);
97+
98+
const message = data?.content[0].text;
99+
100+
return message;
101+
} catch (error) {
102+
outro(`${chalk.red('✖')} ${JSON.stringify(params)}`);
103+
104+
const err = error as Error;
105+
outro(`${chalk.red('✖')} ${err?.message || err}`);
106+
107+
if (
108+
axios.isAxiosError<{ error?: { message: string } }>(error) &&
109+
error.response?.status === 401
110+
) {
111+
const anthropicAiError = error.response.data.error;
112+
113+
if (anthropicAiError?.message) outro(anthropicAiError.message);
114+
outro(
115+
'For help look into README https://github.com/di-sukharev/opencommit#setup'
116+
);
117+
}
118+
119+
throw err;
120+
}
121+
};
122+
}
123+
124+
export const anthropicAi = new AnthropicAi();

src/engine/openAi.ts

Lines changed: 27 additions & 8 deletions
Original file line numberDiff line numberDiff line change
@@ -1,6 +1,7 @@
11
import axios from 'axios';
22
import chalk from 'chalk';
33
import { execa } from 'execa';
4+
45
import {
56
ChatCompletionRequestMessage,
67
Configuration as OpenAiApiConfiguration,
@@ -17,20 +18,28 @@ import {
1718
import { GenerateCommitMessageErrorEnum } from '../generateCommitMessageFromGitDiff';
1819
import { tokenCount } from '../utils/tokenCount';
1920
import { AiEngine } from './Engine';
21+
import { MODEL_LIST } from '../commands/config';
2022

2123
const config = getConfig();
2224

23-
const MAX_TOKENS_OUTPUT = config?.OCO_TOKENS_MAX_OUTPUT || DEFAULT_TOKEN_LIMITS.DEFAULT_MAX_TOKENS_OUTPUT;
24-
const MAX_TOKENS_INPUT = config?.OCO_TOKENS_MAX_INPUT || DEFAULT_TOKEN_LIMITS.DEFAULT_MAX_TOKENS_INPUT;
25+
const MAX_TOKENS_OUTPUT =
26+
config?.OCO_TOKENS_MAX_OUTPUT ||
27+
DEFAULT_TOKEN_LIMITS.DEFAULT_MAX_TOKENS_OUTPUT;
28+
const MAX_TOKENS_INPUT =
29+
config?.OCO_TOKENS_MAX_INPUT || DEFAULT_TOKEN_LIMITS.DEFAULT_MAX_TOKENS_INPUT;
2530
let basePath = config?.OCO_OPENAI_BASE_PATH;
26-
let apiKey = config?.OCO_OPENAI_API_KEY
31+
let apiKey = config?.OCO_OPENAI_API_KEY;
2732

2833
const [command, mode] = process.argv.slice(2);
2934

30-
const isLocalModel = config?.OCO_AI_PROVIDER == 'ollama' || config?.OCO_AI_PROVIDER == 'test';
31-
35+
const provider = config?.OCO_AI_PROVIDER;
3236

33-
if (!apiKey && command !== 'config' && mode !== CONFIG_MODES.set && !isLocalModel) {
37+
if (
38+
provider === 'openai' &&
39+
!apiKey &&
40+
command !== 'config' &&
41+
mode !== CONFIG_MODES.set
42+
) {
3443
intro('opencommit');
3544

3645
outro(
@@ -44,6 +53,18 @@ if (!apiKey && command !== 'config' && mode !== CONFIG_MODES.set && !isLocalMode
4453
}
4554

4655
const MODEL = config?.OCO_MODEL || 'gpt-3.5-turbo';
56+
if (provider === 'openai' &&
57+
!MODEL_LIST.openai.includes(MODEL) &&
58+
command !== 'config' &&
59+
mode !== CONFIG_MODES.set) {
60+
outro(
61+
`${chalk.red('✖')} Unsupported model ${MODEL} for OpenAI. Supported models are: ${MODEL_LIST.openai.join(
62+
', '
63+
)}`
64+
);
65+
66+
process.exit(1);
67+
}
4768

4869
class OpenAi implements AiEngine {
4970
private openAiApiConfiguration = new OpenAiApiConfiguration({
@@ -105,6 +126,4 @@ class OpenAi implements AiEngine {
105126
};
106127
}
107128

108-
109-
110129
export const api = new OpenAi();

src/utils/engine.ts

Lines changed: 3 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -2,12 +2,15 @@ import { AiEngine } from '../engine/Engine';
22
import { api } from '../engine/openAi';
33
import { getConfig } from '../commands/config';
44
import { ollamaAi } from '../engine/ollama';
5+
import { anthropicAi } from '../engine/anthropic'
56
import { testAi } from '../engine/testAi';
67

78
export function getEngine(): AiEngine {
89
const config = getConfig();
910
if (config?.OCO_AI_PROVIDER == 'ollama') {
1011
return ollamaAi;
12+
} else if (config?.OCO_AI_PROVIDER == 'anthropic') {
13+
return anthropicAi;
1114
} else if (config?.OCO_AI_PROVIDER == 'test') {
1215
return testAi;
1316
}

0 commit comments

Comments
 (0)