7 changes: 5 additions & 2 deletions README.md
@@ -6,7 +6,7 @@

<h1>AI Commit</h1>

Use OpenAI / Azure OpenAI / DeepSeek / Gemini API to review Git changes, generate conventional commit messages that meet the conventions, simplify the commit process, and keep the commit conventions consistent.
Use OpenAI / Azure OpenAI / DeepSeek / Gemini / Groq API to review Git changes, generate conventional commit messages that meet the conventions, simplify the commit process, and keep the commit conventions consistent.

**English** · [简体中文](./README.zh_CN.md) · [Report Bug][github-issues-link] · [Request Feature][github-issues-link]

@@ -61,7 +61,7 @@ In the VSCode settings, locate the "ai-commit" configuration options and configu

| Configuration | Type | Default | Required | Notes |
| :----------------- | :----: | :------------------: | :------: | :----------------------------------------------------------------------------------------------------------------: |
| AI_PROVIDER | string | openai | Yes | Select AI Provider: `openai` or `gemini`. |
| AI_PROVIDER | string | openai | Yes | Select AI Provider: `openai`, `gemini`, or `groq`. |
| OPENAI_API_KEY | string | None | Yes | Required when `AI Provider` is set to `OpenAI`. [OpenAI token](https://platform.openai.com/account/api-keys) |
| OPENAI_BASE_URL | string | None | No | If using Azure, use: https://{resource}.openai.azure.com/openai/deployments/{model} |
| OPENAI_MODEL | string | gpt-4o | Yes | OpenAI MODEL, you can select a model from the list by running the `Show Available OpenAI Models` command |
@@ -70,6 +70,9 @@ In the VSCode settings, locate the "ai-commit" configuration options and configu
| GEMINI_API_KEY | string | None | Yes | Required when `AI Provider` is set to `Gemini`. [Gemini API key](https://makersuite.google.com/app/apikey) |
| GEMINI_MODEL | string | gemini-2.0-flash-001 | Yes | Gemini MODEL. Currently, model selection is limited to configuration. |
| GEMINI_TEMPERATURE | number | 0.7 | No | Controls randomness in the output. Range: 0-2 for Gemini. Lower values: more focused, Higher values: more creative |
| GROQ_API_KEY | string | None | Yes | Required when `AI Provider` is set to `Groq`. [Groq API key](https://console.groq.com/keys) |
| GROQ_MODEL | string | llama-3.3-70b-versatile | Yes | Groq MODEL to use (default: llama-3.3-70b-versatile) |
| GROQ_TEMPERATURE | number | 0.7 | No | Controls randomness in the output. Range: 0-2 for Groq. Lower values: more focused, Higher values: more creative |
| AI_COMMIT_LANGUAGE | string | en | Yes | Supports 19 languages |
| SYSTEM_PROMPT | string | None | No | Custom system prompt |
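
For example, configuring Groq in VSCode's `settings.json` might look like the sketch below. This is a minimal illustration only: the key names come from the `package.json` contributions in this PR, and the API key value is a placeholder.

```jsonc
{
  // Use Groq as the provider for commit message generation.
  "ai-commit.AI_PROVIDER": "groq",
  "ai-commit.GROQ_API_KEY": "<your-groq-api-key>",
  "ai-commit.GROQ_MODEL": "llama-3.3-70b-versatile",
  "ai-commit.GROQ_TEMPERATURE": 0.7
}
```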

4 changes: 2 additions & 2 deletions package-lock.json

Some generated files are not rendered by default.

22 changes: 20 additions & 2 deletions package.json
@@ -128,12 +128,30 @@
"ai-commit.AI_PROVIDER": {
"type": "string",
"default": "openai",
"description": "AI Provider to use (OpenAI or Gemini)",
"description": "AI Provider to use (OpenAI, Gemini, or Groq)",
"enum": [
"openai",
"gemini"
"gemini",
"groq"
]
},
"ai-commit.GROQ_API_KEY": {
"type": "string",
"default": "",
"description": "Groq API Key"
},
"ai-commit.GROQ_MODEL": {
"type": "string",
"default": "llama-3.3-70b-versatile",
"description": "Groq Model to use"
},
"ai-commit.GROQ_TEMPERATURE": {
"type": "number",
"default": 0.7,
"minimum": 0,
"maximum": 2,
"description": "Groq temperature setting (0-2). Controls randomness."
},
"ai-commit.GEMINI_API_KEY": {
"type": "string",
"default": "",
5 changes: 5 additions & 0 deletions src/config.ts
@@ -25,6 +25,11 @@ export enum ConfigKeys {
GEMINI_API_KEY = 'GEMINI_API_KEY',
GEMINI_MODEL = 'GEMINI_MODEL',
GEMINI_TEMPERATURE = 'GEMINI_TEMPERATURE',

GROQ_API_KEY = 'GROQ_API_KEY',
GROQ_MODEL = 'GROQ_MODEL',
GROQ_TEMPERATURE = 'GROQ_TEMPERATURE',

AI_PROVIDER = 'AI_PROVIDER',
}

36 changes: 24 additions & 12 deletions src/generate-commit-msg.ts
@@ -7,6 +7,7 @@ import { ChatGPTAPI } from './openai-utils';
import { getMainCommitPrompt } from './prompts';
import { ProgressHandler } from './utils';
import { GeminiAPI } from './gemini-utils';
import { GroqAPI } from './groq-utils';

/**
* Generates a chat completion prompt for the commit message based on the provided diff.
@@ -111,18 +112,27 @@ export async function generateCommitMsg(arg) {
try {
let commitMessage: string | undefined;

if (aiProvider === 'gemini') {
const geminiApiKey = configManager.getConfig<string>(ConfigKeys.GEMINI_API_KEY);
if (!geminiApiKey) {
throw new Error('Gemini API Key not configured');
}
commitMessage = await GeminiAPI(messages);
} else {
const openaiApiKey = configManager.getConfig<string>(ConfigKeys.OPENAI_API_KEY);
if (!openaiApiKey) {
throw new Error('OpenAI API Key not configured');
}
commitMessage = await ChatGPTAPI(messages as ChatCompletionMessageParam[]);
switch (aiProvider) {
case 'gemini':
const geminiApiKey = configManager.getConfig<string>(ConfigKeys.GEMINI_API_KEY);
if (!geminiApiKey) {
throw new Error('Gemini API Key not configured');
}
commitMessage = await GeminiAPI(messages);
break;
case 'groq':
const groqApiKey = configManager.getConfig<string>(ConfigKeys.GROQ_API_KEY);
if (!groqApiKey) {
throw new Error('Groq API Key not configured');
}
commitMessage = await GroqAPI(messages);
break;
default:
const openaiApiKey = configManager.getConfig<string>(ConfigKeys.OPENAI_API_KEY);
if (!openaiApiKey) {
throw new Error('OpenAI API Key not configured');
}
commitMessage = await ChatGPTAPI(messages as ChatCompletionMessageParam[]);
}


@@ -151,6 +161,8 @@ export async function generateCommitMsg(arg) {
}
} else if (aiProvider === 'gemini') {
errorMessage = `Gemini API error: ${err.message}`;
} else if (aiProvider === 'groq') {
errorMessage = `Groq API error: ${err.message}`;
}

throw new Error(errorMessage);
67 changes: 67 additions & 0 deletions src/groq-utils.ts
@@ -0,0 +1,67 @@
import * as https from 'https';
import { ConfigKeys, ConfigurationManager } from './config';

function getGroqHeaders(apiKey: string) {
return {
'Authorization': `Bearer ${apiKey}`,
'Content-Type': 'application/json'
};
}

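/**
 * Sends a chat completion request to Groq's OpenAI-compatible endpoint and
 * resolves with the content of the first choice, rejecting on API or parse errors.
 */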
function makeGroqRequest(data: any, apiKey: string): Promise<string> {
return new Promise((resolve, reject) => {
const options = {
hostname: 'api.groq.com',
path: '/openai/v1/chat/completions',
method: 'POST',
headers: getGroqHeaders(apiKey)
};

const req = https.request(options, (res) => {
let responseData = '';

res.on('data', (chunk) => {
responseData += chunk;
});

res.on('end', () => {
try {
const jsonResponse = JSON.parse(responseData);
if (jsonResponse.error) {
reject(new Error(jsonResponse.error.message || 'Groq API Error'));
} else {
resolve(jsonResponse.choices[0]?.message?.content);
}
} catch (error) {
reject(new Error('Failed to parse Groq API response'));
}
});
});

req.on('error', (error) => {
reject(error);
});

req.write(JSON.stringify(data));
req.end();
});
}

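/**
 * Generates a chat completion via the Groq API using the configured model and
 * temperature, falling back to 'llama-3.3-70b-versatile' when no model is set.
 */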
export async function GroqAPI(messages: any[]) {
const configManager = ConfigurationManager.getInstance();
const apiKey = configManager.getConfig<string>(ConfigKeys.GROQ_API_KEY);
const model = configManager.getConfig<string>(ConfigKeys.GROQ_MODEL);
const temperature = configManager.getConfig<number>(ConfigKeys.GROQ_TEMPERATURE, 0.7);

if (!apiKey) {
throw new Error('Groq API Key not configured');
}

const data = {
model: model || 'llama-3.3-70b-versatile',
messages: messages,
temperature
};

return makeGroqRequest(data, apiKey);
}
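
For reference, below is a minimal usage sketch of the new helper. It is a hypothetical standalone call (in the extension, `GroqAPI` is invoked from `generateCommitMsg` with the prompt built by `getMainCommitPrompt`), and it assumes `GROQ_API_KEY` has already been set in the extension configuration:

```typescript
import { GroqAPI } from './groq-utils';

// Hypothetical example; the real prompt is produced by getMainCommitPrompt.
async function previewCommitMessage(diff: string) {
  // OpenAI-style chat messages; Groq exposes an OpenAI-compatible endpoint.
  const messages = [
    { role: 'system', content: 'Generate a conventional commit message for the given diff.' },
    { role: 'user', content: diff }
  ];

  const commitMessage = await GroqAPI(messages);
  console.log(commitMessage);
}
```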