diff --git a/README.md b/README.md
index 28687ac..fc416c2 100644
--- a/README.md
+++ b/README.md
@@ -6,7 +6,7 @@
AI Commit
-Use OpenAI / Azure OpenAI / DeepSeek / Gemini API to review Git changes, generate conventional commit messages that meet the conventions, simplify the commit process, and keep the commit conventions consistent.
+Use OpenAI / Azure OpenAI / DeepSeek / Gemini / Groq API to review Git changes, generate conventional commit messages that meet the conventions, simplify the commit process, and keep the commit conventions consistent.
**English** · [简体中文](./README.zh_CN.md) · [Report Bug][github-issues-link] · [Request Feature][github-issues-link]
@@ -61,7 +61,7 @@ In the VSCode settings, locate the "ai-commit" configuration options and configu
| Configuration | Type | Default | Required | Notes |
| :----------------- | :----: | :------------------: | :------: | :----------------------------------------------------------------------------------------------------------------: |
-| AI_PROVIDER | string | openai | Yes | Select AI Provider: `openai` or `gemini`. |
+| AI_PROVIDER | string | openai | Yes | Select AI Provider: `openai`, `gemini`, or `groq`. |
| OPENAI_API_KEY | string | None | Yes | Required when `AI Provider` is set to `OpenAI`. [OpenAI token](https://platform.openai.com/account/api-keys) |
| OPENAI_BASE_URL | string | None | No | If using Azure, use: https://{resource}.openai.azure.com/openai/deployments/{model} |
| OPENAI_MODEL | string | gpt-4o | Yes | OpenAI MODEL, you can select a model from the list by running the `Show Available OpenAI Models` command |
@@ -70,6 +70,9 @@ In the VSCode settings, locate the "ai-commit" configuration options and configu
| GEMINI_API_KEY | string | None | Yes | Required when `AI Provider` is set to `Gemini`. [Gemini API key](https://makersuite.google.com/app/apikey) |
| GEMINI_MODEL | string | gemini-2.0-flash-001 | Yes | Gemini MODEL. Currently, model selection is limited to configuration. |
| GEMINI_TEMPERATURE | number | 0.7 | No | Controls randomness in the output. Range: 0-2 for Gemini. Lower values: more focused, Higher values: more creative |
+| GROQ_API_KEY | string | None | Yes | Required when `AI Provider` is set to `Groq`. [Groq API key](https://console.groq.com/keys) |
+| GROQ_MODEL | string | llama-3.3-70b-versatile | Yes | Groq MODEL to use (default: llama-3.3-70b-versatile) |
+| GROQ_TEMPERATURE | number | 0.7 | No | Controls randomness in the output. Range: 0-2 for Groq. Lower values: more focused, Higher values: more creative |
| AI_COMMIT_LANGUAGE | string | en | Yes | Supports 19 languages |
| SYSTEM_PROMPT | string | None | No | Custom system prompt |
diff --git a/package-lock.json b/package-lock.json
index d171179..7b1a2c1 100644
--- a/package-lock.json
+++ b/package-lock.json
@@ -1,12 +1,12 @@
{
"name": "ai-commit",
- "version": "0.1.0",
+ "version": "0.1.1",
"lockfileVersion": 3,
"requires": true,
"packages": {
"": {
"name": "ai-commit",
- "version": "0.1.0",
+ "version": "0.1.1",
"license": "MIT",
"dependencies": {
"@google/generative-ai": "^0.21.0",
diff --git a/package.json b/package.json
index 5a27c4a..31f2060 100644
--- a/package.json
+++ b/package.json
@@ -128,12 +128,30 @@
"ai-commit.AI_PROVIDER": {
"type": "string",
"default": "openai",
- "description": "AI Provider to use (OpenAI or Gemini)",
+ "description": "AI Provider to use (OpenAI, Gemini, or Groq)",
"enum": [
"openai",
- "gemini"
+ "gemini",
+ "groq"
]
},
+ "ai-commit.GROQ_API_KEY": {
+ "type": "string",
+ "default": "",
+ "description": "Groq API Key"
+ },
+ "ai-commit.GROQ_MODEL": {
+ "type": "string",
+ "default": "llama-3.3-70b-versatile",
+ "description": "Groq Model to use"
+ },
+ "ai-commit.GROQ_TEMPERATURE": {
+ "type": "number",
+ "default": 0.7,
+ "minimum": 0,
+ "maximum": 2,
+ "description": "Groq temperature setting (0-2). Controls randomness."
+ },
"ai-commit.GEMINI_API_KEY": {
"type": "string",
"default": "",
diff --git a/src/config.ts b/src/config.ts
index df0194a..1ca48c5 100644
--- a/src/config.ts
+++ b/src/config.ts
@@ -25,6 +25,11 @@ export enum ConfigKeys {
GEMINI_API_KEY = 'GEMINI_API_KEY',
GEMINI_MODEL = 'GEMINI_MODEL',
GEMINI_TEMPERATURE = 'GEMINI_TEMPERATURE',
+
+ GROQ_API_KEY = 'GROQ_API_KEY',
+ GROQ_MODEL = 'GROQ_MODEL',
+ GROQ_TEMPERATURE = 'GROQ_TEMPERATURE',
+
AI_PROVIDER = 'AI_PROVIDER',
}
diff --git a/src/generate-commit-msg.ts b/src/generate-commit-msg.ts
index 397ea36..18578ee 100644
--- a/src/generate-commit-msg.ts
+++ b/src/generate-commit-msg.ts
@@ -7,6 +7,7 @@ import { ChatGPTAPI } from './openai-utils';
import { getMainCommitPrompt } from './prompts';
import { ProgressHandler } from './utils';
import { GeminiAPI } from './gemini-utils';
+import { GroqAPI } from './groq-utils';
/**
* Generates a chat completion prompt for the commit message based on the provided diff.
@@ -111,18 +112,27 @@ export async function generateCommitMsg(arg) {
try {
let commitMessage: string | undefined;
- if (aiProvider === 'gemini') {
- const geminiApiKey = configManager.getConfig(ConfigKeys.GEMINI_API_KEY);
- if (!geminiApiKey) {
- throw new Error('Gemini API Key not configured');
- }
- commitMessage = await GeminiAPI(messages);
- } else {
- const openaiApiKey = configManager.getConfig(ConfigKeys.OPENAI_API_KEY);
- if (!openaiApiKey) {
- throw new Error('OpenAI API Key not configured');
- }
- commitMessage = await ChatGPTAPI(messages as ChatCompletionMessageParam[]);
+ switch (aiProvider) {
+ case 'gemini':
+ const geminiApiKey = configManager.getConfig(ConfigKeys.GEMINI_API_KEY);
+ if (!geminiApiKey) {
+ throw new Error('Gemini API Key not configured');
+ }
+ commitMessage = await GeminiAPI(messages);
+ break;
+ case 'groq':
+ const groqApiKey = configManager.getConfig(ConfigKeys.GROQ_API_KEY);
+ if (!groqApiKey) {
+ throw new Error('Groq API Key not configured');
+ }
+ commitMessage = await GroqAPI(messages);
+ break;
+ default:
+ const openaiApiKey = configManager.getConfig(ConfigKeys.OPENAI_API_KEY);
+ if (!openaiApiKey) {
+ throw new Error('OpenAI API Key not configured');
+ }
+ commitMessage = await ChatGPTAPI(messages as ChatCompletionMessageParam[]);
}
@@ -151,6 +161,8 @@ export async function generateCommitMsg(arg) {
}
} else if (aiProvider === 'gemini') {
errorMessage = `Gemini API error: ${err.message}`;
+ } else if (aiProvider === 'groq') {
+ errorMessage = `Groq API error: ${err.message}`;
}
throw new Error(errorMessage);
diff --git a/src/groq-utils.ts b/src/groq-utils.ts
new file mode 100644
index 0000000..0016a5f
--- /dev/null
+++ b/src/groq-utils.ts
@@ -0,0 +1,67 @@
+import * as https from 'https';
+import { ConfigKeys, ConfigurationManager } from './config';
+
+function getGroqHeaders(apiKey: string) {
+ return {
+ 'Authorization': `Bearer ${apiKey}`,
+ 'Content-Type': 'application/json'
+ };
+}
+
+function makeGroqRequest(data: any, apiKey: string): Promise<string | undefined> {
+ return new Promise((resolve, reject) => {
+ const options = {
+ hostname: 'api.groq.com',
+ path: '/openai/v1/chat/completions',
+ method: 'POST',
+ headers: getGroqHeaders(apiKey)
+ };
+
+ const req = https.request(options, (res) => {
+ let responseData = '';
+
+ res.on('data', (chunk) => {
+ responseData += chunk;
+ });
+
+ res.on('end', () => {
+ try {
+ const jsonResponse = JSON.parse(responseData);
+ if (jsonResponse.error) {
+ reject(new Error(jsonResponse.error.message || 'Groq API Error'));
+ } else {
+ resolve(jsonResponse.choices[0]?.message?.content);
+ }
+ } catch (error) {
+ reject(new Error('Failed to parse Groq API response'));
+ }
+ });
+ });
+
+ req.on('error', (error) => {
+ reject(error);
+ });
+
+ req.write(JSON.stringify(data));
+ req.end();
+ });
+}
+
+export async function GroqAPI(messages: any[]) {
+ const configManager = ConfigurationManager.getInstance();
+ const apiKey = configManager.getConfig(ConfigKeys.GROQ_API_KEY);
+ const model = configManager.getConfig(ConfigKeys.GROQ_MODEL);
+ const temperature = configManager.getConfig(ConfigKeys.GROQ_TEMPERATURE, 0.7);
+
+ if (!apiKey) {
+ throw new Error('Groq API Key not configured');
+ }
+
+ const data = {
+ model: model || 'llama-3.3-70b-versatile',
+ messages: messages,
+ temperature
+ };
+
+ return makeGroqRequest(data, apiKey);
+}