Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
36 changes: 34 additions & 2 deletions backend/package-lock.json

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

1 change: 1 addition & 0 deletions backend/package.json
Original file line number Diff line number Diff line change
Expand Up @@ -18,6 +18,7 @@
"cors": "^2.8.5",
"dotenv": "^16.3.1",
"express": "^4.18.2",
"groq-sdk": "^0.5.0",
"jsonwebtoken": "^9.0.2",
"node-cron": "^3.0.3",
"openai": "^4.47.0",
Expand Down
18 changes: 15 additions & 3 deletions backend/src/models/index.ts
Original file line number Diff line number Diff line change
Expand Up @@ -43,7 +43,7 @@ export interface NotificationSettings {
export interface AISettings {
ai_enabled: boolean;
ai_verification_enabled: boolean;
ai_provider: 'anthropic' | 'openai' | 'ollama' | 'gemini' | null;
ai_provider: 'anthropic' | 'openai' | 'ollama' | 'gemini' | 'groq' | null;
anthropic_api_key: string | null;
anthropic_model: string | null;
openai_api_key: string | null;
Expand All @@ -52,6 +52,8 @@ export interface AISettings {
ollama_model: string | null;
gemini_api_key: string | null;
gemini_model: string | null;
groq_api_key: string | null;
groq_model: string | null;
}

export const userQueries = {
Expand Down Expand Up @@ -248,7 +250,8 @@ export const userQueries = {
const result = await pool.query(
`SELECT ai_enabled, COALESCE(ai_verification_enabled, false) as ai_verification_enabled,
ai_provider, anthropic_api_key, anthropic_model, openai_api_key, openai_model,
ollama_base_url, ollama_model, gemini_api_key, gemini_model
ollama_base_url, ollama_model, gemini_api_key, gemini_model,
groq_api_key, groq_model
FROM users WHERE id = $1`,
[id]
);
Expand Down Expand Up @@ -307,6 +310,14 @@ export const userQueries = {
fields.push(`gemini_model = $${paramIndex++}`);
values.push(settings.gemini_model);
}
if (settings.groq_api_key !== undefined) {
fields.push(`groq_api_key = $${paramIndex++}`);
values.push(settings.groq_api_key);
}
if (settings.groq_model !== undefined) {
fields.push(`groq_model = $${paramIndex++}`);
values.push(settings.groq_model);
}

if (fields.length === 0) return null;

Expand All @@ -315,7 +326,8 @@ export const userQueries = {
`UPDATE users SET ${fields.join(', ')} WHERE id = $${paramIndex}
RETURNING ai_enabled, COALESCE(ai_verification_enabled, false) as ai_verification_enabled,
ai_provider, anthropic_api_key, anthropic_model, openai_api_key, openai_model,
ollama_base_url, ollama_model, gemini_api_key, gemini_model`,
ollama_base_url, ollama_model, gemini_api_key, gemini_model,
groq_api_key, groq_model`,
values
);
return result.rows[0] || null;
Expand Down
50 changes: 50 additions & 0 deletions backend/src/routes/settings.ts
Original file line number Diff line number Diff line change
Expand Up @@ -339,6 +339,8 @@ router.get('/ai', async (req: AuthRequest, res: Response) => {
ollama_model: settings.ollama_model || null,
gemini_api_key: settings.gemini_api_key || null,
gemini_model: settings.gemini_model || null,
groq_api_key: settings.groq_api_key || null,
groq_model: settings.groq_model || null,
});
} catch (error) {
console.error('Error fetching AI settings:', error);
Expand All @@ -362,6 +364,8 @@ router.put('/ai', async (req: AuthRequest, res: Response) => {
ollama_model,
gemini_api_key,
gemini_model,
groq_api_key,
groq_model,
} = req.body;

const settings = await userQueries.updateAISettings(userId, {
Expand All @@ -376,6 +380,8 @@ router.put('/ai', async (req: AuthRequest, res: Response) => {
ollama_model,
gemini_api_key,
gemini_model,
groq_api_key,
groq_model,
});

if (!settings) {
Expand All @@ -395,6 +401,8 @@ router.put('/ai', async (req: AuthRequest, res: Response) => {
ollama_model: settings.ollama_model || null,
gemini_api_key: settings.gemini_api_key || null,
gemini_model: settings.gemini_model || null,
groq_api_key: settings.groq_api_key || null,
groq_model: settings.groq_model || null,
message: 'AI settings updated successfully',
});
} catch (error) {
Expand Down Expand Up @@ -520,4 +528,46 @@ router.post('/ai/test-gemini', async (req: AuthRequest, res: Response) => {
}
});

// Test Groq API key: runs the cheapest possible chat completion to verify
// that the supplied key authenticates against the Groq API.
router.post('/ai/test-groq', async (req: AuthRequest, res: Response) => {
  try {
    const { api_key } = req.body;

    if (!api_key) {
      res.status(400).json({ error: 'API key is required' });
      return;
    }

    // Lazy-load the SDK so it is only pulled in when a Groq test is requested.
    const Groq = (await import('groq-sdk')).default;
    const groq = new Groq({ apiKey: api_key });

    // Minimal round-trip (10 tokens max) that still exercises authentication.
    await groq.chat.completions.create({
      model: 'llama-3.3-70b-versatile',
      max_tokens: 10,
      messages: [{ role: 'user', content: 'Say "API key valid" in 3 words or less' }],
    });

    res.json({
      success: true,
      message: 'Successfully connected to Groq API',
    });
  } catch (error) {
    console.error('Error testing Groq connection:', error);
    const errorMessage = error instanceof Error ? error.message : 'Unknown error';

    // The Groq SDK attaches an HTTP `status` to its APIError instances; prefer
    // that over message sniffing. Fall back to a case-insensitive message match
    // so variants like "Invalid API Key" are still classified as auth failures
    // (the original case-sensitive includes() missed those spellings).
    const status =
      typeof error === 'object' && error !== null && 'status' in error
        ? (error as { status?: unknown }).status
        : undefined;
    const lowerMessage = errorMessage.toLowerCase();
    const isAuthFailure =
      status === 401 ||
      lowerMessage.includes('401') ||
      lowerMessage.includes('invalid') ||
      lowerMessage.includes('api key');

    if (isAuthFailure) {
      res.status(400).json({
        error: 'Invalid API key. Please check your Groq API key.',
        success: false,
      });
    } else {
      res.status(500).json({
        error: `Failed to connect to Groq: ${errorMessage}`,
        success: false,
      });
    }
  }
});

export default router;
Loading