diff --git a/components/SettingsModal.tsx b/components/SettingsModal.tsx
index b111a5d..ecc3894 100644
--- a/components/SettingsModal.tsx
+++ b/components/SettingsModal.tsx
@@ -1,12 +1,12 @@
 // components/SettingsModal.tsx
-// version 0.0.46
+// version 0.0.48
 import React, { useState, useEffect, useRef, useCallback } from 'react';
 import { XMarkIcon, CogIcon, CheckCircleIcon, ArrowPathIcon } from './Icons.tsx';
 import { Spinner } from './Spinner.tsx';
 import { useSettings } from '../contexts/SettingsProvider.tsx';
 import { testApi } from '../services/Service.ts';
-import { OPEN_ROUTER_MODELS } from '../constants.ts';
-import type { ApiKeys } from '../types.ts';
+import { OPEN_ROUTER_MODELS, API_PROVIDER_LABELS, DEFAULT_LOCAL_AI_URL, DEFAULT_LOCAL_AI_MODEL } from '../constants.ts';
+import type { ApiKeys, ApiProvider, LocalAiConfig } from '../types.ts';
 
 interface SettingsModalProps {
   isOpen: boolean;
@@ -15,20 +15,24 @@ interface SettingsModalProps {
 
 export const SettingsModal: React.FC<SettingsModalProps> = ({ isOpen, onClose }) => {
   const {
+    apiProvider: globalApiProvider, setApiProvider: setGlobalApiProvider,
     apiKeys: globalApiKeys, setApiKeys: setGlobalApiKeys,
     openRouterModel: globalOpenRouterModel, setOpenRouterModel: setGlobalOpenRouterModel,
+    localAiConfig: globalLocalAiConfig, setLocalAiConfig: setGlobalLocalAiConfig,
     saveApiKeys: globalSaveApiKeys, setSaveApiKeys: setGlobalSaveApiKeys
   } = useSettings();
 
   // Local state for the modal form
+  const [localApiProvider, setLocalApiProvider] = useState(globalApiProvider);
   const [localApiKeys, setLocalApiKeys] = useState(globalApiKeys);
   const [localOpenRouterModel, setLocalOpenRouterModel] = useState(globalOpenRouterModel);
+  const [localLocalAiConfig, setLocalLocalAiConfig] = useState(globalLocalAiConfig);
   const [localSaveApiKeys, setLocalSaveApiKeys] = useState(globalSaveApiKeys);
   const [isTesting, setIsTesting] = useState(false);
   const [testResult, setTestResult] = useState<{success: boolean, message: string} | null>(null);
   const [isKeyValidated, setIsKeyValidated] = useState(false);
-  const openRouterInputRef = useRef<HTMLInputElement>(null);
+  const apiKeyInputRef = useRef<HTMLInputElement>(null);
 
   // State for dynamic OpenRouter models
   const [openRouterModels, setOpenRouterModels] = useState(OPEN_ROUTER_MODELS);
@@ -38,19 +42,21 @@ export const SettingsModal: React.FC<SettingsModalProps> = ({ isOpen, onClose })
 
   // Sync local state with global context when modal opens or global state changes
   useEffect(() => {
     if (isOpen) {
+      setLocalApiProvider(globalApiProvider);
       setLocalApiKeys(globalApiKeys);
       setLocalOpenRouterModel(globalOpenRouterModel);
+      setLocalLocalAiConfig(globalLocalAiConfig);
       setLocalSaveApiKeys(globalSaveApiKeys);
       setTestResult(null);
       setIsKeyValidated(false);
     }
-  }, [isOpen, globalApiKeys, globalOpenRouterModel, globalSaveApiKeys]);
+  }, [isOpen, globalApiProvider, globalApiKeys, globalOpenRouterModel, globalLocalAiConfig, globalSaveApiKeys]);
 
   // Focus input when modal opens
   useEffect(() => {
     if (isOpen) {
       setTimeout(() => {
-        openRouterInputRef.current?.focus();
+        apiKeyInputRef.current?.focus();
       }, 100);
     }
   }, [isOpen]);
@@ -106,35 +112,54 @@ export const SettingsModal: React.FC<SettingsModalProps> = ({ isOpen, onClose })
   }, [localOpenRouterModel]);
 
   useEffect(() => {
-    if (isOpen) {
+    if (isOpen && localApiProvider === 'openrouter') {
       fetchOpenRouterModels();
     }
-  }, [isOpen, fetchOpenRouterModels]);
+  }, [isOpen, localApiProvider, fetchOpenRouterModels]);
 
   const handleSaveSettings = () => {
-    // Only require validation if a key is actually present. Allows saving an empty key.
-    if (localApiKeys.openrouter.trim() && !isKeyValidated) {
-      setTestResult({success: false, message: 'Please validate the new API key successfully before saving.'});
-      return;
+    // For local AI, no key is required
+    if (localApiProvider === 'openrouter') {
+      if (localApiKeys.openrouter.trim() && !isKeyValidated) {
+        setTestResult({success: false, message: 'Please validate the new API key successfully before saving.'});
+        return;
+      }
     }
+    setGlobalApiProvider(localApiProvider);
     setGlobalApiKeys(localApiKeys);
     setGlobalOpenRouterModel(localOpenRouterModel);
+    setGlobalLocalAiConfig(localLocalAiConfig);
     setGlobalSaveApiKeys(localSaveApiKeys);
     onClose();
   };
 
   const handleTestApi = async () => {
-    const keyToTest = localApiKeys.openrouter;
-    const modelToTest = localOpenRouterModel;
+    let keyToTest = '';
+    let modelToTest = '';
+    let baseUrl: string | undefined;
+
+    if (localApiProvider === 'openrouter') {
+      keyToTest = localApiKeys.openrouter;
+      modelToTest = localOpenRouterModel;
+    } else {
+      keyToTest = localApiKeys.localai;
+      modelToTest = localLocalAiConfig.model;
+      baseUrl = localLocalAiConfig.baseUrl;
+    }
 
-    if (!keyToTest) {
+    if (localApiProvider === 'openrouter' && !keyToTest) {
       setTestResult({ success: false, message: 'API Key must be provided.' });
       return;
     }
 
+    if (localApiProvider === 'localai' && !localLocalAiConfig.baseUrl) {
+      setTestResult({ success: false, message: 'Base URL must be provided for Local AI.' });
+      return;
+    }
+
     setIsKeyValidated(false);
     setIsTesting(true);
     setTestResult(null);
-    const result = await testApi(keyToTest, modelToTest);
+    const result = await testApi(keyToTest, modelToTest, localApiProvider, baseUrl);
     setIsTesting(false);
     setTestResult({ success: result.success, message: result.success ? 'API connection successful!' : `Test failed: ${result.error}` });
     if (result.success) {
@@ -144,11 +169,149 @@ export const SettingsModal: React.FC<SettingsModalProps> = ({ isOpen, onClose })
 
   if (!isOpen) return null;
 
-  const currentKey = localApiKeys.openrouter;
+  const getCurrentApiKey = () => {
+    if (localApiProvider === 'openrouter') return localApiKeys.openrouter;
+    return localApiKeys.localai;
+  };
+
+  const updateApiKey = (value: string) => {
+    const key = localApiProvider === 'openrouter' ? 'openrouter' : 'localai';
+    setLocalApiKeys({ ...localApiKeys, [key]: value });
+    setIsKeyValidated(false);
+    setTestResult(null);
+  };
+
+  const renderProviderSettings = () => {
+    switch (localApiProvider) {
+      case 'openrouter':
+        return (
+          <>
+
+ +
+ updateApiKey(e.target.value)} + placeholder="Enter your OpenRouter key (sk-or-v1...)" + className="w-full pl-4 pr-12 py-2 bg-control-bg border-2 border-control-border rounded-lg text-text-primary focus:ring-2 focus:ring-purple-500 focus:border-purple-500 focus:outline-none transition-colors" + /> + {isKeyValidated && localApiKeys.openrouter && ( + + )} +
+
+ +
+ +
+ + +
+ {fetchModelsError &&

{fetchModelsError}

} +

+ Recommendation: For best results, use google/gemini-2.5-flash. The prompts in this tool have been highly optimized for it. +

+
+ + ); + + case 'localai': + return ( + <> +
+ + { + setLocalLocalAiConfig({ ...localLocalAiConfig, baseUrl: e.target.value }); + setIsKeyValidated(false); + setTestResult(null); + }} + placeholder={DEFAULT_LOCAL_AI_URL} + className="w-full pl-4 pr-4 py-2 bg-control-bg border-2 border-control-border rounded-lg text-text-primary focus:ring-2 focus:ring-purple-500 focus:border-purple-500 focus:outline-none transition-colors" + /> +

+ Example: http://localhost:4000/v1/chat/completions or http://localhost:11434/v1/chat/completions +

+
+ +
+ + { + setLocalLocalAiConfig({ ...localLocalAiConfig, model: e.target.value }); + setIsKeyValidated(false); + setTestResult(null); + }} + placeholder={DEFAULT_LOCAL_AI_MODEL} + className="w-full pl-4 pr-4 py-2 bg-control-bg border-2 border-control-border rounded-lg text-text-primary focus:ring-2 focus:ring-purple-500 focus:border-purple-500 focus:outline-none transition-colors" + /> +

+ Examples: llama3.2, mistral, gpt-3.5-turbo +

+
+ +
+ +
+ updateApiKey(e.target.value)} + placeholder="Leave empty if not required" + className="w-full pl-4 pr-12 py-2 bg-control-bg border-2 border-control-border rounded-lg text-text-primary focus:ring-2 focus:ring-purple-500 focus:border-purple-500 focus:outline-none transition-colors" + /> + {isKeyValidated && ( + + )} +
+
+ +
+

+ Local AI / LiteLLM: Use this option for self-hosted LLMs like Ollama, LiteLLM proxy, or any OpenAI-compatible API endpoint. +

+
+ + ); + } + }; return (
-
e.stopPropagation()}> +
e.stopPropagation()}>
@@ -159,60 +322,40 @@ export const SettingsModal: React.FC = ({ isOpen, onClose })
+ {/* API Provider Selection */}
-
@@ -261,4 +405,4 @@ export const SettingsModal: React.FC = ({ isOpen, onClose })
); -}; \ No newline at end of file +}; diff --git a/constants.ts b/constants.ts index 401bb3d..98042ee 100644 --- a/constants.ts +++ b/constants.ts @@ -13,6 +13,16 @@ export const OPEN_ROUTER_MODELS = [ 'openai/gpt-3.5-turbo', ]; +// Default Local AI / LiteLLM settings +export const DEFAULT_LOCAL_AI_URL = 'http://localhost:4000/v1/chat/completions'; +export const DEFAULT_LOCAL_AI_MODEL = 'gpt-3.5-turbo'; + +// API Provider labels for UI +export const API_PROVIDER_LABELS = { + openrouter: 'OpenRouter', + localai: 'Local AI / LiteLLM', +} as const; + export const SEVERITY_STYLES: Record = { [Severity.CRITICAL]: { diff --git a/contexts/SettingsProvider.tsx b/contexts/SettingsProvider.tsx index 859a8bc..34cb516 100644 --- a/contexts/SettingsProvider.tsx +++ b/contexts/SettingsProvider.tsx @@ -1,15 +1,19 @@ // contexts/SettingsProvider.tsx import React, { createContext, useContext, useState, useEffect, useMemo } from 'react'; -import { ApiKeys } from '../types.ts'; -import { OPEN_ROUTER_MODELS } from '../constants.ts'; +import { ApiKeys, ApiProvider, LocalAiConfig } from '../types.ts'; +import { OPEN_ROUTER_MODELS, DEFAULT_LOCAL_AI_URL, DEFAULT_LOCAL_AI_MODEL } from '../constants.ts'; interface SettingsContextType { theme: 'light' | 'dark'; setTheme: (theme: 'light' | 'dark') => void; + apiProvider: ApiProvider; + setApiProvider: (provider: ApiProvider) => void; apiKeys: ApiKeys; setApiKeys: (keys: ApiKeys) => void; openRouterModel: string; setOpenRouterModel: (model: string) => void; + localAiConfig: LocalAiConfig; + setLocalAiConfig: (config: LocalAiConfig) => void; saveApiKeys: boolean; setSaveApiKeys: (save: boolean) => void; } @@ -18,8 +22,13 @@ const SettingsContext = createContext(undefined export const SettingsProvider: React.FC<{ children: React.ReactNode }> = ({ children }) => { const [theme, setTheme] = useState<'light' | 'dark'>('dark'); - const [apiKeys, setApiKeys] = useState({ openrouter: '' }); + const [apiProvider, setApiProvider] = useState('openrouter'); + const [apiKeys, setApiKeys] = useState({ openrouter: '', localai: '' }); const [openRouterModel, setOpenRouterModel] = useState(OPEN_ROUTER_MODELS[0]); + const [localAiConfig, setLocalAiConfig] = useState({ + baseUrl: DEFAULT_LOCAL_AI_URL, + model: DEFAULT_LOCAL_AI_MODEL, + }); const [saveApiKeys, setSaveApiKeys] = useState(false); useEffect(() => { @@ -31,6 +40,9 @@ export const SettingsProvider: React.FC<{ children: React.ReactNode }> = ({ chil setTheme('dark'); } + const savedProvider = localStorage.getItem('apiProvider') as ApiProvider | null; + if (savedProvider) setApiProvider(savedProvider); + const savedSavePref = localStorage.getItem('saveApiKeys') === 'true'; setSaveApiKeys(savedSavePref); @@ -39,8 +51,11 @@ export const SettingsProvider: React.FC<{ children: React.ReactNode }> = ({ chil if (savedKeys) setApiKeys(JSON.parse(savedKeys)); } - const savedModel = localStorage.getItem('openRouterModel'); - if (savedModel) setOpenRouterModel(savedModel); + const savedOpenRouterModel = localStorage.getItem('openRouterModel'); + if (savedOpenRouterModel) setOpenRouterModel(savedOpenRouterModel); + + const savedLocalAiConfig = localStorage.getItem('localAiConfig'); + if (savedLocalAiConfig) setLocalAiConfig(JSON.parse(savedLocalAiConfig)); } catch (e) { console.error("Could not load settings:", e); } }, []); @@ -53,6 +68,11 @@ export const SettingsProvider: React.FC<{ children: React.ReactNode }> = ({ chil catch (e) { console.error("Could not save theme:", e); } }, [theme]); + useEffect(() => { + try { 
localStorage.setItem('apiProvider', apiProvider); } + catch (e) { console.error("Could not save API provider:", e); } + }, [apiProvider]); + useEffect(() => { try { localStorage.setItem('saveApiKeys', String(saveApiKeys)); @@ -66,15 +86,22 @@ export const SettingsProvider: React.FC<{ children: React.ReactNode }> = ({ chil useEffect(() => { try { localStorage.setItem('openRouterModel', openRouterModel); } - catch (e) { console.error("Could not save model:", e); } + catch (e) { console.error("Could not save OpenRouter model:", e); } }, [openRouterModel]); + + useEffect(() => { + try { localStorage.setItem('localAiConfig', JSON.stringify(localAiConfig)); } + catch (e) { console.error("Could not save Local AI config:", e); } + }, [localAiConfig]); const value = useMemo(() => ({ theme, setTheme, + apiProvider, setApiProvider, apiKeys, setApiKeys, openRouterModel, setOpenRouterModel, + localAiConfig, setLocalAiConfig, saveApiKeys, setSaveApiKeys, - }), [theme, apiKeys, openRouterModel, saveApiKeys]); + }), [theme, apiProvider, apiKeys, openRouterModel, localAiConfig, saveApiKeys]); return {children}; }; @@ -85,4 +112,4 @@ export const useSettings = (): SettingsContextType => { throw new Error('useSettings must be used within a SettingsProvider'); } return context; -}; \ No newline at end of file +}; diff --git a/dockerizer.sh b/dockerizer.sh old mode 100644 new mode 100755 index 33fff65..15c78ce --- a/dockerizer.sh +++ b/dockerizer.sh @@ -1,19 +1,31 @@ #!/bin/bash +# Determine which docker compose command is available +if docker compose version &> /dev/null; then + DOCKER_COMPOSE_CMD="docker compose" +elif docker-compose version &> /dev/null; then + DOCKER_COMPOSE_CMD="docker-compose" +else + echo "❌ Neither 'docker compose' nor 'docker-compose' is available." + echo "💡 Please install Docker Compose." + exit 1 +fi + +echo "--- Using: $DOCKER_COMPOSE_CMD ---" echo "--- Stopping any previous containers... ---" -docker-compose -f docker-compose.yml down -v +$DOCKER_COMPOSE_CMD -f docker-compose.yml down -v if [ $? -ne 0 ]; then - echo "Warning: 'docker-compose down' failed. This might be the first run, which is okay. Continuing..." + echo "Warning: 'docker compose down' failed. This might be the first run, which is okay. Continuing..." fi echo "--- Building and starting the application... ---" -docker-compose -f docker-compose.yml up --build -d +$DOCKER_COMPOSE_CMD -f docker-compose.yml up --build -d if [ $? -eq 0 ]; then echo "--- Application is now running! 
---" echo "Access it at: http://localhost:6869" - echo "To stop the application, run: docker-compose -f docker-compose.yml down" + echo "To stop the application, run: $DOCKER_COMPOSE_CMD -f docker-compose.yml down" # === Try to launch Firefox with checks === sleep 3 # Wait a bit for the server to start diff --git a/hooks/useApiOptions.ts b/hooks/useApiOptions.ts index 9d76f0e..3de26a3 100644 --- a/hooks/useApiOptions.ts +++ b/hooks/useApiOptions.ts @@ -7,21 +7,46 @@ export const useApiOptions = (): { apiOptions: ApiOptions | null; isApiKeySet: boolean; } => { - const { apiKeys, openRouterModel } = useSettings(); - const isApiKeySet = !!apiKeys.openrouter?.trim(); + const { apiProvider, apiKeys, openRouterModel, localAiConfig } = useSettings(); + + const isApiKeySet = useMemo(() => { + switch (apiProvider) { + case 'openrouter': + return !!apiKeys.openrouter?.trim(); + case 'localai': + // For local AI, we just need a base URL (API key is optional) + return !!localAiConfig.baseUrl?.trim(); + default: + return false; + } + }, [apiProvider, apiKeys, localAiConfig.baseUrl]); const apiOptions = useMemo(() => { if (!isApiKeySet) { return null; } - return { - apiKey: apiKeys.openrouter, - model: openRouterModel, - }; - }, [isApiKeySet, apiKeys.openrouter, openRouterModel]); + + switch (apiProvider) { + case 'openrouter': + return { + provider: 'openrouter' as const, + apiKey: apiKeys.openrouter, + model: openRouterModel, + }; + case 'localai': + return { + provider: 'localai' as const, + apiKey: apiKeys.localai || '', + model: localAiConfig.model, + baseUrl: localAiConfig.baseUrl, + }; + default: + return null; + } + }, [isApiKeySet, apiProvider, apiKeys, openRouterModel, localAiConfig]); return { apiOptions, isApiKeySet }; -}; \ No newline at end of file +}; diff --git a/services/Service.ts b/services/Service.ts index bb2da96..ddefd15 100644 --- a/services/Service.ts +++ b/services/Service.ts @@ -1,11 +1,11 @@ // @author: Albert C | @yz9yt | github.com/yz9yt // services/Service.ts -// version 0.1 Beta +// version 0.2 Beta - Multi-provider support import { ApiOptions, Vulnerability, VulnerabilityReport, XssPayloadResult, ForgedPayloadResult, ChatMessage, ExploitContext, HeadersReport, DomXssAnalysisResult, FileUploadAnalysisResult, DastScanType, SqlmapCommandResult, - Severity + Severity, ApiProvider } from '../types.ts'; import { createSastAnalysisPrompt, @@ -41,14 +41,45 @@ import { resetContinuousFailureCount, } from '../utils/apiManager.ts'; -const OPENROUTER_API_URL = "https://openrouter.ai/api/v1/chat/completions"; +// API endpoint URLs +const API_URLS = { + openrouter: "https://openrouter.ai/api/v1/chat/completions", + localai: "", // Will be provided by user +}; + +const getApiUrl = (options: ApiOptions): string => { + if (options.provider === 'localai' && options.baseUrl) { + return options.baseUrl; + } + return API_URLS[options.provider]; +}; + +const getAuthHeader = (options: ApiOptions): Record => { + const headers: Record = { + 'Content-Type': 'application/json' + }; + + if (options.apiKey) { + headers['Authorization'] = `Bearer ${options.apiKey}`; + } + + return headers; +}; const callApi = async (prompt: string, options: ApiOptions, isJson: boolean = true) => { await enforceRateLimit(); - const { apiKey, model } = options; - if (!apiKey) { + const { apiKey, model, provider } = options; + + // For non-local providers, API key is required + if (provider !== 'localai' && !apiKey) { throw new Error("API Key is not configured."); } + + const apiUrl = getApiUrl(options); + if (!apiUrl) { 
+ throw new Error("API URL is not configured."); + } + const signal = getNewAbortSignal(); try { @@ -56,12 +87,9 @@ const callApi = async (prompt: string, options: ApiOptions, isJson: boolean = tr updateRateLimitTimestamp(); incrementApiCallCount(); - const response = await fetch(OPENROUTER_API_URL, { + const response = await fetch(apiUrl, { method: 'POST', - headers: { - 'Authorization': `Bearer ${apiKey}`, - 'Content-Type': 'application/json' - }, + headers: getAuthHeader(options), body: JSON.stringify({ model: model, messages: [{ role: 'user', content: prompt }], @@ -91,7 +119,7 @@ const callApi = async (prompt: string, options: ApiOptions, isJson: boolean = tr console.log("API request was cancelled."); throw new Error("Request cancelled."); } - console.error("Error calling OpenRouter:", error); + console.error("Error calling API:", error); throw new Error(error.message || "An unknown error occurred while contacting the AI service."); } finally { setRequestStatus('idle'); @@ -336,10 +364,17 @@ export const generateSstiPayloads = async (engine: string, goal: string, options // --- Chat Functions --- const callOpenRouterChat = async (history: ChatMessage[], options: ApiOptions) => { await enforceRateLimit(); - const { apiKey, model } = options; - if (!apiKey) { + const { apiKey, model, provider } = options; + + if (provider !== 'localai' && !apiKey) { throw new Error("API Key is not configured."); } + + const apiUrl = getApiUrl(options); + if (!apiUrl) { + throw new Error("API URL is not configured."); + } + const signal = getNewAbortSignal(); try { @@ -347,12 +382,9 @@ const callOpenRouterChat = async (history: ChatMessage[], options: ApiOptions) = updateRateLimitTimestamp(); incrementApiCallCount(); - const response = await fetch(OPENROUTER_API_URL, { + const response = await fetch(apiUrl, { method: 'POST', - headers: { - 'Authorization': `Bearer ${apiKey}`, - 'Content-Type': 'application/json' - }, + headers: getAuthHeader(options), body: JSON.stringify({ model: model, messages: history.map(({ role, content }) => ({ role, content })), @@ -375,7 +407,7 @@ const callOpenRouterChat = async (history: ChatMessage[], options: ApiOptions) = console.log("Chat API request was cancelled."); throw new Error("Request cancelled."); } - console.error("Error calling OpenRouter Chat:", error); + console.error("Error calling Chat API:", error); throw new Error(error.message || "An unknown error occurred while contacting the AI service."); } finally { setRequestStatus('idle'); @@ -417,18 +449,39 @@ export const continueGeneralChat = async (systemPrompt: string, history: ChatMes return callOpenRouterChat(fullHistory, options); }; -export const testApi = async (apiKey: string, model: string): Promise<{ success: boolean; error?: string }> => { - if (!apiKey.startsWith('sk-or-')) { +export const testApi = async (apiKey: string, model: string, provider: ApiProvider = 'openrouter', baseUrl?: string): Promise<{ success: boolean; error?: string }> => { + // Validation based on provider + if (provider === 'openrouter' && apiKey && !apiKey.startsWith('sk-or-')) { return { success: false, error: 'Invalid OpenRouter API key format. It should start with "sk-or-".' }; } + if (provider === 'localai' && !baseUrl) { + return { success: false, error: 'Base URL is required for Local AI.' }; + } + + if (provider !== 'localai' && !apiKey) { + return { success: false, error: 'API Key is required.' 
};
+  }
+
+  let apiUrl: string;
+  if (provider === 'localai') {
+    apiUrl = baseUrl!;
+  } else {
+    apiUrl = API_URLS[provider];
+  }
+
+  const headers: Record<string, string> = {
+    'Content-Type': 'application/json'
+  };
+
+  if (apiKey) {
+    headers['Authorization'] = `Bearer ${apiKey}`;
+  }
+
   try {
-    const response = await fetch(OPENROUTER_API_URL, {
+    const response = await fetch(apiUrl, {
       method: 'POST',
-      headers: {
-        'Authorization': `Bearer ${apiKey}`,
-        'Content-Type': 'application/json'
-      },
+      headers,
       body: JSON.stringify({
         model: model,
         messages: [{ role: 'user', content: 'Test prompt' }],
diff --git a/types.ts b/types.ts
index d75b149..26f3485 100644
--- a/types.ts
+++ b/types.ts
@@ -153,13 +153,23 @@ export interface FileUploadAnalysisResult {
   manualTestingGuide: string;
 }
 
+export type ApiProvider = 'openrouter' | 'localai';
+
 export interface ApiKeys {
   openrouter: string;
+  localai: string; // Optional API key for local AI
+}
+
+export interface LocalAiConfig {
+  baseUrl: string;
+  model: string;
 }
 
 export type ApiOptions = {
+  provider: ApiProvider;
   apiKey: string;
   model: string;
+  baseUrl?: string; // For Local AI / LiteLLM
 };
 
 // New interface for vulnerability validation result
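
The sketch below is a minimal, self-contained illustration of how the provider-aware ApiOptions introduced in this diff can be exercised end to end. It mirrors the getApiUrl/getAuthHeader logic added in services/Service.ts but is not part of the change itself; the local endpoint URL and model name in the usage comment are taken from the placeholder text shown in the modal (an Ollama-style server) and are assumptions about the environment, not values shipped by this PR.

// sketch.ts - illustrative only, not included in this PR
type ApiProvider = 'openrouter' | 'localai';

interface ApiOptions {
  provider: ApiProvider;
  apiKey: string;
  model: string;
  baseUrl?: string; // only meaningful for 'localai'
}

const OPENROUTER_URL = 'https://openrouter.ai/api/v1/chat/completions';

// Local providers bring their own base URL; OpenRouter uses the fixed endpoint.
const resolveUrl = (options: ApiOptions): string =>
  options.provider === 'localai' && options.baseUrl ? options.baseUrl : OPENROUTER_URL;

// Only send an Authorization header when a key is present, since many local
// OpenAI-compatible servers accept anonymous requests.
const buildHeaders = (options: ApiOptions): Record<string, string> => {
  const headers: Record<string, string> = { 'Content-Type': 'application/json' };
  if (options.apiKey) {
    headers['Authorization'] = `Bearer ${options.apiKey}`;
  }
  return headers;
};

// Send a single prompt and return the assistant text from an OpenAI-compatible response.
async function chatOnce(options: ApiOptions, prompt: string): Promise<string> {
  const response = await fetch(resolveUrl(options), {
    method: 'POST',
    headers: buildHeaders(options),
    body: JSON.stringify({
      model: options.model,
      messages: [{ role: 'user', content: prompt }],
    }),
  });
  if (!response.ok) {
    throw new Error(`Request failed with status ${response.status}`);
  }
  const data = await response.json();
  return data.choices?.[0]?.message?.content ?? '';
}

// Assumed local setup (values match the placeholders in the modal help text):
// chatOnce(
//   { provider: 'localai', apiKey: '', model: 'llama3.2', baseUrl: 'http://localhost:11434/v1/chat/completions' },
//   'Reply with the word pong.',
// ).then(console.log);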