diff --git a/README.md b/README.md index 0bbf728..13df88b 100644 --- a/README.md +++ b/README.md @@ -84,7 +84,10 @@ LSPRAG (Language Server Protocol-based AI Generation) is a cutting-edge VS Code - At Editor, click left-up `File` -> `Open Folder` -> Select workspace to `LSPRAG/src/test/fixtures/python` - [Optional] Test core utilities - You can check out your current setting by calling `Cmd/Cntrl + Shift + P => LSPRAG: Show Current Settings` + - You can also print current settings from terminal at workspace root: `npm run show:settings` - You can test your LLM avaialbility by calling `Cmd/Cntrl + Shift + P => LSPRAG: Test LLM` + - You can also test LLM from terminal at workspace root: `npm run test:llm` + - You can also generate a unit test from terminal: `npm run generate:cli -- --file src/test/fixtures/python/calculator.py --function compute` - You can test your Language Server avaialbility by calling `Cmd/Cntrl + Shift + P => LSPRAG: Test Language Server` 2. **Generate Tests** @@ -177,4 +180,3 @@ New to LSPRAG? Want to contribute? We've got you covered! 
--- **Ready to generate unit tests with LSPRAG!** 🎉 - diff --git a/package.json b/package.json index 1a0e013..80f3c86 100644 --- a/package.json +++ b/package.json @@ -129,6 +129,9 @@ "watch": "tsc -watch -p ./", "pretest": "npm run compile && npm run lint", "lint": "eslint src", + "generate:cli": "npm run compile && node ./out/cli/generateUnitTest.js", + "show:settings": "npm run compile && node ./out/cli/showSettings.js", + "test:llm": "npm run compile && node ./out/cli/testLLM.js", "lightWeightBuild": "node scripts/build.js", "build": "tsc", "test": "npm run compile && node ./out/test/runTest.js $TEST_FILE", diff --git a/src/cli/generateUnitTest.ts b/src/cli/generateUnitTest.ts new file mode 100644 index 0000000..5c2215b --- /dev/null +++ b/src/cli/generateUnitTest.ts @@ -0,0 +1,545 @@ +import * as cp from 'child_process'; +import * as fs from 'fs'; +import * as os from 'os'; +import * as path from 'path'; +import { Configuration, getConfigInstance, Provider } from '../config'; +import { + downloadAndUnzipVSCode, + resolveCliArgsFromVSCodeExecutablePath, + runTests +} from '@vscode/test-electron'; + +type GenerateCliOptions = { + workspace: string; + workspaceExplicit: boolean; + filePath: string; + functionName?: string; + line?: number; + character?: number; + provider?: Provider; + model?: string; + baseUrl?: string; + timeoutMs?: number; + savePath?: string; + vscodeVersion?: string; +}; + +const DEFAULT_VSCODE_VERSION = 'stable'; +const VSCODE_ENV_KEYS_TO_CLEAR = [ + 'VSCODE_IPC_HOOK_CLI', + 'VSCODE_GIT_IPC_HANDLE', + 'VSCODE_GIT_ASKPASS_NODE', + 'VSCODE_GIT_ASKPASS_EXTRA_ARGS', + 'VSCODE_GIT_ASKPASS_MAIN', + 'VSCODE_CWD' +]; + +function printUsage(): void { + console.log('Usage: npm run generate:cli -- --file PATH [--function NAME | --line N] [--character N] [--workspace PATH] [--provider openai|deepseek|local|ollama] [--model MODEL] [--base-url URL] [--timeout-ms MS] [--save-path PATH] [--vscode-version VERSION]'); + console.log('Examples:'); + 
console.log(' npm run generate:cli -- --file src/test/fixtures/python/calculator.py --function compute'); + console.log(' npm run generate:cli -- --file src/test/fixtures/python/calculator.py --line 3'); + console.log(' npm run generate:cli -- --file src/test/fixtures/python/calculator.py --function compute --vscode-version stable'); +} + +function normalizeProvider(value: string): Provider { + if (value === 'ollama') { + return 'local'; + } + + if (value === 'openai' || value === 'deepseek' || value === 'local') { + return value; + } + + throw new Error(`Unsupported provider: ${value}. Use openai, deepseek, local, or ollama.`); +} + +function requireValue(args: string[], index: number, flag: string): string { + const value = args[index + 1]; + if (!value || value.startsWith('--')) { + throw new Error(`Missing value for ${flag}`); + } + return value; +} + +function toNumber(value: string | undefined, flagName: string): number | undefined { + if (!value) { + return undefined; + } + + const parsed = Number(value); + if (!Number.isFinite(parsed)) { + throw new Error(`${flagName} must be a number`); + } + return parsed; +} + +function findNearestWorkspaceForFile(filePath: string): string { + const extension = path.extname(filePath).toLowerCase(); + const markersByExtension: Record<string, string[]> = { + '.py': ['pyproject.toml', 'setup.py', 'setup.cfg', 'requirements.txt', 'Pipfile'], + '.go': ['go.mod'], + '.java': ['pom.xml', 'build.gradle', 'build.gradle.kts', 'settings.gradle', 'settings.gradle.kts'] + }; + const markers = markersByExtension[extension] ?? 
[]; + let currentDir = path.dirname(filePath); + + while (true) { + if (markers.some(marker => fs.existsSync(path.join(currentDir, marker)))) { + return currentDir; + } + + const parentDir = path.dirname(currentDir); + if (parentDir === currentDir) { + return path.dirname(filePath); + } + currentDir = parentDir; + } +} + +function parseArgs(argv: string[]): GenerateCliOptions { + const options: Partial<GenerateCliOptions> = {}; + + for (let index = 0; index < argv.length; index++) { + const arg = argv[index]; + if (arg === '--help' || arg === '-h') { + printUsage(); + process.exit(0); + } + + if (arg === '--workspace') { + options.workspace = requireValue(argv, index, arg); + index++; + continue; + } + if (arg.startsWith('--workspace=')) { + options.workspace = arg.slice('--workspace='.length); + continue; + } + + if (arg === '--file') { + options.filePath = requireValue(argv, index, arg); + index++; + continue; + } + if (arg.startsWith('--file=')) { + options.filePath = arg.slice('--file='.length); + continue; + } + + if (arg === '--function') { + options.functionName = requireValue(argv, index, arg); + index++; + continue; + } + if (arg.startsWith('--function=')) { + options.functionName = arg.slice('--function='.length); + continue; + } + + if (arg === '--line') { + options.line = toNumber(requireValue(argv, index, arg), 'line'); + index++; + continue; + } + if (arg.startsWith('--line=')) { + options.line = toNumber(arg.slice('--line='.length), 'line'); + continue; + } + + if (arg === '--character') { + options.character = toNumber(requireValue(argv, index, arg), 'character'); + index++; + continue; + } + if (arg.startsWith('--character=')) { + options.character = toNumber(arg.slice('--character='.length), 'character'); + continue; + } + + if (arg === '--provider') { + options.provider = normalizeProvider(requireValue(argv, index, arg)); + index++; + continue; + } + if (arg.startsWith('--provider=')) { + options.provider = normalizeProvider(arg.slice('--provider='.length)); + 
continue; + } + + if (arg === '--model') { + options.model = requireValue(argv, index, arg); + index++; + continue; + } + if (arg.startsWith('--model=')) { + options.model = arg.slice('--model='.length); + continue; + } + + if (arg === '--base-url') { + options.baseUrl = requireValue(argv, index, arg); + index++; + continue; + } + if (arg.startsWith('--base-url=')) { + options.baseUrl = arg.slice('--base-url='.length); + continue; + } + + if (arg === '--timeout-ms') { + options.timeoutMs = toNumber(requireValue(argv, index, arg), 'timeout-ms'); + index++; + continue; + } + if (arg.startsWith('--timeout-ms=')) { + options.timeoutMs = toNumber(arg.slice('--timeout-ms='.length), 'timeout-ms'); + continue; + } + + if (arg === '--save-path') { + options.savePath = requireValue(argv, index, arg); + index++; + continue; + } + if (arg.startsWith('--save-path=')) { + options.savePath = arg.slice('--save-path='.length); + continue; + } + + if (arg === '--vscode-version') { + options.vscodeVersion = requireValue(argv, index, arg); + index++; + continue; + } + if (arg.startsWith('--vscode-version=')) { + options.vscodeVersion = arg.slice('--vscode-version='.length); + continue; + } + + throw new Error(`Unknown argument: ${arg}`); + } + + if (!options.filePath) { + throw new Error('Missing required argument: --file'); + } + + if (!options.functionName && options.line === undefined) { + throw new Error('You must provide either --function or --line'); + } + + const workspace = path.resolve(options.workspace || process.cwd()); + const filePath = path.isAbsolute(options.filePath) + ? options.filePath + : path.resolve(workspace, options.filePath); + const inferredWorkspace = options.workspace + ? path.resolve(options.workspace) + : findNearestWorkspaceForFile(filePath); + + return { + workspace: inferredWorkspace, + workspaceExplicit: Boolean(options.workspace), + filePath, + functionName: options.functionName, + line: options.line, + character: options.character ?? 
0, + provider: options.provider, + model: options.model, + baseUrl: options.baseUrl, + timeoutMs: options.timeoutMs, + savePath: options.savePath, + vscodeVersion: options.vscodeVersion || process.env.LSPRAG_CLI_VSCODE_VERSION || DEFAULT_VSCODE_VERSION + }; +} + +function installLanguageExtensions(cliPath: string, args: string[]): void { + cp.spawnSync( + cliPath, + [ + ...args, + '--install-extension', 'ms-python.python', + '--install-extension', 'ms-python.vscode-pylance', + '--install-extension', 'redhat.java', + '--install-extension', 'golang.go' + ], + { + encoding: 'utf-8', + stdio: 'inherit', + env: createIsolatedVSCodeEnv() + } + ); +} + +function createIsolatedVSCodeEnv(extraEnv: NodeJS.ProcessEnv = {}): NodeJS.ProcessEnv { + const env: NodeJS.ProcessEnv = { + ...process.env, + DONT_PROMPT_WSL_INSTALL: '1', + ...extraEnv + }; + + for (const key of VSCODE_ENV_KEYS_TO_CLEAR) { + env[key] = ''; + } + + return env; +} + +function getExtensionInstallRoots(): string[] { + const homeDir = os.homedir(); + return [ + path.join(homeDir, '.vscode-server', 'extensions'), + path.join(homeDir, '.vscode', 'extensions') + ]; +} + +function findInstalledExtensionDir(extensionId: string): string | null { + for (const installRoot of getExtensionInstallRoots()) { + if (!fs.existsSync(installRoot)) { + continue; + } + + const matchedDirs = fs.readdirSync(installRoot) + .filter(entry => entry === extensionId || entry.startsWith(`${extensionId}-`)) + .map(entry => path.join(installRoot, entry)) + .sort((left, right) => { + return fs.statSync(right).mtimeMs - fs.statSync(left).mtimeMs; + }); + if (matchedDirs.length > 0) { + return matchedDirs[0]; + } + } + + return null; +} + +function clearObsoleteMarkers(targetRoot: string, extensionIds: string[]): void { + const obsoletePath = path.join(targetRoot, '.obsolete'); + if (!fs.existsSync(obsoletePath)) { + return; + } + + try { + const raw = fs.readFileSync(obsoletePath, 'utf8').trim(); + if (!raw) { + return; + } + + const parsed 
= JSON.parse(raw) as Record<string, unknown>; + let changed = false; + + for (const key of Object.keys(parsed)) { + if (extensionIds.some(extensionId => key === extensionId || key.startsWith(`${extensionId}-`))) { + delete parsed[key]; + changed = true; + } + } + + if (!changed) { + return; + } + + if (Object.keys(parsed).length === 0) { + fs.writeFileSync(obsoletePath, '{}', 'utf8'); + return; + } + + fs.writeFileSync(obsoletePath, JSON.stringify(parsed), 'utf8'); + } catch (error) { + const message = error instanceof Error ? error.message : String(error); + console.warn(`[LSPRAG] Failed to clean obsolete extension markers in ${targetRoot}: ${message}`); + } +} + +function syncExtensionsToCodeProfiles(extensionDevelopmentPath: string, extensionIds: string[]): void { + const targetDirs = [ + path.join(extensionDevelopmentPath, '.vscode-test', 'extensions'), + path.join(os.homedir(), '.vscode', 'extensions') + ]; + + targetDirs.forEach(targetDir => { + fs.mkdirSync(targetDir, { recursive: true }); + clearObsoleteMarkers(targetDir, extensionIds); + }); + + for (const extensionId of extensionIds) { + const sourceDir = findInstalledExtensionDir(extensionId); + if (!sourceDir) { + console.warn(`[LSPRAG] Installed extension not found in user directories: ${extensionId}`); + continue; + } + + for (const targetRoot of targetDirs) { + const targetDir = path.join(targetRoot, path.basename(sourceDir)); + fs.rmSync(targetDir, { recursive: true, force: true }); + fs.cpSync(sourceDir, targetDir, { recursive: true }); + console.log(`[LSPRAG] Synced extension into ${targetRoot}: ${path.basename(sourceDir)}`); + clearObsoleteMarkers(targetRoot, extensionIds); + } + } +} + +function buildResultFilePath(): string { + return path.join(os.tmpdir(), `lsprag-generate-${Date.now()}.json`); +} + +function buildCliConfig(options: GenerateCliOptions) { + process.env.LSPRAG_WORKSPACE = options.workspace; + if (options.provider) { + process.env.LSPRAG_PROVIDER = options.provider; + } + if (options.model) { + 
process.env.LSPRAG_MODEL = options.model; + } + if (options.baseUrl) { + process.env.LSPRAG_BASE_URL = options.baseUrl; + } + if (options.timeoutMs !== undefined) { + process.env.LSPRAG_TIMEOUT_MS = options.timeoutMs.toString(); + } + + Configuration.resetInstance(); + const config = getConfigInstance(); + config.updateConfig({ + workspace: options.workspace, + ...(options.provider ? { provider: options.provider } : {}), + ...(options.model ? { model: options.model } : {}), + ...(options.baseUrl ? { baseUrl: options.baseUrl } : {}), + ...(options.timeoutMs !== undefined ? { timeoutMs: options.timeoutMs } : {}), + ...(options.savePath ? { savePath: options.savePath } : {}) + }); + return config; +} + +function buildCredentialHintMessage(workspace: string, provider: Provider): string { + const settingsPath = path.join(workspace, '.vscode', 'settings.json'); + switch (provider) { + case 'openai': + return [ + 'OpenAI requires an API key before generation can start.', + `Set \`OPENAI_API_KEY\` in your shell, or add \`"LSPRAG.openaiApiKey"\` to ${settingsPath}.`, + 'You can also switch provider with `--provider deepseek` or `--provider ollama`.' + ].join(' '); + case 'deepseek': + return [ + 'DeepSeek requires an API key before generation can start.', + `Set \`DEEPSEEK_API_KEY\` in your shell, or add \`"LSPRAG.deepseekApiKey"\` to ${settingsPath}.`, + 'You can also switch provider with `--provider openai` or `--provider ollama`.' + ].join(' '); + case 'local': + return [ + 'Local/Ollama generation requires an endpoint URL before generation can start.', + `Set \`LOCAL_LLM_URL\` in your shell, or add \`"LSPRAG.localLLMUrl"\` to ${settingsPath}.`, + 'Example: `--provider ollama --model llama3 --base-url` is not enough without `LOCAL_LLM_URL`.' 
+ ].join(' '); + } +} + +function validateLLMConfiguration(options: GenerateCliOptions): void { + const config = buildCliConfig(options); + console.log(`[LSPRAG] LLM: ${config.provider}/${config.model}`); + + switch (config.provider) { + case 'openai': + if (!config.openaiApiKey && !process.env.OPENAI_API_KEY) { + throw new Error(buildCredentialHintMessage(options.workspace, 'openai')); + } + return; + case 'deepseek': + if (!config.deepseekApiKey && !process.env.DEEPSEEK_API_KEY) { + throw new Error(buildCredentialHintMessage(options.workspace, 'deepseek')); + } + return; + case 'local': + if (!config.localLLMUrl && !process.env.LOCAL_LLM_URL) { + throw new Error(buildCredentialHintMessage(options.workspace, 'local')); + } + return; + } +} + +async function main(): Promise<void> { + const options = parseArgs(process.argv.slice(2)); + const extensionDevelopmentPath = path.resolve(__dirname, '../../'); + const extensionTestsPath = path.resolve(__dirname, '../test/generateCliHost'); + const resultFile = buildResultFilePath(); + + console.log(`[LSPRAG] Workspace: ${options.workspace}`); + if (!options.workspaceExplicit) { + console.log('[LSPRAG] Workspace inferred from target file. 
Use --workspace to override.'); + } + console.log(`[LSPRAG] File: ${options.filePath}`); + if (options.functionName) { + console.log(`[LSPRAG] Function: ${options.functionName}`); + } + if (options.line !== undefined) { + console.log(`[LSPRAG] Line: ${options.line}`); + } + console.log(`[LSPRAG] VS Code version: ${options.vscodeVersion}`); + validateLLMConfiguration(options); + + const vscodeExecutablePath = await downloadAndUnzipVSCode(options.vscodeVersion || DEFAULT_VSCODE_VERSION); + const [cliPath, ...vscodeArgs] = resolveCliArgsFromVSCodeExecutablePath(vscodeExecutablePath); + installLanguageExtensions(cliPath, vscodeArgs); + syncExtensionsToCodeProfiles(extensionDevelopmentPath, [ + 'ms-python.python', + 'ms-python.vscode-pylance', + 'redhat.java', + 'golang.go' + ]); + + try { + await runTests({ + vscodeExecutablePath, + extensionDevelopmentPath, + extensionTestsPath, + launchArgs: [options.workspace], + extensionTestsEnv: { + ...createIsolatedVSCodeEnv(), + LSPRAG_CLI_WORKSPACE: options.workspace, + LSPRAG_CLI_FILE_PATH: options.filePath, + LSPRAG_CLI_FUNCTION_NAME: options.functionName || '', + LSPRAG_CLI_LINE: options.line?.toString() || '', + LSPRAG_CLI_CHARACTER: options.character?.toString() || '0', + LSPRAG_CLI_PROVIDER: options.provider || '', + LSPRAG_CLI_MODEL: options.model || '', + LSPRAG_CLI_BASE_URL: options.baseUrl || '', + LSPRAG_CLI_TIMEOUT_MS: options.timeoutMs?.toString() || '', + LSPRAG_CLI_SAVE_PATH: options.savePath || '', + LSPRAG_CLI_RESULT_FILE: resultFile, + OPENAI_API_KEY: process.env.OPENAI_API_KEY || '', + DEEPSEEK_API_KEY: process.env.DEEPSEEK_API_KEY || '', + LOCAL_LLM_URL: process.env.LOCAL_LLM_URL || '', + HTTP_PROXY: process.env.HTTP_PROXY || '', + HTTPS_PROXY: process.env.HTTPS_PROXY || '' + } + }); + } catch (error) { + if (fs.existsSync(resultFile)) { + const failedResult = JSON.parse(fs.readFileSync(resultFile, 'utf8')) as { error?: string }; + if (failedResult.error) { + throw new Error(failedResult.error); + } + } + 
throw error; + } + + const rawResult = fs.readFileSync(resultFile, 'utf8'); + const parsedResult = JSON.parse(rawResult) as { + ok: boolean; + error?: string; + savedFilePath?: string; + functionName?: string; + }; + + if (!parsedResult.ok || !parsedResult.savedFilePath) { + throw new Error(parsedResult.error || 'Generation did not produce an output file.'); + } + + console.log(`[LSPRAG] Generated ${parsedResult.functionName} -> ${parsedResult.savedFilePath}`); +} + +main().catch((error) => { + const errorMessage = error instanceof Error ? error.message : String(error); + console.error(`[LSPRAG] Generate Unit Test failed: ${errorMessage}`); + process.exit(1); +}); diff --git a/src/cli/showSettings.ts b/src/cli/showSettings.ts new file mode 100644 index 0000000..229b06c --- /dev/null +++ b/src/cli/showSettings.ts @@ -0,0 +1,66 @@ +import * as path from 'path'; +import { Configuration } from '../config'; +import { getCurrentSettingsText } from '../currentSettings'; + +type CliOverrides = { + workspace?: string; +}; + +function printUsage(): void { + console.log('Usage: npm run show:settings -- [--workspace PATH]'); + console.log('Examples:'); + console.log(' npm run show:settings'); + console.log(' npm run show:settings -- --workspace src/test/fixtures/python'); +} + +function requireValue(args: string[], index: number, flag: string): string { + const value = args[index + 1]; + if (!value || value.startsWith('--')) { + throw new Error(`Missing value for ${flag}`); + } + return value; +} + +function parseCliArgs(args: string[]): CliOverrides { + const overrides: CliOverrides = {}; + + for (let index = 0; index < args.length; index++) { + const arg = args[index]; + if (arg === '--help' || arg === '-h') { + printUsage(); + process.exit(0); + } + + if (arg === '--workspace') { + overrides.workspace = requireValue(args, index, arg); + index++; + continue; + } + + if (arg.startsWith('--workspace=')) { + overrides.workspace = arg.slice('--workspace='.length); + continue; + 
} + + throw new Error(`Unknown argument: ${arg}`); + } + + return overrides; +} + +async function main(): Promise<void> { + const overrides = parseCliArgs(process.argv.slice(2)); + const workspace = path.resolve(overrides.workspace || process.cwd()); + process.env.LSPRAG_WORKSPACE = workspace; + + Configuration.resetInstance(); + + console.log('[LSPRAG] Current Settings'); + console.log(getCurrentSettingsText()); +} + +main().catch((error) => { + const errorMessage = error instanceof Error ? error.message : String(error); + console.error(`[LSPRAG] Show Settings failed: ${errorMessage}`); + process.exit(1); +}); diff --git a/src/cli/testLLM.ts b/src/cli/testLLM.ts new file mode 100644 index 0000000..54b6b0c --- /dev/null +++ b/src/cli/testLLM.ts @@ -0,0 +1,157 @@ +import { Configuration, getConfigInstance, Provider } from '../config'; +import { runLLMHealthcheck } from '../llmHealthcheck'; + +type CliOverrides = { + workspace?: string; + provider?: Provider; + model?: string; + baseUrl?: string; + timeoutMs?: number; + localLLMUrl?: string; +}; + +function printUsage(): void { + console.log('Usage: npm run test:llm -- [--provider openai|deepseek|local|ollama] [--model MODEL] [--base-url URL] [--timeout-ms MS] [--workspace PATH] [--local-llm-url URL]'); + console.log('Environment overrides: OPENAI_API_KEY, DEEPSEEK_API_KEY, LOCAL_LLM_URL, LSPRAG_PROVIDER, LSPRAG_MODEL, LSPRAG_BASE_URL, LSPRAG_TIMEOUT_MS'); + console.log('Workspace .vscode/settings.json is also read automatically when available.'); +} + +function normalizeProvider(value: string): Provider { + if (value === 'ollama') { + return 'local'; + } + + if (value === 'openai' || value === 'deepseek' || value === 'local') { + return value; + } + + throw new Error(`Unsupported provider: ${value}`); +} + +function requireValue(args: string[], index: number, flag: string): string { + const value = args[index + 1]; + if (!value || value.startsWith('--')) { + throw new Error(`Missing value for ${flag}`); + } + return 
value; +} + +function parseCliArgs(args: string[]): CliOverrides { + const overrides: CliOverrides = {}; + + for (let index = 0; index < args.length; index++) { + const arg = args[index]; + if (arg === '--help' || arg === '-h') { + printUsage(); + process.exit(0); + } + + if (arg === '--provider') { + overrides.provider = normalizeProvider(requireValue(args, index, arg)); + index++; + continue; + } + + if (arg.startsWith('--provider=')) { + overrides.provider = normalizeProvider(arg.slice('--provider='.length)); + continue; + } + + if (arg === '--model') { + overrides.model = requireValue(args, index, arg); + index++; + continue; + } + + if (arg.startsWith('--model=')) { + overrides.model = arg.slice('--model='.length); + continue; + } + + if (arg === '--base-url') { + overrides.baseUrl = requireValue(args, index, arg); + index++; + continue; + } + + if (arg.startsWith('--base-url=')) { + overrides.baseUrl = arg.slice('--base-url='.length); + continue; + } + + if (arg === '--timeout-ms') { + overrides.timeoutMs = Number(requireValue(args, index, arg)); + index++; + continue; + } + + if (arg.startsWith('--timeout-ms=')) { + overrides.timeoutMs = Number(arg.slice('--timeout-ms='.length)); + continue; + } + + if (arg === '--workspace') { + overrides.workspace = requireValue(args, index, arg); + index++; + continue; + } + + if (arg.startsWith('--workspace=')) { + overrides.workspace = arg.slice('--workspace='.length); + continue; + } + + if (arg === '--local-llm-url') { + overrides.localLLMUrl = requireValue(args, index, arg); + index++; + continue; + } + + if (arg.startsWith('--local-llm-url=')) { + overrides.localLLMUrl = arg.slice('--local-llm-url='.length); + continue; + } + + throw new Error(`Unknown argument: ${arg}`); + } + + if (overrides.timeoutMs !== undefined && !Number.isFinite(overrides.timeoutMs)) { + throw new Error('timeout-ms must be a number'); + } + + return overrides; +} + +async function main(): Promise<void> { + const overrides = 
parseCliArgs(process.argv.slice(2)); + Configuration.resetInstance(); + + const config = getConfigInstance(); + config.updateConfig({ + workspace: overrides.workspace || process.cwd(), + ...(overrides.provider ? { provider: overrides.provider } : {}), + ...(overrides.model ? { model: overrides.model } : {}), + ...(overrides.baseUrl ? { baseUrl: overrides.baseUrl } : {}), + ...(overrides.timeoutMs !== undefined ? { timeoutMs: overrides.timeoutMs } : {}), + ...(overrides.localLLMUrl ? { localLLMUrl: overrides.localLLMUrl } : {}) + }); + + console.log(`[LSPRAG] Testing ${config.provider}/${config.model}`); + console.log(`[LSPRAG] Workspace: ${config.workspace}`); + console.log(`[LSPRAG] Timeout: ${Math.round(config.timeoutMs / 1000)}s`); + if (config.baseUrl) { + console.log(`[LSPRAG] Base URL: ${config.baseUrl}`); + } + if (config.proxyUrl) { + console.log(`[LSPRAG] Proxy: ${config.proxyUrl}`); + } + + const result = await runLLMHealthcheck(); + console.log(`[LSPRAG] Success in ${result.elapsedMs}ms: ${result.response}`); +} + +main().catch((error) => { + const errorMessage = error instanceof Error ? 
error.message : String(error); + console.error(`[LSPRAG] Test LLM failed: ${errorMessage}`); + process.exit(1); +}); diff --git a/src/commands/generateUnitTestCommand.ts b/src/commands/generateUnitTestCommand.ts new file mode 100644 index 0000000..a5dd8e8 --- /dev/null +++ b/src/commands/generateUnitTestCommand.ts @@ -0,0 +1,219 @@ +import * as path from 'path'; +import * as vscode from 'vscode'; +import { generateUnitTestForAFunction } from '../generate'; +import { getConfigInstance } from '../config'; +import { generateFileNameForDiffLanguage, getFileName, saveCode } from '../fileHandler'; +import { getAllSymbols } from '../lsp/symbol'; +import { showDiffAndAllowSelection } from '../userInteraction'; + +export interface GenerateUnitTestCommandOptions { + filePath?: string; + functionName?: string; + line?: number; + character?: number; + showGeneratedCode?: boolean; + silent?: boolean; +} + +export interface GenerateUnitTestCommandResult { + finalCode: string; + fullFileName: string; + savedFilePath?: string; + functionName: string; + sourceFilePath: string; +} + +function isCallableSymbol(symbol: vscode.DocumentSymbol): boolean { + return symbol.kind === vscode.SymbolKind.Function + || symbol.kind === vscode.SymbolKind.Method + || symbol.kind === vscode.SymbolKind.Constructor; +} + +function findSymbolByName(symbols: vscode.DocumentSymbol[], functionName: string): vscode.DocumentSymbol | null { + const matched = symbols.filter(symbol => isCallableSymbol(symbol) && symbol.name === functionName); + if (matched.length === 0) { + return null; + } + return matched[0]; +} + +function getAvailableCallableNames(symbols: vscode.DocumentSymbol[]): string[] { + return Array.from(new Set( + symbols + .filter(isCallableSymbol) + .map(symbol => symbol.name) + )).sort((left, right) => left.localeCompare(right)); +} + +function findSymbolByPosition( + symbols: vscode.DocumentSymbol[], + position: vscode.Position +): vscode.DocumentSymbol | null { + const matched = symbols + 
.filter(symbol => isCallableSymbol(symbol) && symbol.range.contains(position)) + .sort((left, right) => { + const leftSpan = left.range.end.line - left.range.start.line; + const rightSpan = right.range.end.line - right.range.start.line; + return leftSpan - rightSpan; + }); + + return matched[0] ?? null; +} + +async function resolveDocument(options?: GenerateUnitTestCommandOptions): Promise<vscode.TextDocument | null> { + if (options?.filePath) { + const document = await vscode.workspace.openTextDocument(options.filePath); + await vscode.window.showTextDocument(document, { + preview: true, + preserveFocus: true + }); + return document; + } + + const editor = vscode.window.activeTextEditor; + if (!editor) { + return null; + } + + return editor.document; +} + +async function resolveTargetSymbol( + document: vscode.TextDocument, + options: GenerateUnitTestCommandOptions | undefined, + symbols: vscode.DocumentSymbol[] +): Promise<vscode.DocumentSymbol | null> { + if (options?.functionName) { + return findSymbolByName(symbols, options.functionName); + } + + let position: vscode.Position | null = null; + if (options?.line !== undefined) { + const zeroBasedLine = Math.max(0, options.line - 1); + const zeroBasedCharacter = Math.max(0, options.character ?? 
0); + position = new vscode.Position(zeroBasedLine, zeroBasedCharacter); + } else if (vscode.window.activeTextEditor?.document.uri.toString() === document.uri.toString()) { + position = vscode.window.activeTextEditor.selection.active; + } + + if (!position) { + return null; + } + + return findSymbolByPosition(symbols, position); +} + +function showError(message: string, silent = false): void { + if (!silent) { + vscode.window.showErrorMessage(message); + } + console.error(message); +} + +function showInfo(message: string, silent = false): void { + if (!silent) { + vscode.window.showInformationMessage(message); + } + console.log(message); +} + +function getOutputFolder(workspace: string): string { + const configuredSavePath = getConfigInstance().savePath; + return path.isAbsolute(configuredSavePath) + ? configuredSavePath + : path.join(workspace, configuredSavePath); +} + +export async function runGenerateUnitTestCommand( + options?: GenerateUnitTestCommandOptions +): Promise<GenerateUnitTestCommandResult | null> { + const showGeneratedCode = options?.showGeneratedCode ?? true; + const silent = options?.silent ?? false; + const document = await resolveDocument(options); + + if (!document) { + showError('Please open a file and select a function to generate unit test.', silent); + return null; + } + + const symbols = await getAllSymbols(document.uri); + if (!symbols || symbols.length === 0) { + showError('No symbols found! - It seems language server is not running.', silent); + return null; + } + + const functionSymbol = await resolveTargetSymbol(document, options, symbols); + if (!functionSymbol) { + const targetDescriptor = options?.functionName + ? `function "${options.functionName}"` + : options?.line !== undefined + ? `line ${options.line}` + : 'current selection'; + const availableNames = getAvailableCallableNames(symbols); + const availableMessage = availableNames.length > 0 + ? 
` Available callables: ${availableNames.join(', ')}.` + : ''; + showError(`No target function found for ${targetDescriptor}.${availableMessage}`, silent); + return null; + } + + const workspace = vscode.workspace.workspaceFolders?.[0]?.uri.fsPath; + if (!workspace) { + showError('No workspace folder found. Please open the project folder in VS Code.', silent); + return null; + } + + getConfigInstance().updateConfig({ + workspace + }); + + const outputFolder = getOutputFolder(workspace); + const fullFileName = generateFileNameForDiffLanguage( + document, + functionSymbol, + outputFolder, + document.languageId, + [], + 0 + ); + + try { + const finalCode = await generateUnitTestForAFunction( + workspace, + document, + functionSymbol, + fullFileName, + showGeneratedCode + ); + + if (!finalCode) { + showError('Failed to generate unit test!', silent); + return null; + } + + let savedFilePath: string | undefined; + if (showGeneratedCode) { + const fileName = getFileName(fullFileName); + showDiffAndAllowSelection(finalCode, document.languageId, fileName); + } else { + savedFilePath = await saveCode( + finalCode, + path.dirname(fullFileName), + path.basename(fullFileName) + ); + } + + showInfo('Unit test generated successfully!', silent); + return { + finalCode, + fullFileName, + savedFilePath, + functionName: functionSymbol.name, + sourceFilePath: document.uri.fsPath + }; + } catch (error) { + const errorMessage = error instanceof Error ? 
error.message : String(error); + showError(`Failed to generate unit test: ${errorMessage}`, silent); + return null; + } +} diff --git a/src/config.ts b/src/config.ts index 39429af..b1a5897 100644 --- a/src/config.ts +++ b/src/config.ts @@ -1,4 +1,4 @@ -import { existsSync, mkdirSync } from 'fs'; +import { existsSync, mkdirSync, readFileSync } from 'fs'; import path from 'path'; // Optional vscode import - only available in VSCode extension context @@ -120,6 +120,88 @@ let seededRandom: () => number; export type Provider = 'openai' | 'local' | 'deepseek'; +type WorkspaceSettings = Record<string, unknown> & { + LSPRAG?: Record<string, unknown>; +}; + +function normalizeProvider(value: string | undefined): Provider | undefined { + if (!value) { + return undefined; + } + + if (value === 'ollama') { + return 'local'; + } + + if (value === 'openai' || value === 'local' || value === 'deepseek') { + return value; + } + + return undefined; +} + +function toNumber(value: unknown): number | undefined { + if (typeof value === 'number' && Number.isFinite(value)) { + return value; + } + + if (typeof value === 'string' && value.trim().length > 0) { + const parsed = Number(value); + if (Number.isFinite(parsed)) { + return parsed; + } + } + + return undefined; +} + +function loadWorkspaceSettingsFromFile(workspace: string): Partial<{ + model: string; + provider: Provider; + promptType: PromptType; + generationType: GenerationType; + timeoutMs: number; + maxRound: number; + openaiApiKey: string; + deepseekApiKey: string; + baseUrl: string; + localLLMUrl: string; + savePath: string; + proxyUrl: string; +}> { + const settingsPath = path.join(workspace, '.vscode', 'settings.json'); + if (!existsSync(settingsPath)) { + return {}; + } + + try { + const rawSettings = readFileSync(settingsPath, 'utf8'); + const parsed = JSON.parse(rawSettings) as WorkspaceSettings; + const nestedSettings = parsed.LSPRAG ?? 
{}; + const readSetting = (flatKey: string, nestedKey: string): T | undefined => { + return (parsed[flatKey] as T | undefined) ?? (nestedSettings[nestedKey] as T | undefined); + }; + + return { + model: readSetting('LSPRAG.model', 'model'), + provider: normalizeProvider(readSetting('LSPRAG.provider', 'provider')), + promptType: readSetting('LSPRAG.promptType', 'promptType'), + generationType: readSetting('LSPRAG.generationType', 'generationType'), + timeoutMs: toNumber(readSetting('LSPRAG.timeoutMs', 'timeoutMs')), + maxRound: toNumber(readSetting('LSPRAG.maxRound', 'maxRound')), + openaiApiKey: readSetting('LSPRAG.openaiApiKey', 'openaiApiKey'), + deepseekApiKey: readSetting('LSPRAG.deepseekApiKey', 'deepseekApiKey'), + baseUrl: readSetting('LSPRAG.baseUrl', 'baseUrl'), + localLLMUrl: readSetting('LSPRAG.localLLMUrl', 'localLLMUrl'), + savePath: readSetting('LSPRAG.savePath', 'savePath'), + proxyUrl: (parsed['http.proxy'] as string | undefined) ?? '' + }; + } catch (error) { + console.warn(`[CONFIG] Failed to parse ${settingsPath}:`, error); + return {}; + } +} + // Function to load private configuration const DEFAULT_CONFIG = { @@ -128,7 +210,7 @@ const DEFAULT_CONFIG = { parallelCount: 1, model: 'deepseek-chat', provider: 'deepseek' as Provider, - timeoutMs: 600 * 1000, + timeoutMs: 30 * 1000, promptType: PromptType.BASIC, fixType: FixType.ORIGINAL, generationType: GenerationType.LSPRAG, @@ -140,6 +222,7 @@ const DEFAULT_CONFIG = { openaiApiKey: string; deepseekApiKey: string; localLLMUrl: string; + baseUrl?: string; proxyUrl?: string; } @@ -321,6 +404,7 @@ export class Configuration { maxRound: (config.get('maxRound') as number) ?? DEFAULT_CONFIG.maxRound, openaiApiKey: config.get('openaiApiKey') as string, deepseekApiKey: config.get('deepseekApiKey') as string, + baseUrl: config.get('baseUrl') as string, localLLMUrl: config.get('localLLMUrl') as string, savePath: (config.get('savePath') as string) ?? 
DEFAULT_CONFIG.savePath, proxyUrl: globalProxy || '' @@ -328,32 +412,31 @@ export class Configuration { } else { // Running outside VSCode (e.g., standalone scripts) - use defaults or environment variables console.log('[CONFIG] No VSCode context, using default configuration'); + const workspace = process.env.LSPRAG_WORKSPACE || process.cwd(); + const workspaceSettings = loadWorkspaceSettingsFromFile(workspace); return { - workspace: process.env.LSPRAG_WORKSPACE || process.cwd(), + workspace: workspace, expProb: DEFAULT_CONFIG.expProb, - model: process.env.LSPRAG_MODEL || DEFAULT_CONFIG.model, - provider: (process.env.LSPRAG_PROVIDER as Provider) || DEFAULT_CONFIG.provider, - promptType: DEFAULT_CONFIG.promptType, - generationType: DEFAULT_CONFIG.generationType, + model: process.env.LSPRAG_MODEL || workspaceSettings.model || DEFAULT_CONFIG.model, + provider: normalizeProvider(process.env.LSPRAG_PROVIDER) || workspaceSettings.provider || DEFAULT_CONFIG.provider, + promptType: workspaceSettings.promptType || DEFAULT_CONFIG.promptType, + generationType: workspaceSettings.generationType || DEFAULT_CONFIG.generationType, fixType: DEFAULT_CONFIG.fixType, - timeoutMs: DEFAULT_CONFIG.timeoutMs, + timeoutMs: toNumber(process.env.LSPRAG_TIMEOUT_MS) || workspaceSettings.timeoutMs || DEFAULT_CONFIG.timeoutMs, parallelCount: DEFAULT_CONFIG.parallelCount, - maxRound: DEFAULT_CONFIG.maxRound, + maxRound: workspaceSettings.maxRound || DEFAULT_CONFIG.maxRound, testNumber: DEFAULT_CONFIG.testNumber, - openaiApiKey: process.env.OPENAI_API_KEY, - deepseekApiKey: process.env.DEEPSEEK_API_KEY, - localLLMUrl: process.env.LOCAL_LLM_URL, - savePath: DEFAULT_CONFIG.savePath, - proxyUrl: process.env.HTTP_PROXY || process.env.HTTPS_PROXY || '' + openaiApiKey: process.env.OPENAI_API_KEY || workspaceSettings.openaiApiKey, + deepseekApiKey: process.env.DEEPSEEK_API_KEY || workspaceSettings.deepseekApiKey, + baseUrl: process.env.LSPRAG_BASE_URL || workspaceSettings.baseUrl || '', + localLLMUrl: 
process.env.LOCAL_LLM_URL || workspaceSettings.localLLMUrl, + savePath: workspaceSettings.savePath || DEFAULT_CONFIG.savePath, + proxyUrl: process.env.HTTP_PROXY || process.env.HTTPS_PROXY || workspaceSettings.proxyUrl || '' }; } } - private adjustTimeout(): void { - if (this.provider === 'local' || this.provider === 'deepseek') { - this.config.timeoutMs *= 2; - } - } + private adjustTimeout(): void {} private constructResultPath(): string { return path.join( @@ -504,6 +587,10 @@ export class Configuration { return this.config.proxyUrl; } + public get baseUrl(): string | undefined { + return this.config.baseUrl; + } + public get testNumber(): number { return this.config.testNumber; } @@ -534,4 +621,4 @@ export const SRC_PATHS = { // export const currentPromptType = configInstance.promptType; // export const currentParallelCount = configInstance.parallelCount; // export const maxRound = configInstance.maxRound; -// export const currentTimeout = configInstance.timeoutMs; \ No newline at end of file +// export const currentTimeout = configInstance.timeoutMs; diff --git a/src/currentSettings.ts b/src/currentSettings.ts new file mode 100644 index 0000000..8958158 --- /dev/null +++ b/src/currentSettings.ts @@ -0,0 +1,19 @@ +import { getConfigInstance } from './config'; + +export function getCurrentSettingsLines(): string[] { + const config = getConfigInstance(); + return [ + `Workspace: ${config.workspace}`, + `Model: ${config.model}`, + `Provider: ${config.provider}`, + `Generation Type: ${config.generationType}`, + `Max Rounds: ${config.maxRound}`, + `Experiment Probability: ${config.expProb}`, + `Save Path: ${config.savePath}`, + `Timeout: ${config.timeoutMs}` + ]; +} + +export function getCurrentSettingsText(): string { + return getCurrentSettingsLines().join('\n'); +} diff --git a/src/extension.ts b/src/extension.ts index ad88321..3bb350f 100644 --- a/src/extension.ts +++ b/src/extension.ts @@ -1,16 +1,16 @@ import * as vscode from 'vscode'; -import { 
generateUnitTestForSelectedRange } from './generate'; import { Configuration, getConfigInstance } from './config'; -import { invokeLLM } from './invokeLLM'; import { getAllSymbols } from './lsp/symbol'; import { getDecodedTokensFromSymbol } from './lsp/token'; +import { runLLMHealthcheck } from './llmHealthcheck'; +import { GenerateUnitTestCommandOptions, runGenerateUnitTestCommand } from './commands/generateUnitTestCommand'; +import { getCurrentSettingsLines } from './currentSettings'; export async function activate(context: vscode.ExtensionContext) { - try { const workspace = vscode.workspace.workspaceFolders; - if (!Configuration.isTestingEnvironment() && workspace && workspace.length > 0) { + if (!Configuration.isTestingEnvironment() && workspace && workspace.length > 0) { console.log(`Workspace: ${workspace[0].uri.fsPath}`); getConfigInstance().updateConfig({ workspace: workspace[0].uri.fsPath @@ -19,38 +19,45 @@ export async function activate(context: vscode.ExtensionContext) { console.log(`No workspace found`); } - const testLLMDisposable = vscode.commands.registerCommand('extension.testLLM', async () => { - const promptObj = [ - { - role: 'system', - content: 'You are a helpful assistant.' - }, - { - role: 'user', - content: 'What is the capital of the moon?' - } - ]; - const modelName = getConfigInstance().model; - vscode.window.showInformationMessage(`Testing ${modelName} invoked LLM.`); - const response = await invokeLLM(promptObj, []); - if (response) { - vscode.window.showInformationMessage('Successfully invoked LLM.', + const config = getConfigInstance(); + const modelName = config.model; + const provider = config.provider; + const timeoutSeconds = Math.round(config.timeoutMs / 1000); + console.log(`testLLM command started. 
provider=${provider}, model=${modelName}, timeout=${timeoutSeconds}s`); + try { + await vscode.window.withProgress( { - modal: true + location: vscode.ProgressLocation.Notification, + title: `Testing ${provider}/${modelName}`, + cancellable: false + }, + async (progress) => { + progress.report({ + message: `Waiting for LLM response (timeout: ${timeoutSeconds}s)` + }); + const result = await runLLMHealthcheck(); + vscode.window.showInformationMessage( + `Successfully invoked LLM: ${result.response}`, + { + modal: true + } + ); } ); - } else { - vscode.window.showErrorMessage('Failed to invoke LLM.', + } catch (error) { + const errorMessage = error instanceof Error ? error.message : String(error); + console.error('testLLM command failed:', error); + vscode.window.showErrorMessage(`Test LLM failed: ${errorMessage}`, { modal: true } ); } }); - + context.subscriptions.push(testLLMDisposable); - + const testLSPDisposable = vscode.commands.registerCommand('extension.testLSP', async () => { const editor = vscode.window.activeTextEditor; if (!editor) { @@ -75,8 +82,8 @@ export async function activate(context: vscode.ExtensionContext) { // Test 2: Token Extraction vscode.window.showInformationMessage('Testing token extraction...'); - const firstFunctionSymbol = symbols.find(s => - s.kind === vscode.SymbolKind.Function || + const firstFunctionSymbol = symbols.find(s => + s.kind === vscode.SymbolKind.Function || s.kind === vscode.SymbolKind.Method ); @@ -89,7 +96,6 @@ export async function activate(context: vscode.ExtensionContext) { console.log(`Extracted ${tokens.length} tokens from symbol: ${firstFunctionSymbol.name}`); console.log('Tokens:', tokens.map(t => t.word)); - // Show success message with results const message = `LSP Test Success!\nSymbols: ${symbols.length}\nTokens from "${firstFunctionSymbol.name}": ${tokens.length}`; vscode.window.showInformationMessage(message, { @@ -106,46 +112,33 @@ export async function activate(context: vscode.ExtensionContext) { ); } }); 
- - context.subscriptions.push(testLSPDisposable); - const disposable = vscode.commands.registerCommand('extension.generateUnitTest', async () => { - const editor = vscode.window.activeTextEditor; - if (!editor) { - vscode.window.showErrorMessage('Please open a file and select a function to generate unit test.'); - return; - } - const testCode = await generateUnitTestForSelectedRange(editor.document, editor.selection.active); + context.subscriptions.push(testLSPDisposable); + const disposable = vscode.commands.registerCommand('extension.generateUnitTest', async (options?: GenerateUnitTestCommandOptions) => { + return runGenerateUnitTestCommand(options); }); - + context.subscriptions.push(disposable); - + const showSettingsDisposable = vscode.commands.registerCommand('LSPRAG.showSettings', () => { - const settings = [ - `Model: ${getConfigInstance().model}`, - `Provider: ${getConfigInstance().provider}`, - `Max Rounds: ${getConfigInstance().maxRound}`, - `Experiment Probability: ${getConfigInstance().expProb}`, - `Save Path: ${getConfigInstance().savePath}`, - `Timeout: ${getConfigInstance().timeoutMs}` - ]; - + const settings = getCurrentSettingsLines(); + vscode.window.showInformationMessage('Current Settings:', { detail: settings.join('\n'), modal: true }); }); - context.subscriptions.push(showSettingsDisposable); + context.subscriptions.push(showSettingsDisposable); } catch (error) { const errorMessage = error instanceof Error ? 
error.message : 'Unknown error'; console.error('Failed to activate LSPRAG extension:', error); - vscode.window.showErrorMessage(`LSPRAG activation failed: ${errorMessage}`, + vscode.window.showErrorMessage(`LSPRAG activation failed: ${errorMessage}`, { modal: true } ); } } -export function deactivate() { } \ No newline at end of file +export function deactivate() { } diff --git a/src/invokeLLM.ts b/src/invokeLLM.ts index bb4118b..b535f3b 100644 --- a/src/invokeLLM.ts +++ b/src/invokeLLM.ts @@ -1,4 +1,3 @@ -import * as vscode from "vscode"; import { OpenAI } from "openai"; import { HttpsProxyAgent } from "https-proxy-agent/dist"; import { Ollama } from 'ollama'; @@ -6,9 +5,57 @@ import { Configuration, getConfigInstance } from "./config"; import * as fs from 'fs'; import * as path from 'path'; +type VSCodeLike = { + window?: { + showErrorMessage(message: string): void; + }; +}; + +let vscodeApi: VSCodeLike | null = null; +try { + vscodeApi = require('vscode') as VSCodeLike; +} catch (error) { + vscodeApi = null; +} + export const TOKENTHRESHOLD = 3000; // Define your token threshold here export const BASELINE = "naive"; +const TRUE_VALUES = new Set(['1', 'true', 'yes', 'on']); + +function envFlagEnabled(value: string | undefined): boolean { + return TRUE_VALUES.has((value || '').trim().toLowerCase()); +} + +export function isSkipLLMRequested(): boolean { + return envFlagEnabled(process.env.LSPRAG_SKIP_LLM) || envFlagEnabled(process.env.TEST_SKIP_LLM); +} + +function getDraftCodeFromPrompt(userPrompt: string): string { + const markers = [ + '### Draft test code with test prefix path coverage requirements', + '### Draft test code' + ]; + for (const marker of markers) { + const markerIndex = userPrompt.indexOf(marker); + if (markerIndex < 0) { + continue; + } + const region = userPrompt.slice(markerIndex + marker.length); + const match = region.match(/```(?:\w+)?\s*([\s\S]*?)\s*```/); + if (match?.[1]) { + return match[1].trim(); + } + } + return ''; +} + +export 
function isSkipLLMModeEnabled(): boolean { + if (!isSkipLLMRequested()) { + return false; + } + return (process.env.TEST_TYPE || '').trim().toLowerCase() === 'config'; +} export class TokenLimitExceededError extends Error { constructor(message: string) { @@ -38,6 +85,46 @@ export function getModelName(): string { return getConfigInstance().model.split("_").pop()!; } +function getConfiguredBaseUrl(provider: 'openai' | 'deepseek'): string { + const customBaseUrl = getConfigInstance().baseUrl?.trim() || process.env.LSPRAG_BASE_URL?.trim(); + if (customBaseUrl) { + return customBaseUrl; + } + return provider === 'deepseek' ? 'https://api.deepseek.com' : 'https://api.openai.com/v1'; +} + +function getRequestTimeoutMs(): number { + return getConfigInstance().timeoutMs; +} + +function createTimeoutError(provider: string, timeoutMs: number): Error { + return new Error(`${provider} request timed out after ${Math.round(timeoutMs / 1000)}s`); +} + +async function withTimeout(promise: Promise, provider: string, timeoutMs = getRequestTimeoutMs()): Promise { + let timeoutHandle: NodeJS.Timeout | undefined; + const timeoutPromise = new Promise((_, reject) => { + timeoutHandle = setTimeout(() => reject(createTimeoutError(provider, timeoutMs)), timeoutMs); + }); + + try { + return await Promise.race([promise, timeoutPromise]); + } finally { + if (timeoutHandle) { + clearTimeout(timeoutHandle); + } + } +} + +function extractMessageContent(response: any, provider: string): string { + const firstChoice = response?.choices?.[0]; + const content = firstChoice?.message?.content; + if (typeof content !== 'string' || content.length === 0) { + throw new Error(`${provider} returned an unexpected response: missing choices[0].message.content`); + } + return content; +} + export function getModelConfigError(): string | undefined { const provider = getConfigInstance().provider; switch (provider) { @@ -47,7 +134,7 @@ export function getModelConfigError(): string | undefined { } break; case 
'local': - if (!getConfigInstance().localLLMUrl) { + if (!getConfigInstance().localLLMUrl && !process.env.LOCAL_LLM_URL) { return 'Local LLM URL is not configured. Please set LSPRAG.localLLMUrl in settings.'; } break; @@ -60,17 +147,56 @@ export function getModelConfigError(): string | undefined { return undefined; } +function showErrorMessage(message: string): void { + if (vscodeApi?.window?.showErrorMessage) { + vscodeApi.window.showErrorMessage(message); + return; + } + + console.error(message); +} + +function logLLMInteraction(prompt: string, response: string): void { + try { + const logSavePath = getConfigInstance().logSavePath; + if (!logSavePath) { + return; + } + + if (!fs.existsSync(logSavePath)) { + fs.mkdirSync(logSavePath, { recursive: true }); + } + + const logFilePath = path.join(logSavePath, 'llm_logs.jsonl'); + const logData = { + prompt, + response, + timestamp: new Date().toISOString() + }; + fs.appendFileSync(logFilePath, JSON.stringify(logData) + '\n', 'utf8'); + } catch (error) { + console.error('Failed to log LLM interaction:', error); + } +} + export async function callLocalLLM(promptObj: any, logObj: any): Promise { // const modelName = getModelName(method); const modelName = getModelName(); logObj.prompt = promptObj[1]?.content; // Adjusted to ensure promptObj[1] exists - const ollama = new Ollama({ host: getConfigInstance().localLLMUrl }); + const localLLMUrl = getConfigInstance().localLLMUrl || process.env.LOCAL_LLM_URL; + if (!localLLMUrl) { + throw new Error('Local LLM URL not configured. 
Please set LSPRAG.localLLMUrl in settings or LOCAL_LLM_URL in the environment.'); + } + const ollama = new Ollama({ host: localLLMUrl }); try { - const response = await ollama.chat({ - model: modelName, - messages: promptObj, - stream: false, - }); + const response = await withTimeout( + ollama.chat({ + model: modelName, + messages: promptObj, + stream: false, + }) as Promise, + 'Local LLM' + ); const result = await response; const content = result.message.content; const tokenUsage = result.prompt_eval_count; @@ -85,37 +211,44 @@ export async function callLocalLLM(promptObj: any, logObj: any): Promise } // ... existing code ... -export async function invokeLLM(promptObj: any, logObj: any, maxRetries = 2, retryDelay = 2000): Promise { - const error = getModelConfigError(); - if (error) { - vscode.window.showErrorMessage(error); - console.error('invokeLLM::error', error); - return ""; - } - +export async function invokeLLM( + promptObj: any, + logObj: any = { prompt: '', result: '', tokenUsage: 0, model: '' }, + maxRetries = 2, + retryDelay = 2000 +): Promise { // Validate promptObj structure if (!Array.isArray(promptObj) || promptObj.length < 2) { const errorMsg = 'Invalid promptObj: must be an array with at least 2 elements'; console.error('invokeLLM::error', errorMsg); - vscode.window.showErrorMessage(errorMsg); + showErrorMessage(errorMsg); return ""; } if (!promptObj[0]?.content || !promptObj[1]?.content) { const errorMsg = 'Invalid promptObj: elements must have content property'; console.error('invokeLLM::error', errorMsg); - vscode.window.showErrorMessage(errorMsg); + showErrorMessage(errorMsg); return ""; } - // console.log('invokeLLM::promptObj', promptObj); - console.log('invokeLLM::promptObj_system', promptObj[0].content); - console.log('invokeLLM::promptObj_user', promptObj[1].content); - const messageTokens = promptObj[1].content.split(/\s+/).length; - // console.log("Invoking . . 
."); - // if (messageTokens > TOKENTHRESHOLD) { - // throw new TokenLimitExceededError(`Prompt exceeds token limit of ${TOKENTHRESHOLD} tokens.`); - // } + if (isSkipLLMModeEnabled()) { + const userPrompt = promptObj[1]?.content || ''; + const draft = getDraftCodeFromPrompt(userPrompt); + const syntheticResponse = draft ? `\`\`\`\n${draft}\n\`\`\`` : '```python\npass\n```'; + logObj.prompt = userPrompt; + logObj.result = syntheticResponse; + logObj.tokenUsage = '0'; + logLLMInteraction(userPrompt, syntheticResponse); + return syntheticResponse; + } + + const error = getModelConfigError(); + if (error) { + showErrorMessage(error); + console.error('invokeLLM::error', error); + return ""; + } const provider = getConfigInstance().provider; @@ -138,27 +271,19 @@ export async function invokeLLM(promptObj: any, logObj: any, maxRetries = 2, ret throw new Error("Unsupported provider!"); } - // Log the prompt and response - if (fs.existsSync(getConfigInstance().logSavePath) && promptObj[1]?.content) { - const logData = { - prompt: promptObj[1].content, - response: response, - timestamp: new Date().toISOString() - }; - const logFilePath = path.join(getConfigInstance().logSavePath, 'llm_logs.json'); - fs.appendFileSync(logFilePath, JSON.stringify(logData) + '\n'); + if (promptObj[1]?.content) { + logLLMInteraction(promptObj[1].content, response); } return response; } catch (error) { lastError = error as Error; - console.log(`Attempt ${attempt}/${maxRetries} failed: ${error}`); + console.log(`Attempt ${attempt}/${maxRetries} failed: ${lastError.message}`); if (attempt < maxRetries) { // Add exponential backoff with jitter for more robust retrying const jitter = Math.random() * 1000; const delay = retryDelay * Math.pow(2, attempt - 1) + jitter; - console.log(`Retrying in ${Math.round(delay / 1000)} seconds...`); await new Promise(resolve => setTimeout(resolve, delay)); } } @@ -166,7 +291,7 @@ export async function invokeLLM(promptObj: any, logObj: any, maxRetries = 2, ret // If 
we've exhausted all retries, throw the last error if (lastError) { - vscode.window.showErrorMessage(`Failed after ${maxRetries} attempts: ${lastError.message}`); + showErrorMessage(`Failed after ${maxRetries} attempts: ${lastError.message}`); throw lastError; } @@ -179,28 +304,28 @@ export async function callDeepSeek(promptObj: any, logObj: any): Promise const modelName = getModelName(); logObj.prompt = promptObj[1]?.content || ''; - const apiKey = getConfigInstance().deepseekApiKey; + const proxy = getConfigInstance().proxyUrl || process.env.HTTP_PROXY || process.env.HTTPS_PROXY; + const apiKey = getConfigInstance().deepseekApiKey || process.env.DEEPSEEK_API_KEY; if (!apiKey) { throw new Error('Deepseek API key not configured. Please set it in VS Code settings.'); } const openai = new OpenAI({ - baseURL: 'https://api.deepseek.com', + baseURL: getConfiguredBaseUrl('deepseek'), apiKey: apiKey, + timeout: getRequestTimeoutMs(), + ...(proxy && { httpAgent: new HttpsProxyAgent(proxy) }) }); try { const response = await openai.chat.completions.create({ model: modelName, messages: promptObj }); - console.log('invokeLLM::callDeepSeek::response', JSON.stringify(response, null, 2)); - const result = response.choices[0].message.content!; - const tokenUsage = response.usage!.prompt_tokens; + const result = extractMessageContent(response, 'DeepSeek'); + const tokenUsage = response.usage?.prompt_tokens; logObj.tokenUsage = tokenUsage; - logObj.result = result + "" + ((response.choices[0].message as any).reasoning_content || '');; - // console.log('Generated test code:', result); - // console.log('Token usage:', tokenUsage); + logObj.result = result + "" + ((response.choices?.[0]?.message as any)?.reasoning_content || ''); return result; } catch (e) { console.error('Error generating test code:', e); @@ -209,18 +334,13 @@ export async function callDeepSeek(promptObj: any, logObj: any): Promise } export async function callOpenAi(promptObj: any, logObj: any): Promise { - // 
console.log('invokeLLM::callOpenAi::proxyUrl', getConfigInstance().logAllConfig()); - const proxy = getConfigInstance().proxyUrl; - const apiKey = getConfigInstance().openaiApiKey; - console.log('invokeLLM::callOpenAi::proxy', proxy); - // console.log('invokeLLM::callOpenAi::apiKey', apiKey); + const proxy = getConfigInstance().proxyUrl || process.env.HTTP_PROXY || process.env.HTTPS_PROXY; + const apiKey = getConfigInstance().openaiApiKey || process.env.OPENAI_API_KEY; if (!apiKey) { throw new Error('OpenAI API key not configured. Please set it in VS Code settings.'); } - // const modelName = getModelName(method); const modelName = getModelName(); - console.log('invokeLLM::callOpenAi::modelName', modelName); if (proxy) { process.env.http_proxy = proxy; process.env.https_proxy = proxy; @@ -231,7 +351,9 @@ export async function callOpenAi(promptObj: any, logObj: any): Promise { logObj.prompt = promptObj[1]?.content || ''; const openai = new OpenAI({ + baseURL: getConfiguredBaseUrl('openai'), apiKey: apiKey, + timeout: getRequestTimeoutMs(), ...(proxy && { httpAgent: new HttpsProxyAgent(proxy) }) }); try { @@ -239,15 +361,13 @@ export async function callOpenAi(promptObj: any, logObj: any): Promise { model: modelName, messages: promptObj }); - const result = response.choices[0].message.content!; - const tokenUsage = response.usage!.prompt_tokens; + const result = extractMessageContent(response, 'OpenAI'); + const tokenUsage = response.usage?.prompt_tokens; logObj.tokenUsage = tokenUsage; logObj.result = result; - console.log('Generated test code:', result); - console.log('Token usage:', tokenUsage); return result; } catch (e) { console.error('Error generating test code:', e); throw e; } -} \ No newline at end of file +} diff --git a/src/llmHealthcheck.ts b/src/llmHealthcheck.ts new file mode 100644 index 0000000..b2e154b --- /dev/null +++ b/src/llmHealthcheck.ts @@ -0,0 +1,49 @@ +import { getConfigInstance } from './config'; +import { invokeLLM } from './invokeLLM'; + 
+type PromptMessage = { + role: 'system' | 'user'; + content: string; +}; + +export interface LLMHealthcheckResult { + provider: string; + model: string; + timeoutSeconds: number; + elapsedMs: number; + response: string; +} + +export function createLLMHealthcheckPrompt(): PromptMessage[] { + return [ + { + role: 'system', + content: 'Reply with exactly: OK' + }, + { + role: 'user', + content: 'Reply with exactly: OK' + } + ]; +} + +export async function runLLMHealthcheck(): Promise { + const config = getConfigInstance(); + const provider = config.provider; + const model = config.model; + const timeoutSeconds = Math.round(config.timeoutMs / 1000); + const startedAt = Date.now(); + + const response = (await invokeLLM(createLLMHealthcheckPrompt(), [], 1, 0)).trim(); + if (response !== 'OK') { + throw new Error(`Unexpected LLM response: ${response || '(empty)'}`); + } + + return { + provider, + model, + timeoutSeconds, + elapsedMs: Date.now() - startedAt, + response + }; +} diff --git a/src/test/generateCliHost.ts b/src/test/generateCliHost.ts new file mode 100644 index 0000000..2140cc2 --- /dev/null +++ b/src/test/generateCliHost.ts @@ -0,0 +1,157 @@ +import * as fs from 'fs'; +import * as vscode from 'vscode'; +import { getConfigInstance } from '../config'; +import { GenerateUnitTestCommandOptions, GenerateUnitTestCommandResult } from '../commands/generateUnitTestCommand'; + +type CliConfigOverrides = { + workspace: string; + provider?: string; + model?: string; + baseUrl?: string; + timeoutMs?: number; + savePath?: string; + localLLMUrl?: string; + openaiApiKey?: string; + deepseekApiKey?: string; + proxyUrl?: string; +}; + +function getRequiredEnv(name: string): string { + const value = process.env[name]; + if (!value) { + throw new Error(`Missing required environment variable: ${name}`); + } + return value; +} + +function toNumber(value: string | undefined): number | undefined { + if (!value || value.trim().length === 0) { + return undefined; + } + + const parsed 
= Number(value); + return Number.isFinite(parsed) ? parsed : undefined; +} + +function buildConfigOverrides(workspace: string): CliConfigOverrides { + return { + workspace, + ...(process.env.LSPRAG_CLI_PROVIDER ? { provider: process.env.LSPRAG_CLI_PROVIDER } : {}), + ...(process.env.LSPRAG_CLI_MODEL ? { model: process.env.LSPRAG_CLI_MODEL } : {}), + ...(process.env.LSPRAG_CLI_BASE_URL ? { baseUrl: process.env.LSPRAG_CLI_BASE_URL } : {}), + ...(toNumber(process.env.LSPRAG_CLI_TIMEOUT_MS) !== undefined ? { timeoutMs: toNumber(process.env.LSPRAG_CLI_TIMEOUT_MS) } : {}), + ...(process.env.LSPRAG_CLI_SAVE_PATH ? { savePath: process.env.LSPRAG_CLI_SAVE_PATH } : {}), + ...(process.env.LOCAL_LLM_URL ? { localLLMUrl: process.env.LOCAL_LLM_URL } : {}), + ...(process.env.OPENAI_API_KEY ? { openaiApiKey: process.env.OPENAI_API_KEY } : {}), + ...(process.env.DEEPSEEK_API_KEY ? { deepseekApiKey: process.env.DEEPSEEK_API_KEY } : {}), + ...(process.env.HTTP_PROXY || process.env.HTTPS_PROXY ? { proxyUrl: process.env.HTTP_PROXY || process.env.HTTPS_PROXY } : {}) + }; +} + +function applyConfigOverrides(workspace: string): void { + const overrides = buildConfigOverrides(workspace); + getConfigInstance().updateConfig(overrides as any); +} + +function buildCommandOptions(filePath: string): GenerateUnitTestCommandOptions { + return { + filePath, + functionName: process.env.LSPRAG_CLI_FUNCTION_NAME, + line: toNumber(process.env.LSPRAG_CLI_LINE), + character: toNumber(process.env.LSPRAG_CLI_CHARACTER) ?? 
0, + showGeneratedCode: false, + silent: true + }; +} + +function writeResult(resultFile: string, payload: Record): void { + fs.writeFileSync(resultFile, JSON.stringify(payload, null, 2), 'utf8'); +} + +function getExtensionIdsForLanguage(languageId: string): string[] { + switch (languageId) { + case 'python': + return ['ms-python.python', 'ms-python.vscode-pylance']; + case 'java': + return ['redhat.java']; + case 'go': + return ['golang.go']; + default: + return []; + } +} + +async function activateLanguageExtensions(languageId: string): Promise { + console.log('[LSPRAG] Visible extensions:', vscode.extensions.all.map(extension => extension.id).join(', ')); + const extensionIds = getExtensionIdsForLanguage(languageId); + for (const extensionId of extensionIds) { + const extension = vscode.extensions.getExtension(extensionId); + if (!extension) { + console.warn(`[LSPRAG] Extension not found: ${extensionId}`); + continue; + } + + console.log(`[LSPRAG] Activating extension: ${extensionId}`); + await extension.activate(); + } +} + +async function activateDevelopmentExtension(): Promise { + const extension = vscode.extensions.getExtension('LSPRAG.LSPRAG'); + if (!extension) { + throw new Error('Development extension LSPRAG.LSPRAG is not visible in the extension host.'); + } + + console.log(`[LSPRAG] Development extension active=${extension.isActive}`); + if (!extension.isActive) { + await extension.activate(); + } + console.log(`[LSPRAG] Development extension active=${extension.isActive} after activation`); +} + +export async function run(): Promise { + const workspace = getRequiredEnv('LSPRAG_CLI_WORKSPACE'); + const filePath = getRequiredEnv('LSPRAG_CLI_FILE_PATH'); + const resultFile = getRequiredEnv('LSPRAG_CLI_RESULT_FILE'); + + try { + await new Promise(resolve => setTimeout(resolve, 5000)); + applyConfigOverrides(workspace); + + const document = await vscode.workspace.openTextDocument(filePath); + await vscode.window.showTextDocument(document, { + preview: 
true, + preserveFocus: true + }); + await activateDevelopmentExtension(); + await activateLanguageExtensions(document.languageId); + await new Promise(resolve => setTimeout(resolve, 8000)); + + const result = await vscode.commands.executeCommand( + 'extension.generateUnitTest', + buildCommandOptions(filePath) + ); + + if (!result?.savedFilePath) { + throw new Error('Generation finished without a saved output file.'); + } + + writeResult(resultFile, { + ok: true, + sourceFilePath: result.sourceFilePath, + functionName: result.functionName, + savedFilePath: result.savedFilePath, + fullFileName: result.fullFileName + }); + console.log(`[LSPRAG] Generated test saved to ${result.savedFilePath}`); + } catch (error) { + const errorMessage = error instanceof Error ? error.message : String(error); + writeResult(resultFile, { + ok: false, + error: errorMessage, + workspace, + filePath + }); + throw error; + } +} diff --git a/src/test/runExperiment.ts b/src/test/runExperiment.ts index eca47f4..07a5107 100644 --- a/src/test/runExperiment.ts +++ b/src/test/runExperiment.ts @@ -34,6 +34,8 @@ export enum PromptType { MAX_ROUND: '5', PROMPT_TYPE: PromptType.BASIC }; + + const DEFAULT_VSCODE_VERSION = process.env.LSPRAG_TEST_VSCODE_VERSION || 'stable'; // Function to load private configuration export function loadPrivateConfig(): PrivateConfig { @@ -159,14 +161,14 @@ async function main() { const args = parseCommandLineArgs(); console.log('test::runExperiment::args', args); // Download VS Code, unzip it, and run the integration test - const vscodeExecutablePath = await downloadAndUnzipVSCode('1.97.0'); // '1.98.2', '1.97.0', '1.96.0', '1.95.0']; + const vscodeExecutablePath = await downloadAndUnzipVSCode(DEFAULT_VSCODE_VERSION); const [cliPath, ...vscodeArgs] = resolveCliArgsFromVSCodeExecutablePath(vscodeExecutablePath); // Install required extensions cp.spawnSync( cliPath, - [...vscodeArgs, '--install-extension', 'ms-python.python', '--install-extension', 'redhat.java', 
'--install-extension', 'golang.go'], + [...vscodeArgs, '--install-extension', 'ms-python.python', '--install-extension', 'ms-python.vscode-pylance', '--install-extension', 'redhat.java', '--install-extension', 'golang.go'], { encoding: 'utf-8', stdio: 'inherit' @@ -204,4 +206,4 @@ async function main() { } } -main(); \ No newline at end of file +main(); diff --git a/src/test/runTest.ts b/src/test/runTest.ts index 9182205..4d520b6 100644 --- a/src/test/runTest.ts +++ b/src/test/runTest.ts @@ -1,142 +1,217 @@ -import * as cp from 'child_process'; -import * as path from 'path'; -import { - downloadAndUnzipVSCode, - resolveCliArgsFromVSCodeExecutablePath, - runTests -} from '@vscode/test-electron'; - - -// Define types locally instead of importing from config.ts -export enum PromptType { - BASIC = 'basic', - DETAILED = 'detailed', - CONCISE = 'concise' -} - -// Add private configuration interface -interface PrivateConfig { - openaiApiKey: string; - deepseekApiKey: string; - localLLMUrl: string; - proxyUrl?: string; -} - -export type Provider = 'openai' | 'local' | 'deepseek'; - - -const DEFAULT_TEST_CONFIG = { - MODEL: 'deepseek-chat', - PROVIDER: 'deepseek' as Provider, - EXP_PROB: '0.2', - TIMEOUT: '0', // 0 means no timeout - PARALLEL_COUNT: '4', - MAX_ROUND: '5', - PROMPT_TYPE: PromptType.BASIC -}; - -// Function to load private configuration strictly from environment variables -export function loadPrivateConfig(provider?: Provider): PrivateConfig { - const openaiApiKey = process.env.OPENAI_API_KEY || process.env.TEST_OPENAI_API_KEY; - const deepseekApiKey = process.env.DEEPSEEK_API_KEY || process.env.TEST_DEEPSEEK_API_KEY; - const localLLMUrl = process.env.LOCAL_LLM_URL || process.env.TEST_LOCAL_LLM_URL; - const proxyUrl = process.env.PROXY_URL || process.env.TEST_PROXY_URL; - // const defaultAPIKEY = "1234567890" - // Only validate the API key needed for the selected provider - const selectedProvider = provider || validateProvider(process.env.TEST_PROVIDER || 
DEFAULT_TEST_CONFIG.PROVIDER); - - if (selectedProvider === 'openai' && !openaiApiKey) { - console.warn( - 'Missing required environment variable: OPENAI_API_KEY (or TEST_OPENAI_API_KEY). Ensure you have sourced your .env.sh, or set it through vscode settings.' - ); - } - - if (selectedProvider === 'deepseek' && !deepseekApiKey) { - console.warn( - 'Missing required environment variable: DEEPSEEK_API_KEY (or TEST_DEEPSEEK_API_KEY). Ensure you have sourced your .env.sh, or set it through vscode settings.' - ); - } - - if (selectedProvider === 'local' && !localLLMUrl) { - console.warn( - 'Missing required environment variable: LOCAL_LLM_URL (or TEST_LOCAL_LLM_URL). Ensure you have sourced your .env.sh, or set it through vscode settings.' - ); - } - - return { - openaiApiKey: openaiApiKey || '', - deepseekApiKey: deepseekApiKey || '', - localLLMUrl: localLLMUrl || '', - proxyUrl - }; -} - -// Helper function to validate provider -function validateProvider(value: string): Provider { - if (['openai', 'local', 'deepseek'].includes(value)) { - return value as Provider; - } - console.warn(`Invalid provider: ${value}. 
Using default: ${DEFAULT_TEST_CONFIG.PROVIDER}`); - return DEFAULT_TEST_CONFIG.PROVIDER; -} - -async function main() { - try { - const extensionDevelopmentPath = path.resolve(__dirname, '../../../'); - const vscodeExecutablePath = await downloadAndUnzipVSCode('1.98.2'); - const [cliPath, ...args] = resolveCliArgsFromVSCodeExecutablePath(vscodeExecutablePath); - - const extensionTestsPath = path.resolve(__dirname, './suite/index'); - // const specificTest = process.env.npm_config_testfile || undefined; - // Add after installation - const installedExtensions = cp.execSync( - `${cliPath} ${args.join(' ')} --list-extensions`, - { - encoding: 'utf-8', - timeout: 5000, - stdio: 'pipe', - env: { ...process.env, DONT_PROMPT_WSL_INSTALL: '1' } - } - ); - console.log('installedExtensions', installedExtensions); - // Use cp.spawn / cp.exec for custom setup - // const installExtensions = ['ms-python.python', 'oracle.oracle-java', 'golang.go']; - cp.spawnSync( - cliPath, - [...args, '--install-extension', 'ms-python.python', '--install-extension', 'redhat.java', '--install-extension', 'golang.go', '--install-extension', 'ms-vscode.cpptools'], - { - encoding: 'utf-8', - stdio: 'inherit', - env: { ...process.env, DONT_PROMPT_WSL_INSTALL: '1' } - } - ); - const privateConfig = loadPrivateConfig(); - // Run the extension test - await runTests({ - // Use the specified `code` executable - vscodeExecutablePath, - extensionDevelopmentPath, - extensionTestsPath, - extensionTestsEnv: { - NODE_ENV: 'test', - TEST_MODEL: DEFAULT_TEST_CONFIG.MODEL, - TEST_PROVIDER: validateProvider(DEFAULT_TEST_CONFIG.PROVIDER), - TEST_EXP_PROB: DEFAULT_TEST_CONFIG.EXP_PROB, - TEST_TIMEOUT: DEFAULT_TEST_CONFIG.TIMEOUT, - TEST_PARALLEL_COUNT: DEFAULT_TEST_CONFIG.PARALLEL_COUNT, - TEST_MAX_ROUND: DEFAULT_TEST_CONFIG.MAX_ROUND, - TEST_PROMPT_TYPE: DEFAULT_TEST_CONFIG.PROMPT_TYPE, - TEST_OPENAI_API_KEY: privateConfig.openaiApiKey, - TEST_SUMMARIZE_CONTEXT: 'true', // Add this line - TEST_DEEPSEEK_API_KEY: 
privateConfig.deepseekApiKey, - TEST_LOCAL_LLM_URL: privateConfig.localLLMUrl, - TEST_PROXY_URL: privateConfig.proxyUrl - } - }); - } catch (err) { - console.error('Failed to run tests', err); - process.exit(1); - } -} - -main(); +import * as cp from 'child_process'; +import * as path from 'path'; +import { + downloadAndUnzipVSCode, + resolveCliArgsFromVSCodeExecutablePath, + runTests +} from '@vscode/test-electron'; + +export enum PromptType { + BASIC = 'basic', + DETAILED = 'detailed', + CONCISE = 'concise' +} + +interface PrivateConfig { + openaiApiKey: string; + deepseekApiKey: string; + localLLMUrl: string; + proxyUrl?: string; +} + +export type Provider = 'openai' | 'local' | 'deepseek'; + +const DEFAULT_TEST_CONFIG = { + MODEL: 'deepseek-chat', + PROVIDER: 'deepseek' as Provider, + EXP_PROB: '0.2', + TIMEOUT: '0', + PARALLEL_COUNT: '1', + MAX_ROUND: '5', + PROMPT_TYPE: PromptType.BASIC +}; + +const DEFAULT_VSCODE_VERSION = process.env.LSPRAG_TEST_VSCODE_VERSION || 'stable'; + +function getCliArgValue(names: string[]): string | undefined { + const argv = process.argv.slice(2); + for (let i = 0; i < argv.length; i++) { + const arg = argv[i]; + for (const name of names) { + const flag = `--${name}`; + if (arg === flag) { + const next = argv[i + 1]; + if (next && !next.startsWith('--')) { + return next; + } + } + if (arg.startsWith(`${flag}=`)) { + return arg.slice(flag.length + 1); + } + } + } + return undefined; +} + +export function loadPrivateConfig(provider?: Provider): PrivateConfig { + const openaiApiKey = process.env.OPENAI_API_KEY || process.env.TEST_OPENAI_API_KEY; + const deepseekApiKey = process.env.DEEPSEEK_API_KEY || process.env.TEST_DEEPSEEK_API_KEY; + const localLLMUrl = process.env.LOCAL_LLM_URL || process.env.TEST_LOCAL_LLM_URL; + const proxyUrl = process.env.PROXY_URL || process.env.TEST_PROXY_URL; + const selectedProvider = provider || validateProvider(process.env.TEST_PROVIDER || DEFAULT_TEST_CONFIG.PROVIDER); + + if (selectedProvider === 
'openai' && !openaiApiKey) { + console.warn( + 'Missing required environment variable: OPENAI_API_KEY (or TEST_OPENAI_API_KEY). Ensure you have sourced your .env.sh, or set it through vscode settings.' + ); + } + + if (selectedProvider === 'deepseek' && !deepseekApiKey) { + console.warn( + 'Missing required environment variable: DEEPSEEK_API_KEY (or TEST_DEEPSEEK_API_KEY). Ensure you have sourced your .env.sh, or set it through vscode settings.' + ); + } + + if (selectedProvider === 'local' && !localLLMUrl) { + console.warn( + 'Missing required environment variable: LOCAL_LLM_URL (or TEST_LOCAL_LLM_URL). Ensure you have sourced your .env.sh, or set it through vscode settings.' + ); + } + + return { + openaiApiKey: openaiApiKey || '', + deepseekApiKey: deepseekApiKey || '', + localLLMUrl: localLLMUrl || '', + proxyUrl + }; +} + +function validateProvider(value: string): Provider { + if (['openai', 'local', 'deepseek'].includes(value)) { + return value as Provider; + } + console.warn(`Invalid provider: ${value}. 
Using default: ${DEFAULT_TEST_CONFIG.PROVIDER}`); + return DEFAULT_TEST_CONFIG.PROVIDER; +} + +async function main() { + try { + delete process.env.ELECTRON_RUN_AS_NODE; + process.env.DONT_PROMPT_WSL_INSTALL = '1'; + + const extensionDevelopmentPath = path.resolve(__dirname, '../../../'); + const vscodeExecutablePath = await downloadAndUnzipVSCode(DEFAULT_VSCODE_VERSION); + const [cliPath, ...args] = resolveCliArgsFromVSCodeExecutablePath(vscodeExecutablePath); + const extensionTestsPath = path.resolve(__dirname, './suite/index'); + + cp.spawnSync( + cliPath, + [ + ...args, + '--install-extension', 'ms-python.python', + '--install-extension', 'ms-python.vscode-pylance', + '--install-extension', 'redhat.java', + '--install-extension', 'golang.go', + '--install-extension', 'ms-vscode.cpptools' + ], + { + encoding: 'utf-8', + stdio: 'inherit', + env: { ...process.env, DONT_PROMPT_WSL_INSTALL: '1' } + } + ); + + try { + const installedExtensions = cp.execSync( + `${cliPath} ${args.join(' ')} --list-extensions`, + { + encoding: 'utf-8', + timeout: 5000, + stdio: 'pipe', + env: { ...process.env, DONT_PROMPT_WSL_INSTALL: '1' } + } + ); + console.log('installedExtensions', installedExtensions); + } catch { + console.log('Skipping extension list check'); + } + + const model = + getCliArgValue(['model']) || + process.env.npm_config_model || + process.env.TEST_MODEL || + DEFAULT_TEST_CONFIG.MODEL; + const providerRaw = + getCliArgValue(['provider']) || + process.env.npm_config_provider || + process.env.TEST_PROVIDER || + DEFAULT_TEST_CONFIG.PROVIDER; + const provider = validateProvider(providerRaw); + const privateConfig = loadPrivateConfig(provider); + const projectName = + getCliArgValue(['projectName', 'project-name']) || + process.env.npm_config_projectname || + process.env.npm_config_project_name || + process.env.TEST_PROJECT_NAME; + const taskListPath = + getCliArgValue(['taskListPath', 'task-list-path', 'taskList', 'task-list']) || + process.env.npm_config_tasklistpath 
|| + process.env.npm_config_task_list_path || + process.env.npm_config_tasklist || + process.env.npm_config_task_list || + process.env.TEST_TASK_LIST_PATH; + const testType = + getCliArgValue(['testType', 'test-type']) || + process.env.npm_config_testtype || + process.env.npm_config_test_type || + process.env.TEST_TYPE; + const testConfigPath = + getCliArgValue(['testConfigPath', 'test-config-path']) || + process.env.npm_config_testconfigpath || + process.env.npm_config_test_config_path || + process.env.TEST_CONFIG_PATH; + const parallelCountRaw = + getCliArgValue(['parallelCount', 'parallel-count']) || + process.env.npm_config_parallelcount || + process.env.npm_config_parallel_count || + process.env.TEST_PARALLEL_COUNT || + DEFAULT_TEST_CONFIG.PARALLEL_COUNT; + const parallelCount = Number.parseInt(parallelCountRaw, 10); + + console.log(`[test-runner] TEST_PROJECT_NAME=${projectName}`); + console.log(`[test-runner] TEST_TASK_LIST_PATH=${taskListPath}`); + console.log(`[test-runner] TEST_PARALLEL_COUNT=${parallelCount}`); + console.log(`[test-runner] TEST_MODEL=${model}`); + console.log(`[test-runner] TEST_PROVIDER=${provider}`); + + await runTests({ + vscodeExecutablePath, + extensionDevelopmentPath, + extensionTestsPath, + extensionTestsEnv: { + NODE_ENV: 'test', + TEST_MODEL: model, + TEST_PROVIDER: provider, + TEST_EXP_PROB: DEFAULT_TEST_CONFIG.EXP_PROB, + TEST_TIMEOUT: DEFAULT_TEST_CONFIG.TIMEOUT, + TEST_PARALLEL_COUNT: String(parallelCount), + TEST_MAX_ROUND: DEFAULT_TEST_CONFIG.MAX_ROUND, + TEST_PROMPT_TYPE: DEFAULT_TEST_CONFIG.PROMPT_TYPE, + TEST_OPENAI_API_KEY: privateConfig.openaiApiKey, + TEST_SUMMARIZE_CONTEXT: 'true', + TEST_DEEPSEEK_API_KEY: privateConfig.deepseekApiKey, + TEST_LOCAL_LLM_URL: privateConfig.localLLMUrl, + TEST_PROXY_URL: privateConfig.proxyUrl, + TEST_PROJECT_NAME: projectName, + TEST_TASK_LIST_PATH: taskListPath, + TEST_TYPE: testType, + TEST_CONFIG_PATH: testConfigPath, + } + }); + } catch (err) { + console.error('Failed to run 
tests', err); + process.exit(1); + } +} + +main();