From e7a7f2ab3c844fdcd57361e781e6cd53540c8321 Mon Sep 17 00:00:00 2001
From: c0dd3vi11
Date: Sun, 8 Jun 2025 00:53:04 +0300
Subject: [PATCH] gigachat support

---
 .cursor/rules/ai-output-variants.mdc        |  69 ++++
 .cursor/rules/change-build-run-repeat.mdc   |  22 ++
 .cursor/rules/project-files-description.mdc |  72 ++++
 .gitignore                                  |   4 +-
 CHANGELOG.md                                |   9 +
 README-zh-Hans.md                           |   2 +-
 README.md                                   |  96 ++++-
 package-lock.json                           | 324 ++++++++++++++++-
 package.json                                |   4 +-
 src/commands/chat.ts                        |  37 +-
 src/helpers/completion.ts                   | 309 +++++++++-------
 src/helpers/config.ts                       | 229 ++++++++++--
 src/helpers/engines/config-engine.ts        |  13 +
 src/helpers/engines/engine-api.ts           |  64 ++++
 src/helpers/engines/engine-factory.ts       |  19 +
 src/helpers/engines/gigachat-engine.ts      | 367 ++++++++++++++++++++
 src/helpers/logger.ts                       | 117 +++++++
 src/prompt.ts                               |  72 ++--
 18 files changed, 1602 insertions(+), 227 deletions(-)
 create mode 100644 .cursor/rules/ai-output-variants.mdc
 create mode 100644 .cursor/rules/change-build-run-repeat.mdc
 create mode 100644 .cursor/rules/project-files-description.mdc
 create mode 100644 src/helpers/engines/config-engine.ts
 create mode 100644 src/helpers/engines/engine-api.ts
 create mode 100644 src/helpers/engines/engine-factory.ts
 create mode 100644 src/helpers/engines/gigachat-engine.ts
 create mode 100644 src/helpers/logger.ts

diff --git a/.cursor/rules/ai-output-variants.mdc b/.cursor/rules/ai-output-variants.mdc
new file mode 100644
index 0000000..0adcea5
--- /dev/null
+++ b/.cursor/rules/ai-output-variants.mdc
+---
+description:
+globs:
+alwaysApply: false
+---
+Examples of correct responses. Use these program responses as a reference to understand that it's working correctly.
+
+1.
+```
+> npx ai my ip
+
+┌ AI Shell
+│
+◇ Your script:
+│
+│ curl ipinfo.io/ip
+│
+◇ Explanation:
+│
+│ Request to a public service that returns your IP address.
+│
+◆ Run this script?
+│ ● ✅ Yes (Lets go!)
+│ ○ 📝 Revise
+│ ○ ❌ Cancel
+└
+```
+
+2.
+```
+> npx ai 'what is the weather in New York'
+
+┌ AI Shell
+│
+◇ Your script:
+│
+│ curl wttr.in/NewYork
+│
+◇ Explanation:
+│
+│ Popular weather service that prints weather in a beautiful format with ASCII graphics.
+│
+◆ Run this script?
+│ ● ✅ Yes (Lets go!)
+│ ○ 📝 Revise
+│ ○ ❌ Cancel
+└
+```
+
+3.
+```
+> npx ai find all log files
+
+┌ AI Shell
+│
+◇ Your script:
+│
+│ find . -type f -name '*.log'
+│
+◇ Explanation:
+│
+│ Searches for log files in the current directory
+│
+◆ Run this script?
+│ ● ✅ Yes (Lets go!)
+│ ○ 📝 Revise
+│ ○ ❌ Cancel
+└
+```
diff --git a/.cursor/rules/change-build-run-repeat.mdc b/.cursor/rules/change-build-run-repeat.mdc
new file mode 100644
index 0000000..d8f7d77
--- /dev/null
+++ b/.cursor/rules/change-build-run-repeat.mdc
+---
+description:
+globs:
+alwaysApply: true
+---
+## Development Instructions (Always Apply)
+
+1. Install dependencies ($1): `npm install` - only if a new dependency was used and/or an old one was removed.
+2. Build ($2): `npm run build` - if code was changed and needs to be rebuilt.
+3. Run ($3): `npx ai 'description of what needs to be done'` - for functionality testing (for understanding how it should work, see `.cursor/rules/ai-output-variants.mdc`).
+4. Fix, log, debug, change dependencies ($1), rebuild ($2), and run ($3) as needed until the task is completed.
+
+P.S. For convenience, you can run steps $2 and $3 together as `$2 && $3`.
+
+## Additional (if required)
+
+Log ONLY using `src/helpers/logger.ts` (methods). Do not use `console.log`.
+Code snippets for use:
+- Import: `import { logger } from './logger';`
+- Call: `logger.debug(message: string, data?: any);` (as well as `info`, `error`)
+- See logs here: `./logs/ai-shell.log`
+
+If you need to understand the project structure, see `.cursor/rules/project-files-description.mdc`.
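For reference, a minimal sketch combining the import and call snippets above (hypothetical call sites; any module directly under `src/helpers/` can use the relative import as shown):

```ts
import { logger } from './logger';

// debug/info/error all share the (message, data?) signature
logger.debug('engine selected', { engine: 'GigaChat' });
logger.error('request failed', { status: 429 });
```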
diff --git a/.cursor/rules/project-files-description.mdc b/.cursor/rules/project-files-description.mdc
new file mode 100644
index 0000000..f839ce9
--- /dev/null
+++ b/.cursor/rules/project-files-description.mdc
+---
+description:
+globs:
+alwaysApply: false
+---
+## Project Structure
+
+1. **Main files:**
+   - `src/cli.ts` - Entry point of the CLI application, handles user commands
+   - `src/prompt.ts` - Responsible for the beautiful animated interface and response display
+
+2. **AI engines (`src/helpers/engines/`):**
+   - `engine-api.ts` - Abstract interface for all AI engines
+   - `engine-factory.ts` - Factory for creating AI engine instances
+   - `config-engine.ts` - Configuration types for AI engines
+   - `gigachat-engine.ts` - GigaChat engine implementation
+
+3. **Helper modules (`src/helpers/`):**
+   - `completion.ts` - **Key file!** Implementation of OpenAI API integration, response generation and processing (should be in `src/helpers/engines/`, but left here for better git-diff)
+   - `logger.ts` - Logging system with file rotation support and debug/info/error levels
+   - `config.ts` - Application settings and configuration management
+   - `error.ts` - Error handling and exceptions
+   - `i18n.ts` - Internationalization and interface translations
+   - `shell-history.ts` - Shell command history management
+   - `constants.ts` - Application constants
+   - `os-detect.ts` - Operating system and shell detection
+   - `stream-to-iterable.ts` - Utility for converting streams to iterable objects
+   - `stream-to-string.ts` - Utility for converting streams to strings
+   - `strip-regex-patterns.ts` - Utility for cleaning text from regex patterns
+   - `replace-all-polyfill.ts` - Polyfill for the replaceAll method
+
+4. **Commands (`src/commands/`):**
+   - `chat.ts` - Interactive AI chat implementation
+   - `config.ts` - Application configuration via CLI interface
+   - `update.ts` - Automatic application updates
+
+5. **Localization (`src/locales/`):**
+   - Translation files in JSON format for different languages:
+     - `en.json` - English
+     - `ru.json` - Russian
+     - `de.json` - German
+     - `fr.json` - French
+     - `es.json` - Spanish
+     - `pt.json` - Portuguese
+     - `zh-Hans.json`, `zh-Hant.json` - Chinese (Simplified and Traditional)
+     - `jp.json` - Japanese
+     - `ko.json` - Korean
+     - `ar.json` - Arabic
+     - `tr.json` - Turkish
+     - `uk.json` - Ukrainian
+     - `vi.json` - Vietnamese
+     - `id.json` - Indonesian
+
+6. **Configuration files (project root):**
+   - `package.json` - Package description and npm dependencies
+   - `tsconfig.json` - TypeScript compiler settings
+   - `.eslintrc.cjs` - ESLint settings for code checking
+   - `.prettierrc.json` - Code formatting settings
+   - `.nvmrc` - Node.js version for the project
+
+7. **Documentation:**
+   - `README.md` - Main project documentation
+   - `README-zh-Hans.md` - Documentation in Chinese
+   - `CHANGELOG.md` - Version change list
+   - `CONTRIBUTING.md` - Contributor guide
+   - `GIGACHAT_TZ.md` - Technical specification for GigaChat integration
+   - `LICENSE` - Project license
+
+8.
 **Working directories:**
+   - `dist/` - Compiled files for production
+   - `logs/` - Application log files
+   - `node_modules/` - Installed npm dependencies
+   - `gigachat-js/` - Repository with source code of the module for working with GigaChat API
diff --git a/.gitignore b/.gitignore
index 0738cb5..520455f 100644
--- a/.gitignore
+++ b/.gitignore
 node_modules
 .DS_Store
-dist
\ No newline at end of file
+dist
+gigachat-js/
+logs/
diff --git a/CHANGELOG.md b/CHANGELOG.md
index ffd905d..f545468 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
+## dev
+
+- **NEW ENGINE**: Added support for GigaChat
+- **BREAKING**: Renamed `MODEL` config to `OPENAI_MODEL` for clarity
+- **DEFAULT**: Changed default OpenAI model to `gpt-4.1-nano` (most cost-effective)
+- **FEATURE**: Added ability to change default config file path via `AI_SHELL_CONFIG_PATH` environment variable
+- **FEATURE**: Added Proxy PAC URL configuration support (`PROXY_PAC_URL`)
+- **FEATURE**: Added separate `ALL_PROXY` configuration for each AI engine (`OPENAI_ALLPROXY`, `GIGACHAT_ALLPROXY`)
+
 ## 1.0.12
 
 - Bug fixes
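The `AI_SHELL_CONFIG_PATH` feature noted in the changelog is read from the environment before the config file is opened (see `src/helpers/config.ts` below); a hypothetical invocation, with a placeholder path:

```sh
# point ai-shell at a non-default config file for this run only
AI_SHELL_CONFIG_PATH=/tmp/ai-shell-test-config ai 'find all log files'
```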
diff --git a/README-zh-Hans.md b/README-zh-Hans.md
index ea54d73..7d22e02 100644
--- a/README-zh-Hans.md
+++ b/README-zh-Hans.md
 ai config
 │ ○ OpenAI Key
 │ ○ OpenAI API Endpoint
 │ ○ Silent Mode
-│ ● Model (gpt-4o-mini)
+│ ● Model (gpt-4.1-nano)
 │ ○ Language
 │ ○ Cancel
 └
diff --git a/README.md b/README.md
index c630871..5622231 100644
--- a/README.md
+++ b/README.md
 ```
 npm install -g @builder.io/ai-shell
 ```
 
-2. Retrieve your API key from [OpenAI](https://platform.openai.com/account/api-keys)
+2. Choose and configure your AI engine:
+
+### Option A: OpenAI (default)
+
+1. Retrieve your API key from [OpenAI](https://platform.openai.com/account/api-keys)
 
    > Note: If you haven't already, you'll have to create an account and set up billing.
 
-3. Set the key so ai-shell can use it:
+2. Set the key so ai-shell can use it:
+
+   ```sh
+   ai config set OPENAI_KEY=<your key>
+   ```
+
+   > The default model is `gpt-4.1-nano` (most cost-effective). You can change it using `ai config set OPENAI_MODEL=<model name>`
+
+### Option B: GigaChat
+
+1. Get your GigaChat API credentials from [AI Platform](https://developers.sber.ru/docs/ru/gigachat/individuals-quickstart)
+
+2. Configure GigaChat as your AI engine:
 
    ```sh
-   ai config set OPENAI_KEY=<your key>
+   ai config set AI_ENGINE=GigaChat
+   ai config set GIGACHAT_KEY=<your key>
    ```
 
-   This will create a `.ai-shell` file in your home directory.
+---
+
+   This will create a `.ai-shell` file in your home directory. You can change the path to the config file by setting the `AI_SHELL_CONFIG_PATH` environment variable.
 
 ## Usage
 
 Then you will get an output like this, where you can choose to run the suggested
 
 ```bash
 ◇ Your script:
 │
-│ find . -name "*.log"
+│ find . -name "*.log"
 │
 ◇ Explanation:
 │
-│ 1. Searches for all files with the extension ".log" in the current directory and any subdirectories.
+│ 1. Search current directory and subdirectories
+│ 2. Find files ending with ".log"
 │
 ◆ Run this script?
 │ ● ✅ Yes (Lets go!)
-│ ○ 📝 Revise
+│ ○ 📝 Edit
+│ ○ 🔁 Revise
+│ ○ 📋 Copy
 │ ○ ❌ Cancel
 └
 ```
 
 or save the option as a preference using this command:
 
 ```sh
 ai config set SILENT_MODE=true
 ```
 
-### Custom API endpoint
+### Custom API endpoints
 
-You can custom OpenAI API endpoint to set OPENAI_API_ENDPOINT(default: `https://api.openai.com/v1`)
+You can customize API endpoints for both engines:
 
+**For OpenAI** (default: `https://api.openai.com/v1`):
 ```sh
 ai config set OPENAI_API_ENDPOINT=<your endpoint>
 ```
 
+**For GigaChat**:
+```sh
+ai config set GIGACHAT_API_ENDPOINT=<your endpoint>
+```
+
+### Proxy Configuration
+
+The application supports advanced proxy settings for both engines:
+
+**ALL_PROXY settings** (separate for each engine):
+```sh
+# For OpenAI
+ai config set OPENAI_ALLPROXY=<proxy url>
+
+# For GigaChat
+ai config set GIGACHAT_ALLPROXY=<proxy url>
+```
+
+**Proxy PAC URL** (common setting):
+```sh
+ai config set PROXY_PAC_URL=<pac url>
+```
 
 ### Set Language
 
 ![Language UI](https://user-images.githubusercontent.com/1784873/235330029-0a3b394c-d797-41d6-8717-9a6b487f1ae8.gif)
 
 To get an interactive UI like below:
 
 ```bash
 ◆ Set config:
-│ ○ OpenAI Key
-│ ○ OpenAI API Endpoint
+│ ● AI Engine (OpenAI)
+│ ○ [OpenAI] Key
+│ ○ [OpenAI] Model
+│ ○ [OpenAI] API Endpoint
+│ ○ [OpenAI] ALL_PROXY
+│ ○ [GigaChat] Key
+│ ○ [GigaChat] Model
+│ ○ [GigaChat] API Endpoint
+│ ○ [GigaChat] ALL_PROXY
+│ ○ [Common] Proxy PAC URL
 │ ○ Silent Mode
-│ ● Model (gpt-4o-mini)
 │ ○ Language
 │ ○ Cancel
 └
 ```
 
 ai update
 ```
 
 ## Common Issues
 
-### 429 error
+### OpenAI Issues
+
+#### 429 error
 
 Some users are reporting a 429 from OpenAI. This is due to incorrect billing setup or excessive quota usage. Please follow [this guide](https://help.openai.com/en/articles/6891831-error-code-429-you-exceeded-your-current-quota-please-check-your-plan-and-billing-details) to fix it.
 
 You can activate billing at [this link](https://platform.openai.com/account/billing/overview). Make sure to add a payment method if not under an active grant from OpenAI.
 
+### GigaChat Issues
+
+#### Authentication errors
+
+If you encounter authentication issues with GigaChat:
+1. Verify your API key is correct
+2. Ensure your GigaChat account has proper access rights
+3. Check that your credentials haven't expired
+
+#### Rate limiting
+
+GigaChat has its own rate limiting. If you encounter 429 errors:
+1. Wait a few moments before retrying
+2. Check your usage quotas in the Developer Console
+
 ## Motivation
 
 I am not a bash wizard, and am dying for access to the copilot CLI, and got impatient.
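Taken together, the README additions above amount to a setup sequence like the following (all values are placeholders; on the OpenAI path, `getProxyAgent` in `src/helpers/completion.ts` gives a configured `PROXY_PAC_URL` precedence over the per-engine `ALL_PROXY` value):

```sh
ai config set AI_ENGINE=GigaChat
ai config set GIGACHAT_KEY=<your key>
# optional: route traffic through a dedicated proxy
ai config set GIGACHAT_ALLPROXY=socks5://127.0.0.1:1080
# optional: or let a PAC file decide (takes precedence when set)
ai config set PROXY_PAC_URL=http://proxy.example.com/proxy.pac
```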
diff --git a/package-lock.json b/package-lock.json index 25be455..6a4c121 100644 --- a/package-lock.json +++ b/package-lock.json @@ -17,10 +17,12 @@ "clipboardy": "^2.3.0", "dedent": "^0.7.0", "execa": "^7.1.1", + "gigachat": "^0.0.14", "i18next": "^22.4.15", "ini": "^4.0.0", "kolorist": "^1.7.0", - "openai": "^3.2.1" + "openai": "^3.2.1", + "proxy-agent": "^6.5.0" }, "bin": { "ai": "dist/cli.mjs", @@ -901,6 +903,12 @@ } } }, + "node_modules/@tootallnate/quickjs-emscripten": { + "version": "0.23.0", + "resolved": "https://registry.npmjs.org/@tootallnate/quickjs-emscripten/-/quickjs-emscripten-0.23.0.tgz", + "integrity": "sha512-C5Mc6rdnsaJDjO3UpGW/CQTHtCKaYlScZTly4JIu97Jxo/odCiH0ITnDXSJPTOrEKk/ycSZ0AOgTmkDtkOsvIA==", + "license": "MIT" + }, "node_modules/@types/dedent": { "version": "0.7.0", "resolved": "https://registry.npmjs.org/@types/dedent/-/dedent-0.7.0.tgz", @@ -1174,6 +1182,15 @@ "acorn": "^6.0.0 || ^7.0.0 || ^8.0.0" } }, + "node_modules/agent-base": { + "version": "7.1.3", + "resolved": "https://registry.npmjs.org/agent-base/-/agent-base-7.1.3.tgz", + "integrity": "sha512-jRR5wdylq8CkOe6hei19GGZnxM6rBGwFl3Bg0YItGDimvjGtAvdZk4Pu6Cl4u4Igsws4a1fd1Vq3ezrhn4KmFw==", + "license": "MIT", + "engines": { + "node": ">= 14" + } + }, "node_modules/ajv": { "version": "6.12.6", "resolved": "https://registry.npmjs.org/ajv/-/ajv-6.12.6.tgz", @@ -1247,17 +1264,36 @@ "node": ">=8" } }, + "node_modules/ast-types": { + "version": "0.13.4", + "resolved": "https://registry.npmjs.org/ast-types/-/ast-types-0.13.4.tgz", + "integrity": "sha512-x1FCFnFifvYDDzTaLII71vG5uvDwgtmDTEVWAxrgeiR8VjMONcCXJx7E+USjDtHlwFmt9MysbqgF9b9Vjr6w+w==", + "license": "MIT", + "dependencies": { + "tslib": "^2.0.1" + }, + "engines": { + "node": ">=4" + } + }, + "node_modules/ast-types/node_modules/tslib": { + "version": "2.8.1", + "resolved": "https://registry.npmjs.org/tslib/-/tslib-2.8.1.tgz", + "integrity": "sha512-oJFu94HQb+KVduSUQL7wnpmqnfmLsOA/nAh6b6EH0wCEoK0/mPeXU6c3wKDV83MkOuHPRHtSXKKU99IBazS/2w==", + "license": "0BSD" + }, "node_modules/asynckit": { "version": "0.4.0", "resolved": "https://registry.npmjs.org/asynckit/-/asynckit-0.4.0.tgz", "integrity": "sha512-Oei9OH4tRh0YqU3GxhX79dM/mwVgvbZJaSNaRk+bshkj0S5cfHcgYakreBjrHwatXKbz+IoIdYLxrKim2MjW0Q==" }, "node_modules/axios": { - "version": "1.3.5", - "resolved": "https://registry.npmjs.org/axios/-/axios-1.3.5.tgz", - "integrity": "sha512-glL/PvG/E+xCWwV8S6nCHcrfg1exGx7vxyUIivIA1iL7BIh6bePylCfVHwp6k13ao7SATxB6imau2kqY+I67kw==", + "version": "1.9.0", + "resolved": "https://registry.npmjs.org/axios/-/axios-1.9.0.tgz", + "integrity": "sha512-re4CqKTJaURpzbLHtIi6XpDv20/CnpXOtjRY5/CU32L8gU8ek9UIivcfvSWvmKEngmVbrUtPpdDwWDWL7DNHvg==", + "license": "MIT", "dependencies": { - "follow-redirects": "^1.15.0", + "follow-redirects": "^1.15.6", "form-data": "^4.0.0", "proxy-from-env": "^1.1.0" } @@ -1268,6 +1304,15 @@ "integrity": "sha512-3oSeUO0TMV67hN1AmbXsK4yaqU7tjiHlbxRDZOpH0KW9+CeX4bRAaX0Anxt0tx2MrpRpWwQaPwIlISEJhYU5Pw==", "dev": true }, + "node_modules/basic-ftp": { + "version": "5.0.5", + "resolved": "https://registry.npmjs.org/basic-ftp/-/basic-ftp-5.0.5.tgz", + "integrity": "sha512-4Bcg1P8xhUuqcii/S0Z9wiHIrQVPMermM1any+MX5GeGD7faD3/msQUDGLol9wOcz4/jbg/WJnGqoJF6LiBdtg==", + "license": "MIT", + "engines": { + "node": ">=10.0.0" + } + }, "node_modules/brace-expansion": { "version": "2.0.1", "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-2.0.1.tgz", @@ -1512,6 +1557,15 @@ "node": ">= 8" } }, + "node_modules/data-uri-to-buffer": { + "version": "6.0.2", + 
"resolved": "https://registry.npmjs.org/data-uri-to-buffer/-/data-uri-to-buffer-6.0.2.tgz", + "integrity": "sha512-7hvf7/GW8e86rW0ptuwS3OcBGDjIi6SZva7hCyWC0yYry2cOPmLIjXAUHI6DK2HsnwJd9ifmt57i8eV2n4YNpw==", + "license": "MIT", + "engines": { + "node": ">= 14" + } + }, "node_modules/debug": { "version": "4.3.4", "resolved": "https://registry.npmjs.org/debug/-/debug-4.3.4.tgz", @@ -1549,6 +1603,20 @@ "node": ">=0.10.0" } }, + "node_modules/degenerator": { + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/degenerator/-/degenerator-5.0.1.tgz", + "integrity": "sha512-TllpMR/t0M5sqCXfj85i4XaAzxmS5tVA16dqvdkMwGmzI+dXLXnw3J+3Vdv7VKw+ThlTMboK6i9rnZ6Nntj5CQ==", + "license": "MIT", + "dependencies": { + "ast-types": "^0.13.4", + "escodegen": "^2.1.0", + "esprima": "^4.0.1" + }, + "engines": { + "node": ">= 14" + } + }, "node_modules/delayed-stream": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/delayed-stream/-/delayed-stream-1.0.0.tgz", @@ -1638,6 +1706,27 @@ "url": "https://github.com/sponsors/sindresorhus" } }, + "node_modules/escodegen": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/escodegen/-/escodegen-2.1.0.tgz", + "integrity": "sha512-2NlIDTwUWJN0mRPQOdtQBzbUHvdGY2P1VXSyU83Q3xKxM7WHX2Ql8dKq782Q9TgQUNOLEzEYu9bzLNj1q88I5w==", + "license": "BSD-2-Clause", + "dependencies": { + "esprima": "^4.0.1", + "estraverse": "^5.2.0", + "esutils": "^2.0.2" + }, + "bin": { + "escodegen": "bin/escodegen.js", + "esgenerate": "bin/esgenerate.js" + }, + "engines": { + "node": ">=6.0" + }, + "optionalDependencies": { + "source-map": "~0.6.1" + } + }, "node_modules/eslint": { "version": "8.38.0", "resolved": "https://registry.npmjs.org/eslint/-/eslint-8.38.0.tgz", @@ -1789,6 +1878,19 @@ "url": "https://opencollective.com/eslint" } }, + "node_modules/esprima": { + "version": "4.0.1", + "resolved": "https://registry.npmjs.org/esprima/-/esprima-4.0.1.tgz", + "integrity": "sha512-eGuFFw7Upda+g4p+QHvnW0RyTX/SVeJBDM/gCtMARO0cLuT2HcEKnTPvhjV6aGeqrCB/sbNop0Kszm0jsaWU4A==", + "license": "BSD-2-Clause", + "bin": { + "esparse": "bin/esparse.js", + "esvalidate": "bin/esvalidate.js" + }, + "engines": { + "node": ">=4" + } + }, "node_modules/esquery": { "version": "1.5.0", "resolved": "https://registry.npmjs.org/esquery/-/esquery-1.5.0.tgz", @@ -1970,15 +2072,16 @@ "dev": true }, "node_modules/follow-redirects": { - "version": "1.15.2", - "resolved": "https://registry.npmjs.org/follow-redirects/-/follow-redirects-1.15.2.tgz", - "integrity": "sha512-VQLG33o04KaQ8uYi2tVNbdrWp1QWxNNea+nmIB4EVM28v0hmP17z7aG1+wAkNzVq4KeXTq3221ye5qTJP91JwA==", + "version": "1.15.9", + "resolved": "https://registry.npmjs.org/follow-redirects/-/follow-redirects-1.15.9.tgz", + "integrity": "sha512-gew4GsXizNgdoRyqmyfMHyAmXsZDk6mHkSxZFCzW9gwlbtOW44CDtYavM+y+72qD/Vq2l550kMF52DT8fOLJqQ==", "funding": [ { "type": "individual", "url": "https://github.com/sponsors/RubenVerborgh" } ], + "license": "MIT", "engines": { "node": ">=4.0" }, @@ -2038,6 +2141,30 @@ "url": "https://github.com/sponsors/sindresorhus" } }, + "node_modules/get-uri": { + "version": "6.0.4", + "resolved": "https://registry.npmjs.org/get-uri/-/get-uri-6.0.4.tgz", + "integrity": "sha512-E1b1lFFLvLgak2whF2xDBcOy6NLVGZBqqjJjsIhvopKfWWEi64pLVTWWehV8KlLerZkfNTA95sTe2OdJKm1OzQ==", + "license": "MIT", + "dependencies": { + "basic-ftp": "^5.0.2", + "data-uri-to-buffer": "^6.0.2", + "debug": "^4.3.4" + }, + "engines": { + "node": ">= 14" + } + }, + "node_modules/gigachat": { + "version": "0.0.14", + "resolved": 
"https://registry.npmjs.org/gigachat/-/gigachat-0.0.14.tgz", + "integrity": "sha512-BwXDecDxF6aKJT+juuoATrBnFLDBg5Vho1dxYRsgM18zgZ55q5SwNiOgC05/J7rhGY66Pj6Wsnvk3FC6K4IMQw==", + "license": "ISC", + "dependencies": { + "axios": "^1.8.2", + "uuid": "^11.0.3" + } + }, "node_modules/glob": { "version": "8.1.0", "resolved": "https://registry.npmjs.org/glob/-/glob-8.1.0.tgz", @@ -2140,6 +2267,32 @@ "node": ">=8" } }, + "node_modules/http-proxy-agent": { + "version": "7.0.2", + "resolved": "https://registry.npmjs.org/http-proxy-agent/-/http-proxy-agent-7.0.2.tgz", + "integrity": "sha512-T1gkAiYYDWYx3V5Bmyu7HcfcvL7mUrTWiM6yOfa3PIphViJ/gFPbvidQ+veqSOHci/PxBcDabeUNCzpOODJZig==", + "license": "MIT", + "dependencies": { + "agent-base": "^7.1.0", + "debug": "^4.3.4" + }, + "engines": { + "node": ">= 14" + } + }, + "node_modules/https-proxy-agent": { + "version": "7.0.6", + "resolved": "https://registry.npmjs.org/https-proxy-agent/-/https-proxy-agent-7.0.6.tgz", + "integrity": "sha512-vK9P5/iUfdl95AI+JVyUuIcVtd4ofvtrOr3HNtM2yxC9bnMbEdp3x01OhQNnjb8IJYi38VlTE3mBXwcfvywuSw==", + "license": "MIT", + "dependencies": { + "agent-base": "^7.1.2", + "debug": "4" + }, + "engines": { + "node": ">= 14" + } + }, "node_modules/human-signals": { "version": "4.3.1", "resolved": "https://registry.npmjs.org/human-signals/-/human-signals-4.3.1.tgz", @@ -2228,6 +2381,19 @@ "node": "^14.17.0 || ^16.13.0 || >=18.0.0" } }, + "node_modules/ip-address": { + "version": "9.0.5", + "resolved": "https://registry.npmjs.org/ip-address/-/ip-address-9.0.5.tgz", + "integrity": "sha512-zHtQzGojZXTwZTHQqra+ETKd4Sn3vgi7uBmlPoXVWZqYvuKmtI0l/VZTjqGmJY9x88GGOaZ9+G9ES8hC4T4X8g==", + "license": "MIT", + "dependencies": { + "jsbn": "1.1.0", + "sprintf-js": "^1.1.3" + }, + "engines": { + "node": ">= 12" + } + }, "node_modules/is-builtin-module": { "version": "3.2.1", "resolved": "https://registry.npmjs.org/is-builtin-module/-/is-builtin-module-3.2.1.tgz", @@ -2377,6 +2543,12 @@ "js-yaml": "bin/js-yaml.js" } }, + "node_modules/jsbn": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/jsbn/-/jsbn-1.1.0.tgz", + "integrity": "sha512-4bYVV3aAMtDTTu4+xsDYa6sy9GyJ69/amsu9sYF2zqjiEoZA5xJi3BrfX3uY+/IekIu7MwdObdbDWpoZdBv3/A==", + "license": "MIT" + }, "node_modules/json-schema-traverse": { "version": "0.4.1", "resolved": "https://registry.npmjs.org/json-schema-traverse/-/json-schema-traverse-0.4.1.tgz", @@ -2537,6 +2709,15 @@ "integrity": "sha512-Tj+HTDSJJKaZnfiuw+iaF9skdPpTo2GtEly5JHnWV/hfv2Qj/9RKsGISQtLh2ox3l5EAGw487hnBee0sIJ6v2g==", "dev": true }, + "node_modules/netmask": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/netmask/-/netmask-2.0.2.tgz", + "integrity": "sha512-dBpDMdxv9Irdq66304OLfEmQ9tbNRFnFTuZiLo+bD+r332bBmMJ8GBLXklIXXgxd3+v9+KUnZaUR5PJMa75Gsg==", + "license": "MIT", + "engines": { + "node": ">= 0.4.0" + } + }, "node_modules/nice-try": { "version": "1.0.5", "resolved": "https://registry.npmjs.org/nice-try/-/nice-try-1.0.5.tgz", @@ -2661,6 +2842,38 @@ "url": "https://github.com/sponsors/sindresorhus" } }, + "node_modules/pac-proxy-agent": { + "version": "7.2.0", + "resolved": "https://registry.npmjs.org/pac-proxy-agent/-/pac-proxy-agent-7.2.0.tgz", + "integrity": "sha512-TEB8ESquiLMc0lV8vcd5Ql/JAKAoyzHFXaStwjkzpOpC5Yv+pIzLfHvjTSdf3vpa2bMiUQrg9i6276yn8666aA==", + "license": "MIT", + "dependencies": { + "@tootallnate/quickjs-emscripten": "^0.23.0", + "agent-base": "^7.1.2", + "debug": "^4.3.4", + "get-uri": "^6.0.1", + "http-proxy-agent": "^7.0.0", + "https-proxy-agent": "^7.0.6", + "pac-resolver": "^7.0.1", + 
"socks-proxy-agent": "^8.0.5" + }, + "engines": { + "node": ">= 14" + } + }, + "node_modules/pac-resolver": { + "version": "7.0.1", + "resolved": "https://registry.npmjs.org/pac-resolver/-/pac-resolver-7.0.1.tgz", + "integrity": "sha512-5NPgf87AT2STgwa2ntRMr45jTKrYBGkVU36yT0ig/n/GMAa3oPqhZfIQ2kMEimReg0+t9kZViDVZ83qfVUlckg==", + "license": "MIT", + "dependencies": { + "degenerator": "^5.0.0", + "netmask": "^2.0.2" + }, + "engines": { + "node": ">= 14" + } + }, "node_modules/parent-module": { "version": "1.0.1", "resolved": "https://registry.npmjs.org/parent-module/-/parent-module-1.0.1.tgz", @@ -2786,6 +2999,34 @@ "url": "https://github.com/prettier/prettier?sponsor=1" } }, + "node_modules/proxy-agent": { + "version": "6.5.0", + "resolved": "https://registry.npmjs.org/proxy-agent/-/proxy-agent-6.5.0.tgz", + "integrity": "sha512-TmatMXdr2KlRiA2CyDu8GqR8EjahTG3aY3nXjdzFyoZbmB8hrBsTyMezhULIXKnC0jpfjlmiZ3+EaCzoInSu/A==", + "license": "MIT", + "dependencies": { + "agent-base": "^7.1.2", + "debug": "^4.3.4", + "http-proxy-agent": "^7.0.1", + "https-proxy-agent": "^7.0.6", + "lru-cache": "^7.14.1", + "pac-proxy-agent": "^7.1.0", + "proxy-from-env": "^1.1.0", + "socks-proxy-agent": "^8.0.5" + }, + "engines": { + "node": ">= 14" + } + }, + "node_modules/proxy-agent/node_modules/lru-cache": { + "version": "7.18.3", + "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-7.18.3.tgz", + "integrity": "sha512-jumlc0BIUrS3qJGgIkWZsyfAM7NCWiBcCDhnd+3NNM5KbBmLTgHVfWBcg6W+rLUsIpzpERPsvwUP7CckAQSOoA==", + "license": "ISC", + "engines": { + "node": ">=12" + } + }, "node_modules/proxy-from-env": { "version": "1.1.0", "resolved": "https://registry.npmjs.org/proxy-from-env/-/proxy-from-env-1.1.0.tgz", @@ -3018,6 +3259,60 @@ "url": "https://github.com/sponsors/sindresorhus" } }, + "node_modules/smart-buffer": { + "version": "4.2.0", + "resolved": "https://registry.npmjs.org/smart-buffer/-/smart-buffer-4.2.0.tgz", + "integrity": "sha512-94hK0Hh8rPqQl2xXc3HsaBoOXKV20MToPkcXvwbISWLEs+64sBq5kFgn2kJDHb1Pry9yrP0dxrCI9RRci7RXKg==", + "license": "MIT", + "engines": { + "node": ">= 6.0.0", + "npm": ">= 3.0.0" + } + }, + "node_modules/socks": { + "version": "2.8.4", + "resolved": "https://registry.npmjs.org/socks/-/socks-2.8.4.tgz", + "integrity": "sha512-D3YaD0aRxR3mEcqnidIs7ReYJFVzWdd6fXJYUM8ixcQcJRGTka/b3saV0KflYhyVJXKhb947GndU35SxYNResQ==", + "license": "MIT", + "dependencies": { + "ip-address": "^9.0.5", + "smart-buffer": "^4.2.0" + }, + "engines": { + "node": ">= 10.0.0", + "npm": ">= 3.0.0" + } + }, + "node_modules/socks-proxy-agent": { + "version": "8.0.5", + "resolved": "https://registry.npmjs.org/socks-proxy-agent/-/socks-proxy-agent-8.0.5.tgz", + "integrity": "sha512-HehCEsotFqbPW9sJ8WVYB6UbmIMv7kUUORIF2Nncq4VQvBfNBLibW9YZR5dlYCSUhwcD628pRllm7n+E+YTzJw==", + "license": "MIT", + "dependencies": { + "agent-base": "^7.1.2", + "debug": "^4.3.4", + "socks": "^2.8.3" + }, + "engines": { + "node": ">= 14" + } + }, + "node_modules/source-map": { + "version": "0.6.1", + "resolved": "https://registry.npmjs.org/source-map/-/source-map-0.6.1.tgz", + "integrity": "sha512-UjgapumWlbMhkBgzT7Ykc5YXUT46F0iKu8SGXq0bcwP5dz/h0Plj6enJqjz1Zbq2l5WaqYnrVbwWOWMyF3F47g==", + "license": "BSD-3-Clause", + "optional": true, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/sprintf-js": { + "version": "1.1.3", + "resolved": "https://registry.npmjs.org/sprintf-js/-/sprintf-js-1.1.3.tgz", + "integrity": "sha512-Oo+0REFV59/rz3gfJNKQiBlwfHaSESl1pcGyABQsnnIfWOFt6JNj5gCog2U6MLZ//IGYD+nA8nI+mTShREReaA==", + "license": 
"BSD-3-Clause" + }, "node_modules/string-argv": { "version": "0.3.1", "resolved": "https://registry.npmjs.org/string-argv/-/string-argv-0.3.1.tgz", @@ -3193,6 +3488,19 @@ "punycode": "^2.1.0" } }, + "node_modules/uuid": { + "version": "11.1.0", + "resolved": "https://registry.npmjs.org/uuid/-/uuid-11.1.0.tgz", + "integrity": "sha512-0/A9rDy9P7cJ+8w1c9WD9V//9Wj15Ce2MPz8Ri6032usz+NfePxx5AcN3bN+r6ZL6jEo066/yNYB3tn4pQEx+A==", + "funding": [ + "https://github.com/sponsors/broofa", + "https://github.com/sponsors/ctavan" + ], + "license": "MIT", + "bin": { + "uuid": "dist/esm/bin/uuid" + } + }, "node_modules/which": { "version": "2.0.2", "resolved": "https://registry.npmjs.org/which/-/which-2.0.2.tgz", diff --git a/package.json b/package.json index a6abb27..7c1da6b 100644 --- a/package.json +++ b/package.json @@ -13,10 +13,12 @@ "clipboardy": "^2.3.0", "dedent": "^0.7.0", "execa": "^7.1.1", + "gigachat": "^0.0.14", "i18next": "^22.4.15", "ini": "^4.0.0", "kolorist": "^1.7.0", - "openai": "^3.2.1" + "openai": "^3.2.1", + "proxy-agent": "^6.5.0" }, "repository": { "type": "git", diff --git a/src/commands/chat.ts b/src/commands/chat.ts index 898010f..7a78f88 100644 --- a/src/commands/chat.ts +++ b/src/commands/chat.ts @@ -1,11 +1,13 @@ import { command } from 'cleye'; import { spinner, intro, outro, text, isCancel } from '@clack/prompts'; import { cyan, green } from 'kolorist'; -import { generateCompletion, readData } from '../helpers/completion'; import { getConfig } from '../helpers/config'; import { streamToIterable } from '../helpers/stream-to-iterable'; -import { ChatCompletionRequestMessage } from 'openai'; +import { ChatMessage } from '../helpers/engines/engine-api'; import i18n from '../helpers/i18n'; +import { EngineConfig } from '../helpers/engines/config-engine'; +import { createEngine } from '../helpers/engines/engine-factory'; +import { getEngineConfig } from '../helpers/config'; export default command( { @@ -16,12 +18,9 @@ export default command( }, }, async () => { - const { - OPENAI_KEY: key, - OPENAI_API_ENDPOINT: apiEndpoint, - MODEL: model, - } = await getConfig(); - const chatHistory: ChatCompletionRequestMessage[] = []; + const config = await getConfig(); + const engineConfig = getEngineConfig(config); + const chatHistory: ChatMessage[] = []; console.log(''); intro(i18n.t('Starting new conversation')); @@ -48,9 +47,7 @@ export default command( }); const { readResponse } = await getResponse({ prompt: chatHistory, - key, - model, - apiEndpoint, + engineConfig, }); infoSpin.stop(`${green('AI Shell:')}`); @@ -74,25 +71,19 @@ export default command( async function getResponse({ prompt, number = 1, - key, - model, - apiEndpoint, + engineConfig, }: { - prompt: string | ChatCompletionRequestMessage[]; + prompt: string | ChatMessage[]; number?: number; - model?: string; - key: string; - apiEndpoint: string; + engineConfig: EngineConfig; }) { - const stream = await generateCompletion({ + const engine = createEngine(engineConfig); + const stream = await engine.generateCompletion({ prompt, - key, - model, number, - apiEndpoint, }); const iterableStream = streamToIterable(stream); - return { readResponse: readData(iterableStream) }; + return { readResponse: engine.readData(iterableStream) }; } diff --git a/src/helpers/completion.ts b/src/helpers/completion.ts index 3317196..89819d7 100644 --- a/src/helpers/completion.ts +++ b/src/helpers/completion.ts @@ -15,38 +15,51 @@ import './replace-all-polyfill'; import i18n from './i18n'; import { stripRegexPatterns } from './strip-regex-patterns'; 

import readline from 'readline';
+import { ProxyAgent } from 'proxy-agent';
+import { logger } from './logger';
+import { EngineConfig } from './engines/config-engine';
+import { EngineApi, ChatMessage } from './engines/engine-api';
 
 const explainInSecondRequest = true;
 
-function getOpenAi(key: string, apiEndpoint: string) {
+function getOpenAi(engineConfig: EngineConfig): OpenAIApi {
   const openAi = new OpenAIApi(
-    new Configuration({ apiKey: key, basePath: apiEndpoint })
+    new Configuration({
+      apiKey: engineConfig.apiKey,
+      basePath: engineConfig.apiEndpoint,
+    })
   );
   return openAi;
 }
 
+function getProxyAgent(engineConfig: EngineConfig): ProxyAgent {
+  const { proxy, proxyPacUrl } = engineConfig;
+  const proxyToUse = proxyPacUrl ? `pac+${proxyPacUrl}` : proxy;
+  logger.debug(proxyToUse ? `Use proxy: ${proxyToUse}` : 'Without proxy');
+  return new ProxyAgent({
+    getProxyForUrl: () => proxyToUse,
+  });
+}
+
 // Openai outputs markdown format for code blocks. It often uses
 // a github style like: "```bash"
 const shellCodeExclusions = [/```[a-zA-Z]*\n/gi, /```[a-zA-Z]*/gi, '\n'];
 
 export async function getScriptAndInfo({
   prompt,
-  key,
-  model,
-  apiEndpoint,
+  engineConfig,
 }: {
   prompt: string;
-  key: string;
-  model?: string;
-  apiEndpoint: string;
-}) {
+  engineConfig: EngineConfig;
+}): Promise<{
+  readScript: (writer: (data: string) => void) => Promise<string>,
+  readInfo: (writer: (data: string) => void) => Promise<string>,
+}> {
   const fullPrompt = getFullPrompt(prompt);
   const stream = await generateCompletion({
     prompt: fullPrompt,
     number: 1,
-    key,
-    model,
-    apiEndpoint,
+    engineConfig,
   });
   const iterableStream = streamToIterable(stream);
   return {
     readScript: readData(iterableStream, ...shellCodeExclusions),
     readInfo: readData(iterableStream, ...shellCodeExclusions),
   };
 }
 
 export async function generateCompletion({
   prompt,
   number = 1,
-  key,
-  model,
-  apiEndpoint,
+  engineConfig,
 }: {
-  prompt: string | ChatCompletionRequestMessage[];
+  prompt: string | ChatMessage[];
   number?: number;
-  model?: string;
-  key: string;
-  apiEndpoint: string;
-}) {
-  const openAi = getOpenAi(key, apiEndpoint);
+  engineConfig: EngineConfig;
+}): Promise<IncomingMessage> {
+  const openAi = getOpenAi(engineConfig);
+  const agent = getProxyAgent(engineConfig);
   try {
     const completion = await openAi.createChatCompletion(
       {
-        model: model || 'gpt-4o-mini',
+        model: engineConfig.modelName,
         messages: Array.isArray(prompt)
          ?
 (prompt as ChatCompletionRequestMessage[])
          : [{ role: 'user', content: prompt }],
        n: Math.min(number, 10),
        stream: true,
      },
-      { responseType: 'stream' }
+      {
+        responseType: 'stream',
+        httpAgent: agent,
+        httpsAgent: agent,
+      },
     );
-
     return completion.data as unknown as IncomingMessage;
   } catch (err) {
     const error = err as AxiosError;
 
 export async function getExplanation({
   script,
-  key,
-  model,
-  apiEndpoint,
+  engineConfig,
 }: {
   script: string;
-  key: string;
-  model?: string;
-  apiEndpoint: string;
-}) {
+  engineConfig: EngineConfig;
+}): Promise<{
+  readExplanation: (writer: (data: string) => void) => Promise<string>,
+}> {
   const prompt = getExplanationPrompt(script);
   const stream = await generateCompletion({
     prompt,
-    key,
     number: 1,
-    model,
-    apiEndpoint,
+    engineConfig,
   });
   const iterableStream = streamToIterable(stream);
   return { readExplanation: readData(iterableStream) };
 }
 
 export async function getRevision({
   prompt,
   code,
-  key,
-  model,
-  apiEndpoint,
+  engineConfig,
 }: {
   prompt: string;
   code: string;
-  key: string;
-  model?: string;
-  apiEndpoint: string;
-}) {
+  engineConfig: EngineConfig;
+}): Promise<{
+  readScript: (writer: (data: string) => void) => Promise<string>,
+}> {
   const fullPrompt = getRevisionPrompt(prompt, code);
   const stream = await generateCompletion({
     prompt: fullPrompt,
-    key,
     number: 1,
-    model,
-    apiEndpoint,
+    engineConfig,
   });
   const iterableStream = streamToIterable(stream);
   return {
     readScript: readData(iterableStream, ...shellCodeExclusions),
   };
 }
 
-export const readData =
-  (
-    iterableStream: AsyncGenerator,
-    ...excluded: (RegExp | string | undefined)[]
-  ) =>
-  (writer: (data: string) => void): Promise<string> =>
-    new Promise(async (resolve) => {
-      let stopTextStream = false;
-      let data = '';
-      let content = '';
-      let dataStart = false;
-      let buffer = ''; // This buffer will temporarily hold incoming data only for detecting the start
-
-      const [excludedPrefix] = excluded;
-      const stopTextStreamKeys = ['q', 'escape']; //Group of keys that stop the text stream
-
-      const rl = readline.createInterface({
-        input: process.stdin,
-      });
+export function readData(
+  iterableStream: AsyncGenerator,
+  ...excluded: (RegExp | string | undefined)[]
+): (writer: (data: string) => void) => Promise<string> {
+  return (writer: (data: string) => void): Promise<string> =>
+    new Promise(async (resolve: (value: string) => void) => {
+      readDataImpl(iterableStream, excluded, writer, resolve);
+    });
+}
 
-      process.stdin.setRawMode(true);
+async function readDataImpl(
+  iterableStream: AsyncGenerator,
+  excluded: (RegExp | string | undefined)[],
+  writer: (data: string) => void,
+  resolve: (data: string) => void,
+): Promise<void> {
+  let stopTextStream = false;
+  let data = '';
+  let content = '';
+  let dataStart = false;
+  let buffer = ''; // This buffer will temporarily hold incoming data only for detecting the start
+
+  const [excludedPrefix] = excluded;
+  const stopTextStreamKeys = ['q', 'escape']; //Group of keys that stop the text stream
+
+  const rl = readline.createInterface({
+    input: process.stdin,
+  });
+
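+  // Raw mode delivers individual keypresses without waiting for Enter, so
+  // the 'q' / Esc handler registered below can cancel the stream mid-response.
+  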
process.stdin.setRawMode(true); - if (payload.startsWith('data:')) { - content = parseContent(payload); - // Use buffer only for start detection - if (!dataStart) { - // Append content to the buffer - buffer += content; - if (buffer.match(excludedPrefix ?? '')) { - dataStart = true; - // Clear the buffer once it has served its purpose - buffer = ''; - if (excludedPrefix) break; - } - } - - if (dataStart && content) { - const contentWithoutExcluded = stripRegexPatterns( - content, - excluded - ); - - data += contentWithoutExcluded; - writer(contentWithoutExcluded); - } + process.stdin.on('keypress', (key, data) => { + if (stopTextStreamKeys.includes(data.name)) { + stopTextStream = true; + } + }); + for await (const chunk of iterableStream) { + const payloads = chunk.toString().split('\n\n'); + for (const payload of payloads) { + if (payload.includes('[DONE]') || stopTextStream) { + dataStart = false; + resolve(data); + return; + } + + if (payload.startsWith('data:')) { + content = parseContent(payload); + // Use buffer only for start detection + if (!dataStart) { + // Append content to the buffer + buffer += content; + if (buffer.match(excludedPrefix ?? '')) { + dataStart = true; + // Clear the buffer once it has served its purpose + buffer = ''; + if (excludedPrefix) break; } } - } - function parseContent(payload: string): string { - const data = payload.replaceAll(/(\n)?^data:\s*/g, ''); - try { - const delta = JSON.parse(data.trim()); - return delta.choices?.[0]?.delta?.content ?? ''; - } catch (error) { - return `Error with JSON.parse and ${payload}.\n${error}`; + if (dataStart && content) { + const contentWithoutExcluded = stripRegexPatterns( + content, + excluded + ); + + data += contentWithoutExcluded; + writer(contentWithoutExcluded); } } + } + } - resolve(data); - }); + function parseContent(payload: string): string { + const data = payload.replaceAll(/(\n)?^data:\s*/g, ''); + try { + const delta = JSON.parse(data.trim()); + return delta.choices?.[0]?.delta?.content ?? 
'';
+    } catch (error) {
+      return `Error with JSON.parse and ${payload}.\n${error}`;
+    }
+  }
+
+  resolve(data);
+}
 
 function getExplanationPrompt(script: string) {
   return dedent`
 
 function getShellDetails() {
   const shellDetails = detectShell();
-  
+
   return dedent`
     The target shell is ${shellDetails}
   `;
 }
 
 export async function getModels(
-  key: string,
-  apiEndpoint: string
-): Promise<Model[]> {
-  const openAi = getOpenAi(key, apiEndpoint);
-  const response = await openAi.listModels();
+  engineConfig: EngineConfig,
+): Promise<string[]> {
+  logger.debug('Requesting OpenAI models list...');
+  const openAi = getOpenAi(engineConfig);
+  const agent = getProxyAgent(engineConfig);
+  const response = await openAi.listModels({
+    httpAgent: agent,
+    httpsAgent: agent,
+  });
+
+  const models = response.data.data
+    .filter((model: Model) => model.object === 'model')
+    .map((model: Model) => model.id);
+
+  logger.debug(`Retrieved ${models.length} models`, { models });
+  return models;
+}
+
+export function createOpenAiEngine(
+  engineConfig: EngineConfig,
+): EngineApi {
+  return {
+    async getScriptAndInfo(params: { prompt: string }) {
+      return await getScriptAndInfo({
+        prompt: params.prompt,
+        engineConfig,
+      });
+    },
+
+    async generateCompletion(params: {
+      prompt: string | ChatMessage[];
+      number?: number;
+    }) {
+      return await generateCompletion({
+        prompt: params.prompt,
+        number: params.number,
+        engineConfig,
+      });
+    },
 
-  return response.data.data.filter((model) => model.object === 'model');
+    async getExplanation(params: { script: string }) {
+      return await getExplanation({
+        script: params.script,
+        engineConfig,
+      });
+    },
+
+    async getRevision(params: { prompt: string; code: string }) {
+      return await getRevision({
+        prompt: params.prompt,
+        code: params.code,
+        engineConfig,
+      });
+    },
+
+    readData(
+      iterableStream: AsyncGenerator,
+      ...excluded: (RegExp | string | undefined)[]
+    ) {
+      return readData(iterableStream, ...excluded);
+    },
+
+    async getModels() {
+      return await getModels(engineConfig);
+    },
+  };
 }
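For orientation, the pieces above compose the way `src/commands/chat.ts` uses them: read the config once, narrow it to an `EngineConfig`, and let the factory return an `EngineApi`. A condensed sketch (import paths assume a caller in `src/`):

```ts
import { getConfig, getEngineConfig } from './helpers/config';
import { createEngine } from './helpers/engines/engine-factory';

const config = await getConfig();
const engine = createEngine(getEngineConfig(config));
const stream = await engine.generateCompletion({ prompt: 'list open ports' });
```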
diff --git a/src/helpers/config.ts b/src/helpers/config.ts
index cf3549a..b78ced2 100644
--- a/src/helpers/config.ts
+++ b/src/helpers/config.ts
 import { KnownError, handleCliError } from './error';
 import * as p from '@clack/prompts';
 import { red } from 'kolorist';
 import i18n from './i18n';
-import { getModels } from './completion';
-import { Model } from 'openai';
+import { logger } from './logger';
+import { EngineConfig, EngineType } from './engines/config-engine';
+import { createEngine } from './engines/engine-factory';
 
 const { hasOwnProperty } = Object.prototype;
 export const hasOwn = (object: unknown, key: PropertyKey) =>
 
 const parseAssert = (name: string, condition: any, message: string) => {
 };
 
 const configParsers = {
+  AI_ENGINE(engine?: string) {
+    if (!engine || !Object.values(EngineType).includes(engine as EngineType)) {
+      return EngineType.OPENAI;
+    }
+    return engine as EngineType;
+  },
+
   OPENAI_KEY(key?: string) {
     if (!key) {
       throw new KnownError(
         `Please set your OpenAI API key via \`${commandName} config set OPENAI_KEY=<your token>\``
       );
     }
 
     return key;
   },
-  MODEL(model?: string) {
+  OPENAI_MODEL(model?: string) {
     if (!model || model.length === 0) {
-      return 'gpt-4o-mini';
+      return 'gpt-4.1-nano';
     }
 
     return model as TiktokenModel;
   },
-  SILENT_MODE(mode?: string) {
-    return String(mode).toLowerCase() === 'true';
-  },
   OPENAI_API_ENDPOINT(apiEndpoint?: string) {
     return apiEndpoint || 'https://api.openai.com/v1';
   },
+  OPENAI_ALLPROXY(proxy?: string) {
+    return proxy || '';
+  },
+
+  GIGACHAT_KEY(key?: string) {
+    if (!key) {
+      throw new KnownError(
+        `Please set your GigaChat API key via \`${commandName} config set GIGACHAT_KEY=<your key>\`` // TODO: i18n
+      );
+    }
+
+    return key;
+  },
+  GIGACHAT_MODEL(model?: string) {
+    if (!model || model.length === 0) {
+      return 'GigaChat-2';
+    }
+
+    return model as TiktokenModel;
+  },
+  GIGACHAT_API_ENDPOINT(apiEndpoint?: string) {
+    return apiEndpoint || 'https://gigachat.devices.sberbank.ru/api/v1';
+  },
+  GIGACHAT_ALLPROXY(proxy?: string) {
+    return proxy || '';
+  },
+
+  PROXY_PAC_URL(pacUrl?: string) {
+    return pacUrl || '';
+  },
+  SILENT_MODE(mode?: string) {
+    return String(mode).toLowerCase() === 'true';
+  },
   LANGUAGE(language?: string) {
     return language || 'en';
   },
 
 type ValidConfig = {
   [Key in ConfigKeys]: ReturnType<(typeof configParsers)[Key]>;
 };
 
-const configPath = path.join(os.homedir(), '.ai-shell');
+const configPath = process.env.AI_SHELL_CONFIG_PATH
+  || path.join(os.homedir(), '.ai-shell');
+logger.debug(`Config path: ${configPath}`);
 
 const fileExists = (filePath: string) =>
   fs.lstat(filePath).then(
 
   return parsedConfig as ValidConfig;
 };
 
+export function getEngineConfig(
+  config: ValidConfig,
+  forcedEngineType?: EngineType,
+): EngineConfig {
+  const engineType = forcedEngineType || config.AI_ENGINE;
+  if (engineType === EngineType.OPENAI) {
+    return {
+      engineType: EngineType.OPENAI,
+      apiKey: config.OPENAI_KEY,
+      apiEndpoint: config.OPENAI_API_ENDPOINT,
+      modelName: config.OPENAI_MODEL,
+      proxy: config.OPENAI_ALLPROXY,
+      proxyPacUrl: config.PROXY_PAC_URL,
+    };
+  } else if (engineType === EngineType.GIGACHAT) {
+    return {
+      engineType: EngineType.GIGACHAT,
+      apiKey: config.GIGACHAT_KEY,
+      apiEndpoint: config.GIGACHAT_API_ENDPOINT,
+      modelName: config.GIGACHAT_MODEL,
+      proxy: config.GIGACHAT_ALLPROXY,
+      proxyPacUrl: config.PROXY_PAC_URL,
+    };
+  } else {
+    throw new Error(`Unsupported engine type: ${engineType}`);
+  }
+}
+
+
 export const setConfigs = async (keyValues: [key: string, value: string][]) => {
   const config = await readConfigFile();
 
 export const showConfigUI = async () => {
     message: i18n.t('Set config') + ':',
     options: [
       {
-        label: i18n.t('OpenAI Key'),
+        label: i18n.t('AI Engine'),
+        value: 'AI_ENGINE',
+        hint: hasOwn(config, 'AI_ENGINE')
+          ? config.AI_ENGINE
+          : i18n.t('(not set)'),
+      },
+      {
+        label: i18n.t('[OpenAI] Key'),
         value: 'OPENAI_KEY',
         hint: hasOwn(config, 'OPENAI_KEY')
           ? // Obfuscate the key
             'sk-...' + config.OPENAI_KEY.slice(-3)
           : i18n.t('(not set)'),
       },
       {
-        label: i18n.t('OpenAI API Endpoint'),
+        label: i18n.t('[OpenAI] Model'),
+        value: 'OPENAI_MODEL',
+        hint: hasOwn(config, 'OPENAI_MODEL') ? config.OPENAI_MODEL : i18n.t('(not set)'),
+      },
+      {
+        label: i18n.t('[OpenAI] API Endpoint'),
         value: 'OPENAI_API_ENDPOINT',
         hint: hasOwn(config, 'OPENAI_API_ENDPOINT')
           ? config.OPENAI_API_ENDPOINT
           : i18n.t('(not set)'),
       },
+      {
+        label: i18n.t('[OpenAI] ALL_PROXY'),
+        value: 'OPENAI_ALLPROXY',
+        hint: hasOwn(config, 'OPENAI_ALLPROXY')
+          ? config.OPENAI_ALLPROXY
+          : i18n.t('(not set)'),
+      },
+
+      {
+        label: i18n.t('[GigaChat] Key'),
+        value: 'GIGACHAT_KEY',
+        hint: hasOwn(config, 'GIGACHAT_KEY')
+          ? // Obfuscate the key
+            'Bearer-...'
+ config.GIGACHAT_KEY.slice(-3) + : i18n.t('(not set)'), + }, + { + label: i18n.t('[GigaChat] Model'), + value: 'GIGACHAT_MODEL', + hint: hasOwn(config, 'GIGACHAT_MODEL') ? config.GIGACHAT_MODEL : i18n.t('(not set)'), + }, + { + label: i18n.t('[GigaChat] API Endpoint'), + value: 'GIGACHAT_API_ENDPOINT', + hint: hasOwn(config, 'GIGACHAT_API_ENDPOINT') + ? config.GIGACHAT_API_ENDPOINT + : i18n.t('(not set)'), + }, + { + label: i18n.t('[GigaChat] ALL_PROXY'), + value: 'GIGACHAT_ALLPROXY', + hint: hasOwn(config, 'GIGACHAT_ALLPROXY') + ? config.GIGACHAT_ALLPROXY + : i18n.t('(not set)'), + }, + { + label: i18n.t('[Common] Proxy PAC URL'), + value: 'PROXY_PAC_URL', + hint: hasOwn(config, 'PROXY_PAC_URL') + ? config.PROXY_PAC_URL + : i18n.t('(not set)'), + }, { label: i18n.t('Silent Mode'), value: 'SILENT_MODE', @@ -142,11 +265,6 @@ export const showConfigUI = async () => { ? config.SILENT_MODE.toString() : i18n.t('(not set)'), }, - { - label: i18n.t('Model'), - value: 'MODEL', - hint: hasOwn(config, 'MODEL') ? config.MODEL : i18n.t('(not set)'), - }, { label: i18n.t('Language'), value: 'LANGUAGE', @@ -164,7 +282,18 @@ export const showConfigUI = async () => { if (p.isCancel(choice)) return; - if (choice === 'OPENAI_KEY') { + if (choice === 'AI_ENGINE') { + const engineType = await p.select({ + initialValue: (await getConfig()).AI_ENGINE.toString(), + message: i18n.t('Select AI Engine'), + options: Object.values(EngineType).map((m: string) => { + return { value: m, label: m }; + }), + }); + if (p.isCancel(engineType)) return; + await setConfigs([['AI_ENGINE', engineType as string]]); + + } else if (choice === 'OPENAI_KEY') { const key = await p.text({ message: i18n.t('Enter your OpenAI API key'), validate: (value) => { @@ -175,33 +304,87 @@ export const showConfigUI = async () => { }); if (p.isCancel(key)) return; await setConfigs([['OPENAI_KEY', key]]); + } else if (choice === 'GIGACHAT_KEY') { + const key = await p.text({ + message: i18n.t('Enter your GigaChat API key'), + validate: (value) => { + if (!value.length) { + return i18n.t('Please enter a key'); + } + }, + }); + if (p.isCancel(key)) return; + await setConfigs([['GIGACHAT_KEY', key]]); } else if (choice === 'OPENAI_API_ENDPOINT') { const apiEndpoint = await p.text({ message: i18n.t('Enter your OpenAI API Endpoint'), }); if (p.isCancel(apiEndpoint)) return; await setConfigs([['OPENAI_API_ENDPOINT', apiEndpoint]]); + } else if (choice === 'GIGACHAT_API_ENDPOINT') { + const apiEndpoint = await p.text({ + message: i18n.t('Enter your GigaChat API Endpoint'), + }); + if (p.isCancel(apiEndpoint)) return; + await setConfigs([['GIGACHAT_API_ENDPOINT', apiEndpoint]]); + } else if (choice === 'OPENAI_ALLPROXY') { + const proxy = await p.text({ + message: i18n.t('Enter your OpenAI ALL_PROXY'), + }); + if (p.isCancel(proxy)) return; + await setConfigs([['OPENAI_ALLPROXY', proxy]]); + } else if (choice === 'GIGACHAT_ALLPROXY') { + const proxy = await p.text({ + message: i18n.t('Enter your GigaChat ALL_PROXY'), + }); + if (p.isCancel(proxy)) return; + await setConfigs([['GIGACHAT_ALLPROXY', proxy]]); + } else if (choice === 'PROXY_PAC_URL') { + const pacUrl = await p.text({ + message: i18n.t('Enter your Proxy PAC URL'), + }); + if (p.isCancel(pacUrl)) return; + await setConfigs([['PROXY_PAC_URL', pacUrl]]); } else if (choice === 'SILENT_MODE') { const silentMode = await p.confirm({ + initialValue: (await getConfig()).SILENT_MODE, message: i18n.t('Enable silent mode?'), }); if (p.isCancel(silentMode)) return; await setConfigs([['SILENT_MODE', 
silentMode ? 'true' : 'false']]);
-  } else if (choice === 'MODEL') {
-    const { OPENAI_KEY: key, OPENAI_API_ENDPOINT: apiEndpoint } =
-      await getConfig();
-    const models = await getModels(key, apiEndpoint);
+  } else if (choice === 'OPENAI_MODEL') {
+    const config = await getConfig();
+    const engineConfig = getEngineConfig(config, EngineType.OPENAI);
+    const engine = createEngine(engineConfig);
+    const models = await engine.getModels();
+    const model = (await p.select({
+      initialValue: engineConfig.modelName,
+      message: 'Pick a model.',
+      options: models.map((m: string) => {
+        return { value: m, label: m };
+      }),
+    })) as string;
+
+    if (p.isCancel(model)) return;
+    await setConfigs([['OPENAI_MODEL', model]]);
+  } else if (choice === 'GIGACHAT_MODEL') {
+    const config = await getConfig();
+    const engineConfig = getEngineConfig(config, EngineType.GIGACHAT);
+    const engine = createEngine(engineConfig);
+    const models = await engine.getModels();
     const model = (await p.select({
+      initialValue: engineConfig.modelName,
       message: 'Pick a model.',
-      options: models.map((m: Model) => {
-        return { value: m.id, label: m.id };
+      options: models.map((m: string) => {
+        return { value: m, label: m };
       }),
     })) as string;
 
     if (p.isCancel(model)) return;
-    await setConfigs([['MODEL', model]]);
+    await setConfigs([['GIGACHAT_MODEL', model]]);
   } else if (choice === 'LANGUAGE') {
     const language = (await p.select({
+      initialValue: (await getConfig()).LANGUAGE,
       message: i18n.t('Enter the language you want to use'),
       options: languagesOptions,
     })) as string;
diff --git a/src/helpers/engines/config-engine.ts b/src/helpers/engines/config-engine.ts
new file mode 100644
index 0000000..aa1fe9a
--- /dev/null
+++ b/src/helpers/engines/config-engine.ts
+export enum EngineType {
+  OPENAI = 'OpenAI',
+  GIGACHAT = 'GigaChat'
+}
+
+export interface EngineConfig {
+  engineType: EngineType;
+  apiKey: string;
+  apiEndpoint: string;
+  modelName: string;
+  proxy: string;
+  proxyPacUrl: string;
+}
diff --git a/src/helpers/engines/engine-api.ts b/src/helpers/engines/engine-api.ts
new file mode 100644
index 0000000..6ddbf0b
--- /dev/null
+++ b/src/helpers/engines/engine-api.ts
+import { IncomingMessage } from 'http';
+
+/**
+ * Chat message
+ */
+export interface ChatMessage {
+  role: 'user' | 'assistant' | 'system';
+  content: string;
+}
+
+/**
+ * Abstract interface for AI engine
+ */
+export interface EngineApi {
+  /**
+   * Get script and info based on prompt
+   */
+  getScriptAndInfo(params: {
+    prompt: string;
+  }): Promise<{
+    readScript: (writer: (data: string) => void) => Promise<string>;
+    readInfo: (writer: (data: string) => void) => Promise<string>;
+  }>;
+
+  /**
+   * Generate completion based on prompt
+   */
+  generateCompletion(params: {
+    prompt: string | ChatMessage[];
+    number?: number;
+  }): Promise<IncomingMessage>;
+
+  /**
+   * Get explanation for script
+   */
+  getExplanation(params: {
+    script: string;
+  }): Promise<{
+    readExplanation: (writer: (data: string) => void) => Promise<string>;
+  }>;
+
+  /**
+   * Get script revision based on prompt
+   */
+  getRevision(params: {
+    prompt: string;
+    code: string;
+  }): Promise<{
+    readScript: (writer: (data: string) => void) => Promise<string>;
+  }>;
+
+  /**
+   * Read data from stream
+   */
+  readData(
+    iterableStream: AsyncGenerator,
+    ...excluded: (RegExp | string | undefined)[]
+  ): (writer: (data: string) => void) => Promise<string>;
+
+  /**
+   * Get list of available models
+   */
+  getModels(): Promise<string[]>;
+}
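Each streaming method of `EngineApi` pairs with the curried reader returned by `readData`, mirroring how `src/commands/chat.ts` consumes responses; a minimal sketch (assuming `engine` was built by the factory defined next):

```ts
import { streamToIterable } from '../stream-to-iterable';

const stream = await engine.generateCompletion({ prompt: 'my ip' });
const read = engine.readData(streamToIterable(stream));
const full = await read((chunk) => process.stdout.write(chunk));
```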
diff --git a/src/helpers/engines/engine-factory.ts b/src/helpers/engines/engine-factory.ts
new file mode 100644
index 0000000..d954007
--- /dev/null
+++ b/src/helpers/engines/engine-factory.ts
+import { EngineConfig, EngineType } from "./config-engine";
+import { EngineApi } from "./engine-api";
+import { createOpenAiEngine } from "../completion";
+import { createGigaChatEngine } from "./gigachat-engine";
+import { logger } from "../logger";
+
+export function createEngine(
+  engineConfig: EngineConfig,
+): EngineApi {
+  if (engineConfig.engineType === EngineType.OPENAI) {
+    logger.debug(`Creating OpenAI engine`);
+    return createOpenAiEngine(engineConfig);
+  }
+  if (engineConfig.engineType === EngineType.GIGACHAT) {
+    logger.debug(`Creating GigaChat engine`);
+    return createGigaChatEngine(engineConfig);
+  }
+  throw new Error(`Unsupported engine: ${engineConfig.engineType}`);
+}
diff --git a/src/helpers/engines/gigachat-engine.ts b/src/helpers/engines/gigachat-engine.ts
new file mode 100644
index 0000000..1949c62
--- /dev/null
+++ b/src/helpers/engines/gigachat-engine.ts
+import GigaChat from 'gigachat';
+import { Agent } from 'node:https';
+import dedent from 'dedent';
+import { IncomingMessage } from 'http';
+import { KnownError } from '../error';
+import { streamToIterable } from '../stream-to-iterable';
+import { detectShell } from '../os-detect';
+import '../replace-all-polyfill';
+import i18n from '../i18n';
+import { stripRegexPatterns } from '../strip-regex-patterns';
+import { logger } from '../logger';
+import { EngineConfig } from './config-engine';
+import { EngineApi, ChatMessage } from './engine-api';
+
+const explainInSecondRequest = true;
+
+function getGigaChat(engineConfig: EngineConfig): GigaChat {
+  const httpsAgent = new Agent({
+    rejectUnauthorized: false, // Disable root certificate verification
+  });
+  return new GigaChat({
+    credentials: engineConfig.apiKey,
+    baseUrl: engineConfig.apiEndpoint,
+    model: engineConfig.modelName,
+    timeout: 600,
+    profanityCheck: false,
+    httpsAgent: httpsAgent,
+  });
+}
+
+// GigaChat also uses markdown formatting for code
+const shellCodeExclusions = [/```[a-zA-Z]*\n/gi, /```[a-zA-Z]*/gi, '\n'];
+
+export async function getScriptAndInfo({
+  prompt,
+  engineConfig,
+}: {
+  prompt: string;
+  engineConfig: EngineConfig;
+}): Promise<{
+  readScript: (writer: (data: string) => void) => Promise<string>;
+  readInfo: (writer: (data: string) => void) => Promise<string>;
+}> {
+  const fullPrompt = getFullPrompt(prompt);
+  const stream = await generateCompletion({
+    prompt: fullPrompt,
+    number: 1,
+    engineConfig,
+  });
+  const iterableStream = streamToIterable(stream);
+  return {
+    readScript: readData(iterableStream, ...shellCodeExclusions),
+    readInfo: readData(iterableStream, ...shellCodeExclusions),
+  };
+}
+
+export async function generateCompletion({
+  prompt,
+  number = 1,
+  engineConfig,
+}: {
+  prompt: string | ChatMessage[];
+  number?: number;
+  engineConfig: EngineConfig;
+}): Promise<IncomingMessage> {
+  const gigaChat = getGigaChat(engineConfig);
+
+  try {
+    logger.debug('Generating completion with GigaChat', {
+      promptType: typeof prompt,
+      number,
+    });
+
+    // Format messages for GigaChat
+    const messages = Array.isArray(prompt)
+      ? 
prompt.map((msg) => ({ role: msg.role, content: msg.content })) + : [{ role: 'user' as const, content: prompt }]; + + // Get stream from GigaChat + const streamEmitter = await gigaChat.stream_readable({ + messages, + model: engineConfig.modelName || 'GigaChat-Pro', + stream: true, + }); + + // Create proper readable stream + const { Readable } = await import('stream'); + const mockStream = new Readable({ + read() { + // Do nothing, data will arrive asynchronously + }, + }) as IncomingMessage; + + // Forward events + streamEmitter.on('chunk', (chunk: any) => { + try { + // Format chunk in Server-Sent Events format + const content = chunk.choices?.[0]?.delta?.content || ''; + if (content) { + const sseData = `data: ${JSON.stringify({ + choices: [ + { + delta: { content }, + }, + ], + })}\n\n`; + mockStream.push(sseData); + } + } catch (err) { + logger.error('Error processing chunk', { error: err?.toString() }); + mockStream.destroy(err as Error); + } + }); + + streamEmitter.on('end', () => { + const endData = 'data: [DONE]\n\n'; + mockStream.push(endData); + mockStream.push(null); // End stream + }); + + streamEmitter.on('error', (error: any) => { + logger.error('GigaChat stream error', { error: error?.toString() }); + mockStream.destroy(error); + }); + + return mockStream; + } catch (err: any) { + logger.error('GigaChat completion error', err); + + if ( + err.message?.includes('401') || + err.message?.includes('authentication') + ) { + throw new KnownError( + dedent` + Request to GigaChat failed with authentication error. Please check your credentials and ensure: + 1. Your GigaChat API key is valid + 2. You have proper access to the GigaChat API + 3. Your credentials haven't expired + + Full error: ${err.message} + ` + ); + } + + if (err.message?.includes('429')) { + throw new KnownError( + dedent` + Request to GigaChat failed with rate limit error (429). Please: + 1. Wait a few moments before retrying + 2. Check your usage quotas + 3. Consider upgrading your plan if needed + + Full error: ${err.message} + ` + ); + } + + throw new KnownError( + dedent` + Request to GigaChat failed: ${err.message} + + Please check your network connection and GigaChat service status. 
+      `
+    );
+  }
+}
+
+export async function getExplanation({
+  script,
+  engineConfig,
+}: {
+  script: string;
+  engineConfig: EngineConfig;
+}): Promise<{
+  readExplanation: (writer: (data: string) => void) => Promise<string>;
+}> {
+  const prompt = getExplanationPrompt(script);
+  const stream = await generateCompletion({
+    prompt,
+    number: 1,
+    engineConfig,
+  });
+  const iterableStream = streamToIterable(stream);
+  return { readExplanation: readData(iterableStream) };
+}
+
+export async function getRevision({
+  prompt,
+  code,
+  engineConfig,
+}: {
+  prompt: string;
+  code: string;
+  engineConfig: EngineConfig;
+}): Promise<{
+  readScript: (writer: (data: string) => void) => Promise<string>;
+}> {
+  const fullPrompt = getRevisionPrompt(prompt, code);
+  const stream = await generateCompletion({
+    prompt: fullPrompt,
+    number: 1,
+    engineConfig,
+  });
+  const iterableStream = streamToIterable(stream);
+  return {
+    readScript: readData(iterableStream, ...shellCodeExclusions),
+  };
+}
+
+export function readData(
+  iterableStream: AsyncGenerator<string>,
+  ...excluded: (RegExp | string | undefined)[]
+): (writer: (data: string) => void) => Promise<string> {
+  return (writer: (data: string) => void): Promise<string> =>
+    new Promise<string>((resolve) => {
+      void readDataImpl(iterableStream, excluded, writer, resolve);
+    });
+}
+
+async function readDataImpl(
+  iterableStream: AsyncGenerator<string>,
+  excluded: (RegExp | string | undefined)[],
+  writer: (data: string) => void,
+  resolve: (data: string) => void
+): Promise<void> {
+  const handleStreamChunk = (chunk: string): string => {
+    const parsed = parseContent(chunk);
+    if (parsed) {
+      const sanitized = stripRegexPatterns(parsed, excluded);
+      writer(sanitized);
+      return sanitized;
+    }
+    return '';
+  };
+
+  try {
+    let fullResponse = '';
+    for await (const chunk of iterableStream) {
+      const content = handleStreamChunk(chunk);
+      fullResponse += content;
+    }
+    resolve(fullResponse);
+  } catch (error) {
+    logger.error('Error reading data stream', error);
+    resolve('');
+  }
+}
+
+function parseContent(payload: string): string {
+  try {
+    if (payload.startsWith('data: ')) {
+      const jsonStr = payload.slice(6);
+      if (jsonStr === '[DONE]') {
+        return '';
+      }
+      const parsed = JSON.parse(jsonStr);
+      return parsed.choices?.[0]?.delta?.content || '';
+    }
+  } catch (err) {
+    logger.debug('Failed to parse content', { payload, error: err });
+  }
+  return '';
+}
+
+function getExplanationPrompt(script: string) {
+  return dedent`
+    ${explainScript} Please reply in ${i18n.getCurrentLanguagenName()}
+
+    The script: ${script}
+  `;
+}
+
+function getShellDetails() {
+  const shellDetails = detectShell();
+  return dedent`
+    The target shell is ${shellDetails}
+  `;
+}
+const shellDetails = getShellDetails();
+
+const explainScript = dedent`
+  Please provide a clear, concise description of the script, using minimal words. Outline the steps in a list format.
+`;
+
+function getOperationSystemDetails() {
+  const os = require('@nexssp/os/legacy');
+  return os.name();
+}
+const generationDetails = dedent`
+  Only reply with the single line command surrounded by three backticks. It must be able to be directly run in the target shell. Do not include any other text.
+
+  Make sure the command runs on ${getOperationSystemDetails()} operating system.
+`;
+
+function getFullPrompt(prompt: string) {
+  return dedent`
+    Create a single line command that one can enter in a terminal and run, based on what is specified in the prompt.
+
+    ${shellDetails}
+
+    ${generationDetails}
+
+    ${explainInSecondRequest ? '' : explainScript}
+
+    The prompt is: ${prompt}
+  `;
+}
+
+function getRevisionPrompt(prompt: string, code: string) {
+  return dedent`
+    Update the following script based on what is asked in the following prompt.
+
+    The script: ${code}
+
+    The prompt: ${prompt}
+
+    ${generationDetails}
+  `;
+}
+
+export async function getModels(
+  engineConfig: EngineConfig,
+): Promise<string[]> {
+  logger.debug('Requesting GigaChat models list...');
+  const gigaChat = getGigaChat(engineConfig);
+  const response = await gigaChat.getModels();
+  // Guard against a missing data field so the log line below cannot throw
+  const models = response.data?.map((model: any) => model.id) ?? [];
+  logger.debug(`Retrieved ${models.length} models`, { models });
+  return models;
+}
+
+export function createGigaChatEngine(
+  engineConfig: EngineConfig
+): EngineApi {
+  return {
+    async getScriptAndInfo(params: { prompt: string }) {
+      return getScriptAndInfo({ ...params, engineConfig });
+    },
+
+    async generateCompletion(params: {
+      prompt: string | ChatMessage[];
+      number?: number;
+    }) {
+      return generateCompletion({ ...params, engineConfig });
+    },
+
+    async getExplanation(params: { script: string }) {
+      return getExplanation({ ...params, engineConfig });
+    },
+
+    async getRevision(params: { prompt: string; code: string }) {
+      return getRevision({ ...params, engineConfig });
+    },
+
+    readData(
+      iterableStream: AsyncGenerator<string>,
+      ...excluded: (RegExp | string | undefined)[]
+    ) {
+      return readData(iterableStream, ...excluded);
+    },
+
+    async getModels() {
+      return getModels(engineConfig);
+    },
+  };
+}
diff --git a/src/helpers/logger.ts b/src/helpers/logger.ts
new file mode 100644
index 0000000..75914ec
--- /dev/null
+++ b/src/helpers/logger.ts
@@ -0,0 +1,117 @@
+import fs from 'fs';
+import path from 'path';
+import { createWriteStream } from 'fs';
+
+const LOG_DIR = path.join(process.cwd(), 'logs');
+const LOG_FILE = path.join(LOG_DIR, 'ai-shell.log');
+const MAX_FILE_SIZE = 10 * 1024 * 1024; // 10MB
+
+type LogLevel = 'debug' | 'info' | 'error';
+
+interface LogEntry {
+  timestamp: string;
+  level: LogLevel;
+  message: string;
+  data?: any;
+}
+
+class Logger {
+  private stream: fs.WriteStream | null = null;
+  private isDevMode: boolean = false;
+  private closed = false;
+
+  constructor() {
+    // Log only in development runs (invoked via npx); argv[1] may be undefined
+    this.isDevMode = (process.argv[1] ?? '').includes('npx');
+    if (this.isDevMode) {
+      this.ensureLogDirectory();
+      this.rotateIfNeeded();
+      this.stream = createWriteStream(LOG_FILE, {
+        flags: 'a',
+        autoClose: false,
+      });
+    }
+    // Flush the log on shutdown. The signal handlers must exit explicitly:
+    // registering a listener removes Node's default terminate-on-signal behavior
+    process.on('exit', () => this.close());
+    process.once('SIGINT', () => {
+      this.close();
+      process.exit(130);
+    });
+    process.once('SIGTERM', () => {
+      this.close();
+      process.exit(143);
+    });
+  }
+
+  private ensureLogDirectory() {
+    if (!fs.existsSync(LOG_DIR)) {
+      fs.mkdirSync(LOG_DIR, { recursive: true });
+    }
+  }
+
+  private rotateIfNeeded() {
+    if (fs.existsSync(LOG_FILE)) {
+      const stats = fs.statSync(LOG_FILE);
+      if (stats.size >= MAX_FILE_SIZE) {
+        const timestamp = new Date().toISOString().replace(/[:.]/g, '-');
+        const backupFile = path.join(LOG_DIR, `ai-shell-${timestamp}.log`);
+        fs.renameSync(LOG_FILE, backupFile);
+      }
+    }
+  }
+
+  private formatLogEntry(level: LogLevel, message: string, data?: any): string {
+    const timestamp = new Date().toISOString();
+    let dataString = '';
+    if (data) {
+      try {
+        dataString = JSON.stringify(data, null, 2);
+      } catch (err) {
+        // Fall back to a replacer that breaks circular references
+        const seen = new WeakSet<object>();
+        dataString = JSON.stringify(
+          data,
+          (key, value) => {
+            if (typeof value === 'object' && value !== null) {
+              if (seen.has(value)) {
+                return '[Circular]';
+              }
+              seen.add(value);
+            }
+            return value;
+          },
+          2
+        );
+      }
+    }
+    return `${timestamp} [${level}] ${message} ${dataString}\n`;
+  }
+
+  debug(message: string, data?: any) {
+    if (this.isDevMode && this.stream) {
+      this.stream.write(this.formatLogEntry('debug', message, data));
+    }
+  }
+
+  info(message: string, data?: any) {
+    if (this.isDevMode && this.stream) {
+      this.stream.write(this.formatLogEntry('info', message, data));
+    }
+  }
+
+  error(message: string, data?: any) {
+    if (this.isDevMode && this.stream) {
+      this.stream.write(this.formatLogEntry('error', message, data));
+    }
+  }
+
+  close() {
+    if (this.stream && !this.closed) {
+      this.closed = true;
+      try {
+        this.stream.end();
+        this.stream = null;
+      } catch (err) {
+        // Ignore errors when closing the logging stream
+      }
+    }
+  }
+}
+
+export const logger = new Logger();
diff --git a/src/prompt.ts b/src/prompt.ts
index 145c07b..d271925 100644
--- a/src/prompt.ts
+++ b/src/prompt.ts
@@ -1,17 +1,15 @@
 import * as p from '@clack/prompts';
 import { execaCommand } from 'execa';
 import { cyan, dim } from 'kolorist';
-import {
-  getExplanation,
-  getRevision,
-  getScriptAndInfo,
-} from './helpers/completion';
 import { getConfig } from './helpers/config';
 import { projectName } from './helpers/constants';
 import { KnownError } from './helpers/error';
 import clipboardy from 'clipboardy';
 import i18n from './helpers/i18n';
 import { appendToShellHistory } from './helpers/shell-history';
+import { EngineConfig } from './helpers/engines/config-engine';
+import { createEngine } from './helpers/engines/engine-factory';
+import { getEngineConfig } from './helpers/config';
 
 const init = async () => {
   try {
@@ -101,13 +99,9 @@ export async function prompt({
   usePrompt,
   silentMode,
 }: { usePrompt?: string; silentMode?: boolean } = {}) {
-  const {
-    OPENAI_KEY: key,
-    SILENT_MODE,
-    OPENAI_API_ENDPOINT: apiEndpoint,
-    MODEL: model,
-  } = await getConfig();
-  const skipCommandExplanation = silentMode || SILENT_MODE;
+  const config = await getConfig();
+  const engineConfig: EngineConfig = getEngineConfig(config);
+  const skipCommandExplanation = silentMode || config.SILENT_MODE;
 
   console.log('');
   p.intro(`${cyan(`${projectName}`)}`);
@@ -115,11 +109,9 @@ export async function prompt({
   const thePrompt = usePrompt || (await getPrompt());
   const spin = p.spinner();
   spin.start(i18n.t(`Loading...`));
-  const { readInfo, readScript } = await getScriptAndInfo({
+  const engine = createEngine(engineConfig);
+  const { readInfo, readScript } = await engine.getScriptAndInfo({
     prompt: thePrompt,
-    key,
-    model,
-    apiEndpoint,
   });
   spin.stop(`${i18n.t('Your script')}:`);
   console.log('');
@@ -131,11 +123,9 @@
     spin.start(i18n.t(`Getting explanation...`));
     const info = await readInfo(process.stdout.write.bind(process.stdout));
     if (!info) {
-      const { readExplanation } = await getExplanation({
+      const engine = createEngine(engineConfig);
+      const { readExplanation } = await engine.getExplanation({
         script,
-        key,
-        model,
-        apiEndpoint,
       });
       spin.stop(`${i18n.t('Explanation')}:`);
       console.log('');
@@ -146,15 +136,17 @@
     }
   }
 
-  await runOrReviseFlow(script, key, model, apiEndpoint, silentMode);
+  await runOrReviseFlow(
+    script,
+    engineConfig,
+    silentMode,
+  );
 }
 
 async function runOrReviseFlow(
   script: string,
-  key: string,
-  model: string,
-  apiEndpoint: string,
-  silentMode?: boolean
+  engineConfig: EngineConfig,
+  silentMode?: boolean,
 ) {
   const emptyScript = script.trim() === '';
 
@@ -191,7 +183,11 @@ async function runOrReviseFlow(
         label: '🔁 ' + i18n.t('Revise'),
         hint: i18n.t('Give feedback via prompt and get a new result'),
         value: async () => {
-          await revisionFlow(script, key, model, apiEndpoint, silentMode);
+          await revisionFlow(
+            script,
+            engineConfig,
+            silentMode,
+          );
         },
       },
       {
@@ -220,20 +216,16 @@ async function runOrReviseFlow(
 
 async function revisionFlow(
   currentScript: string,
-  key: string,
-  model: string,
-  apiEndpoint: string,
-  silentMode?: boolean
+  engineConfig: EngineConfig,
+  silentMode?: boolean,
 ) {
   const revision = await promptForRevision();
   const spin = p.spinner();
   spin.start(i18n.t(`Loading...`));
-  const { readScript } = await getRevision({
+  const engine = createEngine(engineConfig);
+  const { readScript } = await engine.getRevision({
     prompt: revision,
     code: currentScript,
-    key,
-    model,
-    apiEndpoint,
   });
 
   spin.stop(`${i18n.t(`Your new script`)}:`);
@@ -246,11 +238,9 @@ async function revisionFlow(
   if (!silentMode) {
     const infoSpin = p.spinner();
     infoSpin.start(i18n.t(`Getting explanation...`));
-    const { readExplanation } = await getExplanation({
+    const engine = createEngine(engineConfig);
+    const { readExplanation } = await engine.getExplanation({
       script,
-      key,
-      model,
-      apiEndpoint,
    });
 
     infoSpin.stop(`${i18n.t('Explanation')}:`);
@@ -261,7 +251,11 @@
     console.log(dim('•'));
   }
 
-  await runOrReviseFlow(script, key, model, apiEndpoint, silentMode);
+  await runOrReviseFlow(
+    script,
+    engineConfig,
+    silentMode,
+  );
 }
 
 export const parseAssert = (name: string, condition: any, message: string) => {
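
---

Reviewer note: a minimal, self-contained sketch (not part of the patch) of how the pieces above compose: `getEngineConfig()` derives the engine settings from the saved config, `createEngine()` resolves the matching `EngineApi` implementation, and the returned readers stream the response. The `demo()` wrapper and the example prompt are illustrative only; error handling and the clack UI from `src/prompt.ts` are omitted.

```ts
// Sketch only: mirrors what src/prompt.ts does after this patch.
import { getConfig, getEngineConfig } from './helpers/config';
import { createEngine } from './helpers/engines/engine-factory';

async function demo(): Promise<void> {
  const config = await getConfig();
  // Picks the OpenAI or GigaChat implementation based on the configured engine type
  const engine = createEngine(getEngineConfig(config));
  const { readScript, readInfo } = await engine.getScriptAndInfo({
    prompt: 'find all log files',
  });
  // Each reader drains what remains of the shared stream and resolves with the
  // text it consumed; prompt.ts calls readScript first, then readInfo, and
  // falls back to getExplanation() when readInfo comes back empty.
  const script = await readScript((chunk) => process.stdout.write(chunk));
  const info = await readInfo((chunk) => process.stdout.write(chunk));
  process.stdout.write(`\nscript: ${script}\ninfo: ${info}\n`);
}

demo();
```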