diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml
index cd602d04..419dc73f 100644
--- a/.github/workflows/build.yml
+++ b/.github/workflows/build.yml
@@ -188,7 +188,7 @@ jobs:
             const {versions: latestNodeVersions} = await getLatestNodeVersions(Date.now() - 1000 * 60 * 60 * 24 * 14);
-            const nodeVersion = latestNodeVersions.get(18);
+            const nodeVersion = latestNodeVersions.get(20);
             const windowsOnArmNodeVersion = latestNodeVersions.get(20);

             if (nodeVersion == null || windowsOnArmNodeVersion == null) {
@@ -389,7 +389,7 @@ jobs:

   model-dependent-tests:
     name: Model dependent tests
-    runs-on: ubuntu-24.04
+    runs-on: macos-13
     env:
       NODE_LLAMA_CPP_GPU: false
     needs:
@@ -412,10 +412,16 @@ jobs:
           name: llama.cpp
           path: llama

-      - name: Install dependencies on Ubuntu
+#      - name: Install dependencies on Ubuntu
+#        run: |
+#          sudo apt-get update
+#          sudo apt-get install ninja-build cmake
+
+      - name: Install dependencies on macOS
+        if: matrix.config.name == 'macOS'
         run: |
-          sudo apt-get update
-          sudo apt-get install ninja-build cmake
+          brew install cmake ninja
+          alias make=cmake

       - name: Install modules
         run: npm ci
@@ -427,18 +433,29 @@ jobs:
         run: node ./dist/cli/cli.js inspect gpu

       - name: Cache models
-        id: cache-test-models
-        uses: actions/cache@v4
+        id: cache-restore-test-models
+        uses: actions/cache/restore@v4
         with:
           path: "test/.models/**.gguf"
           key: cache-test-models-${{ runner.os }}-${{ github.workflow }}

       - name: Download models or ensure all models are downloaded
+        id: download-all-test-models
         run: npm run dev:setup:downloadAllTestModels

       - name: Run model dependent tests
+        env:
+          NODE_OPTIONS: "--max-old-space-size=4096"
         run: npm run test:modelDependent

+      - name: Save cached models
+        id: cache-save-test-models
+        if: steps.download-all-test-models.outcome == 'success' && always()
+        uses: actions/cache/save@v4
+        with:
+          path: "test/.models/**.gguf"
+          key: cache-test-models-${{ runner.os }}-${{ github.workflow }}
+
   release:
     name: Release
     if: needs.resolve-next-release.outputs.next-version != '' && needs.resolve-next-release.outputs.next-version != 'false'
diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml
index a29ffda6..65d916f3 100644
--- a/.github/workflows/test.yml
+++ b/.github/workflows/test.yml
@@ -15,7 +15,7 @@ jobs:
       - uses: actions/checkout@v4
       - uses: actions/setup-node@v4
         with:
-          node-version: "18"
+          node-version: "20"
       - name: Install modules
         run: npm ci
       - name: ESLint
diff --git a/docs/guide/development.md b/docs/guide/development.md
index e67b2cc4..5cf1fa87 100644
--- a/docs/guide/development.md
+++ b/docs/guide/development.md
@@ -79,6 +79,15 @@ lldb node -- ./node_modules/.bin/vite-node ./src/cli/cli.ts chat
+```
+
+After it finishes loading, type `run` and press Enter for the execution of `node` to start.
+When the process crashes, type `bt full` and press Enter to see the stack trace.
+
 ### Updating the Documentation

 All the documentation is written in Markdown files in the `docs` directory.
 To see the changes you made to the documentation, run the following command:
diff --git a/docs/guide/electron.md b/docs/guide/electron.md
index 04984031..cc24321b 100644
--- a/docs/guide/electron.md
+++ b/docs/guide/electron.md
@@ -9,7 +9,7 @@ Trying to use `node-llama-cpp` on a renderer process will crash the application.
 You can scaffold an example Electron app that uses `node-llama-cpp` with complete configuration for packaging and distribution by running the following command:
 ```shell
-npm create node-llama-cpp@latest --template electron-typescript-react
+npm create node-llama-cpp@latest -- --template electron-typescript-react
 ```

 ::: tip
diff --git a/docs/guide/function-calling.md b/docs/guide/function-calling.md
index 36823679..759b025a 100644
--- a/docs/guide/function-calling.md
+++ b/docs/guide/function-calling.md
@@ -408,3 +408,42 @@ getFruitPrice({name: "banana"})
 result: {name: "banana", price: "$4"};
 ```
+
+## Troubleshooting {#troubleshooting}
+### Function Calling Issues With [`JinjaTemplateChatWrapper`](../api/classes/JinjaTemplateChatWrapper.md) {#troubleshoot-jinja-function-calling-issues}
+If function calling doesn't work well (or at all) with a model you're trying to use,
+and the [chat wrapper](./chat-wrapper.md) used by your [`LlamaChatSession`](../api/classes/LlamaChatSession.md)
+is a [`JinjaTemplateChatWrapper`](../api/classes/JinjaTemplateChatWrapper.md)
+(you can check that by accessing [`.chatWrapper`](../api/classes/LlamaChatSession.md#chatwrapper)),
+you can try to force it to not use the function calling template defined in the Jinja template.
+
+Doing this can help you achieve better function calling performance with some models.
+
+To do this, create your [`LlamaChatSession`](../api/classes/LlamaChatSession.md) like this:
+```typescript
+import {fileURLToPath} from "url";
+import path from "path";
+import {getLlama} from "node-llama-cpp";
+
+const __dirname = path.dirname(fileURLToPath(import.meta.url));
+
+const llama = await getLlama();
+const model = await llama.loadModel({
+    modelPath: path.join(__dirname, "models", "Meta-Llama-3-8B-Instruct.Q4_K_M.gguf")
+});
+const context = await model.createContext();
+
+// ---cut---
+import {LlamaChatSession, resolveChatWrapper} from "node-llama-cpp";
+
+const session = new LlamaChatSession({
+    contextSequence: context.getSequence(),
+    chatWrapper: resolveChatWrapper(model, {
+        customWrapperSettings: {
+            jinjaTemplate: {
+                functionCallMessageTemplate: "noJinja"
+            }
+        }
+    })
+});
+```
diff --git a/docs/public/giscus/style.css b/docs/public/giscus/style.css
index a85561f8..d4983df1 100644
--- a/docs/public/giscus/style.css
+++ b/docs/public/giscus/style.css
@@ -31,6 +31,10 @@ body, #__next {
         border-start-start-radius: 0;
     }

+    .gsc-comment .gsc-replies {
+        padding-top: 0;
+    }
+
     .gsc-reactions-popover {
         border-radius: 12px;
diff --git a/eslint.config.js b/eslint.config.js
index 1a1ae9cd..4eadb835 100644
--- a/eslint.config.js
+++ b/eslint.config.js
@@ -148,7 +148,8 @@ export default tseslint.config({
             ]
         }],
         "@stylistic/no-trailing-spaces": ["off"],
-        "@stylistic/no-multi-spaces": ["warn"]
+        "@stylistic/no-multi-spaces": ["warn"],
+        "@stylistic/generator-star-spacing": ["off"]
     }
 }, {
     files: ["**/**.{,c,m}ts"],
diff --git a/llama/addon/AddonContext.cpp b/llama/addon/AddonContext.cpp
index d3d4316f..53b39cad 100644
--- a/llama/addon/AddonContext.cpp
+++ b/llama/addon/AddonContext.cpp
@@ -583,7 +583,7 @@ Napi::Value AddonContext::DisposeSequence(const Napi::CallbackInfo& info) {
     int32_t sequenceId = info[0].As<Napi::Number>().Int32Value();

-    bool result = llama_kv_cache_seq_rm(ctx, sequenceId, -1, -1);
+    bool result = llama_kv_self_seq_rm(ctx, sequenceId, -1, -1);

     if (!result) {
         Napi::Error::New(info.Env(), "Failed to dispose sequence").ThrowAsJavaScriptException();
@@ -602,7 +602,7 @@ Napi::Value AddonContext::RemoveTokenCellsFromSequence(const Napi::CallbackInfo& info) {
     int32_t startPos = info[1].As<Napi::Number>().Int32Value();
     int32_t endPos = info[2].As<Napi::Number>().Int32Value();

-    bool result = llama_kv_cache_seq_rm(ctx, sequenceId, startPos, endPos);
+    bool result = llama_kv_self_seq_rm(ctx, sequenceId, startPos, endPos);

     return Napi::Boolean::New(info.Env(), result);
 }
@@ -617,7 +617,7 @@ Napi::Value AddonContext::ShiftSequenceTokenCells(const Napi::CallbackInfo& info) {
     int32_t endPos = info[2].As<Napi::Number>().Int32Value();
     int32_t shiftDelta = info[3].As<Napi::Number>().Int32Value();

-    llama_kv_cache_seq_add(ctx, sequenceId, startPos, endPos, shiftDelta);
+    llama_kv_self_seq_add(ctx, sequenceId, startPos, endPos, shiftDelta);

     return info.Env().Undefined();
 }
@@ -639,6 +639,7 @@ Napi::Value AddonContext::GetEmbedding(const Napi::CallbackInfo& info) {
     }

     int32_t inputTokensLength = info[0].As<Napi::Number>().Int32Value();
+    int32_t maxVectorSize = (info.Length() > 1 && info[1].IsNumber()) ? info[1].As<Napi::Number>().Int32Value() : 0;

     if (inputTokensLength <= 0) {
         Napi::Error::New(info.Env(), "Invalid input tokens length").ThrowAsJavaScriptException();
@@ -650,15 +651,16 @@
     const auto* embeddings = pooling_type == LLAMA_POOLING_TYPE_NONE ? NULL : llama_get_embeddings_seq(ctx, 0);
     if (embeddings == NULL) {
         embeddings = llama_get_embeddings_ith(ctx, inputTokensLength - 1);
+    }

-        if (embeddings == NULL) {
-            Napi::Error::New(info.Env(), std::string("Failed to get embeddings for token ") + std::to_string(inputTokensLength - 1)).ThrowAsJavaScriptException();
-            return info.Env().Undefined();
-        }
+    if (embeddings == NULL) {
+        Napi::Error::New(info.Env(), std::string("Failed to get embeddings for token ") + std::to_string(inputTokensLength - 1)).ThrowAsJavaScriptException();
+        return info.Env().Undefined();
     }

-    Napi::Float64Array result = Napi::Float64Array::New(info.Env(), n_embd);
-    for (size_t i = 0; i < n_embd; ++i) {
+    size_t resultSize = maxVectorSize == 0 ?
n_embd : std::min(n_embd, maxVectorSize); + Napi::Float64Array result = Napi::Float64Array::New(info.Env(), resultSize); + for (size_t i = 0; i < resultSize; i++) { result[i] = embeddings[i]; } diff --git a/package-lock.json b/package-lock.json index 8751dc70..bde90819 100644 --- a/package-lock.json +++ b/package-lock.json @@ -26,13 +26,13 @@ "is-unicode-supported": "^2.1.0", "lifecycle-utils": "^2.0.0", "log-symbols": "^7.0.0", - "nanoid": "^5.0.9", - "node-addon-api": "^8.3.0", - "octokit": "^4.1.0", - "ora": "^8.1.1", + "nanoid": "^5.1.5", + "node-addon-api": "^8.3.1", + "octokit": "^4.1.2", + "ora": "^8.2.0", "pretty-ms": "^9.2.0", "proper-lockfile": "^4.1.2", - "semver": "^7.7.0", + "semver": "^7.7.1", "simple-git": "^3.27.0", "slice-ansi": "^7.1.0", "stdout-update": "^4.0.1", @@ -46,56 +46,56 @@ "node-llama-cpp": "dist/cli/cli.js" }, "devDependencies": { - "@commitlint/cli": "^19.6.1", - "@commitlint/config-conventional": "^19.6.0", - "@eslint/compat": "^1.2.5", - "@fontsource/inter": "^5.1.1", - "@nolebase/vitepress-plugin-git-changelog": "^2.12.1", - "@nolebase/vitepress-plugin-og-image": "^2.12.1", + "@commitlint/cli": "^19.8.0", + "@commitlint/config-conventional": "^19.8.0", + "@eslint/compat": "^1.2.7", + "@fontsource/inter": "^5.2.5", + "@nolebase/vitepress-plugin-git-changelog": "^2.15.1", + "@nolebase/vitepress-plugin-og-image": "^2.15.1", "@resvg/resvg-js": "^2.6.2", - "@semantic-release/exec": "^6.0.3", + "@semantic-release/exec": "^7.0.3", "@semantic-release/github": "11.0.1", "@semantic-release/npm": "12.0.1", - "@shikijs/vitepress-twoslash": "^2.2.0", - "@stylistic/eslint-plugin": "^3.0.1", + "@shikijs/vitepress-twoslash": "^3.2.1", + "@stylistic/eslint-plugin": "^4.2.0", "@types/async-retry": "^1.4.9", "@types/bytes": "^3.1.5", "@types/cross-spawn": "^6.0.6", "@types/fs-extra": "^11.0.4", - "@types/node": "^22.12.0", + "@types/node": "^22.13.11", "@types/proper-lockfile": "^4.1.4", "@types/semver": "^7.5.8", "@types/validate-npm-package-name": "^4.0.2", "@types/which": "^3.0.4", "@types/yargs": "^17.0.33", - "@vitest/coverage-v8": "^3.0.4", - "@vitest/ui": "^3.0.4", - "electron": "^34.0.2", - "eslint": "^9.19.0", - "eslint-import-resolver-typescript": "^3.7.0", + "@vitest/coverage-v8": "^3.0.9", + "@vitest/ui": "^3.0.9", + "electron": "^35.0.3", + "eslint": "^9.23.0", + "eslint-import-resolver-typescript": "^4.2.2", "eslint-plugin-import": "^2.31.0", - "eslint-plugin-jsdoc": "^50.6.3", - "eslint-plugin-n": "^17.15.1", + "eslint-plugin-jsdoc": "^50.6.8", + "eslint-plugin-n": "^17.16.2", "feed": "^4.2.2", "husky": "^9.1.7", "rehype": "^13.0.2", "rimraf": "^6.0.1", - "semantic-release": "^24.2.1", + "semantic-release": "^24.2.3", "sharp": "^0.33.5", "tslib": "^2.8.1", - "typedoc": "^0.27.6", - "typedoc-plugin-markdown": "^4.4.1", - "typedoc-plugin-mdn-links": "^4.0.10", + "typedoc": "^0.28.1", + "typedoc-plugin-markdown": "^4.5.2", + "typedoc-plugin-mdn-links": "^5.0.1", "typedoc-vitepress-theme": "^1.1.2", - "typescript": "^5.7.3", - "typescript-eslint": "^8.22.0", - "vite-node": "^3.0.4", + "typescript": "^5.8.2", + "typescript-eslint": "^8.27.0", + "vite-node": "^3.0.9", "vitepress": "^1.6.3", - "vitest": "^3.0.4", - "zx": "^8.3.1" + "vitest": "^3.0.9", + "zx": "^8.4.1" }, "engines": { - "node": ">=18.0.0" + "node": ">=20.0.0" }, "funding": { "type": "github", @@ -466,17 +466,17 @@ } }, "node_modules/@commitlint/cli": { - "version": "19.6.1", - "resolved": "https://registry.npmjs.org/@commitlint/cli/-/cli-19.6.1.tgz", - "integrity": 
"sha512-8hcyA6ZoHwWXC76BoC8qVOSr8xHy00LZhZpauiD0iO0VYbVhMnED0da85lTfIULxl7Lj4c6vZgF0Wu/ed1+jlQ==", + "version": "19.8.0", + "resolved": "https://registry.npmjs.org/@commitlint/cli/-/cli-19.8.0.tgz", + "integrity": "sha512-t/fCrLVu+Ru01h0DtlgHZXbHV2Y8gKocTR5elDOqIRUzQd0/6hpt2VIWOj9b3NDo7y4/gfxeR2zRtXq/qO6iUg==", "dev": true, "license": "MIT", "dependencies": { - "@commitlint/format": "^19.5.0", - "@commitlint/lint": "^19.6.0", - "@commitlint/load": "^19.6.1", - "@commitlint/read": "^19.5.0", - "@commitlint/types": "^19.5.0", + "@commitlint/format": "^19.8.0", + "@commitlint/lint": "^19.8.0", + "@commitlint/load": "^19.8.0", + "@commitlint/read": "^19.8.0", + "@commitlint/types": "^19.8.0", "tinyexec": "^0.3.0", "yargs": "^17.0.0" }, @@ -488,13 +488,13 @@ } }, "node_modules/@commitlint/config-conventional": { - "version": "19.6.0", - "resolved": "https://registry.npmjs.org/@commitlint/config-conventional/-/config-conventional-19.6.0.tgz", - "integrity": "sha512-DJT40iMnTYtBtUfw9ApbsLZFke1zKh6llITVJ+x9mtpHD08gsNXaIRqHTmwTZL3dNX5+WoyK7pCN/5zswvkBCQ==", + "version": "19.8.0", + "resolved": "https://registry.npmjs.org/@commitlint/config-conventional/-/config-conventional-19.8.0.tgz", + "integrity": "sha512-9I2kKJwcAPwMoAj38hwqFXG0CzS2Kj+SAByPUQ0SlHTfb7VUhYVmo7G2w2tBrqmOf7PFd6MpZ/a1GQJo8na8kw==", "dev": true, "license": "MIT", "dependencies": { - "@commitlint/types": "^19.5.0", + "@commitlint/types": "^19.8.0", "conventional-changelog-conventionalcommits": "^7.0.2" }, "engines": { @@ -502,13 +502,13 @@ } }, "node_modules/@commitlint/config-validator": { - "version": "19.5.0", - "resolved": "https://registry.npmjs.org/@commitlint/config-validator/-/config-validator-19.5.0.tgz", - "integrity": "sha512-CHtj92H5rdhKt17RmgALhfQt95VayrUo2tSqY9g2w+laAXyk7K/Ef6uPm9tn5qSIwSmrLjKaXK9eiNuxmQrDBw==", + "version": "19.8.0", + "resolved": "https://registry.npmjs.org/@commitlint/config-validator/-/config-validator-19.8.0.tgz", + "integrity": "sha512-+r5ZvD/0hQC3w5VOHJhGcCooiAVdynFlCe2d6I9dU+PvXdV3O+fU4vipVg+6hyLbQUuCH82mz3HnT/cBQTYYuA==", "dev": true, "license": "MIT", "dependencies": { - "@commitlint/types": "^19.5.0", + "@commitlint/types": "^19.8.0", "ajv": "^8.11.0" }, "engines": { @@ -516,13 +516,13 @@ } }, "node_modules/@commitlint/ensure": { - "version": "19.5.0", - "resolved": "https://registry.npmjs.org/@commitlint/ensure/-/ensure-19.5.0.tgz", - "integrity": "sha512-Kv0pYZeMrdg48bHFEU5KKcccRfKmISSm9MvgIgkpI6m+ohFTB55qZlBW6eYqh/XDfRuIO0x4zSmvBjmOwWTwkg==", + "version": "19.8.0", + "resolved": "https://registry.npmjs.org/@commitlint/ensure/-/ensure-19.8.0.tgz", + "integrity": "sha512-kNiNU4/bhEQ/wutI1tp1pVW1mQ0QbAjfPRo5v8SaxoVV+ARhkB8Wjg3BSseNYECPzWWfg/WDqQGIfV1RaBFQZg==", "dev": true, "license": "MIT", "dependencies": { - "@commitlint/types": "^19.5.0", + "@commitlint/types": "^19.8.0", "lodash.camelcase": "^4.3.0", "lodash.kebabcase": "^4.1.1", "lodash.snakecase": "^4.1.1", @@ -534,9 +534,9 @@ } }, "node_modules/@commitlint/execute-rule": { - "version": "19.5.0", - "resolved": "https://registry.npmjs.org/@commitlint/execute-rule/-/execute-rule-19.5.0.tgz", - "integrity": "sha512-aqyGgytXhl2ejlk+/rfgtwpPexYyri4t8/n4ku6rRJoRhGZpLFMqrZ+YaubeGysCP6oz4mMA34YSTaSOKEeNrg==", + "version": "19.8.0", + "resolved": "https://registry.npmjs.org/@commitlint/execute-rule/-/execute-rule-19.8.0.tgz", + "integrity": "sha512-fuLeI+EZ9x2v/+TXKAjplBJWI9CNrHnyi5nvUQGQt4WRkww/d95oVRsc9ajpt4xFrFmqMZkd/xBQHZDvALIY7A==", "dev": true, "license": "MIT", "engines": { @@ -544,13 +544,13 @@ } }, "node_modules/@commitlint/format": { 
- "version": "19.5.0", - "resolved": "https://registry.npmjs.org/@commitlint/format/-/format-19.5.0.tgz", - "integrity": "sha512-yNy088miE52stCI3dhG/vvxFo9e4jFkU1Mj3xECfzp/bIS/JUay4491huAlVcffOoMK1cd296q0W92NlER6r3A==", + "version": "19.8.0", + "resolved": "https://registry.npmjs.org/@commitlint/format/-/format-19.8.0.tgz", + "integrity": "sha512-EOpA8IERpQstxwp/WGnDArA7S+wlZDeTeKi98WMOvaDLKbjptuHWdOYYr790iO7kTCif/z971PKPI2PkWMfOxg==", "dev": true, "license": "MIT", "dependencies": { - "@commitlint/types": "^19.5.0", + "@commitlint/types": "^19.8.0", "chalk": "^5.3.0" }, "engines": { @@ -558,13 +558,13 @@ } }, "node_modules/@commitlint/is-ignored": { - "version": "19.6.0", - "resolved": "https://registry.npmjs.org/@commitlint/is-ignored/-/is-ignored-19.6.0.tgz", - "integrity": "sha512-Ov6iBgxJQFR9koOupDPHvcHU9keFupDgtB3lObdEZDroiG4jj1rzky60fbQozFKVYRTUdrBGICHG0YVmRuAJmw==", + "version": "19.8.0", + "resolved": "https://registry.npmjs.org/@commitlint/is-ignored/-/is-ignored-19.8.0.tgz", + "integrity": "sha512-L2Jv9yUg/I+jF3zikOV0rdiHUul9X3a/oU5HIXhAJLE2+TXTnEBfqYP9G5yMw/Yb40SnR764g4fyDK6WR2xtpw==", "dev": true, "license": "MIT", "dependencies": { - "@commitlint/types": "^19.5.0", + "@commitlint/types": "^19.8.0", "semver": "^7.6.0" }, "engines": { @@ -572,32 +572,32 @@ } }, "node_modules/@commitlint/lint": { - "version": "19.6.0", - "resolved": "https://registry.npmjs.org/@commitlint/lint/-/lint-19.6.0.tgz", - "integrity": "sha512-LRo7zDkXtcIrpco9RnfhOKeg8PAnE3oDDoalnrVU/EVaKHYBWYL1DlRR7+3AWn0JiBqD8yKOfetVxJGdEtZ0tg==", + "version": "19.8.0", + "resolved": "https://registry.npmjs.org/@commitlint/lint/-/lint-19.8.0.tgz", + "integrity": "sha512-+/NZKyWKSf39FeNpqhfMebmaLa1P90i1Nrb1SrA7oSU5GNN/lksA4z6+ZTnsft01YfhRZSYMbgGsARXvkr/VLQ==", "dev": true, "license": "MIT", "dependencies": { - "@commitlint/is-ignored": "^19.6.0", - "@commitlint/parse": "^19.5.0", - "@commitlint/rules": "^19.6.0", - "@commitlint/types": "^19.5.0" + "@commitlint/is-ignored": "^19.8.0", + "@commitlint/parse": "^19.8.0", + "@commitlint/rules": "^19.8.0", + "@commitlint/types": "^19.8.0" }, "engines": { "node": ">=v18" } }, "node_modules/@commitlint/load": { - "version": "19.6.1", - "resolved": "https://registry.npmjs.org/@commitlint/load/-/load-19.6.1.tgz", - "integrity": "sha512-kE4mRKWWNju2QpsCWt428XBvUH55OET2N4QKQ0bF85qS/XbsRGG1MiTByDNlEVpEPceMkDr46LNH95DtRwcsfA==", + "version": "19.8.0", + "resolved": "https://registry.npmjs.org/@commitlint/load/-/load-19.8.0.tgz", + "integrity": "sha512-4rvmm3ff81Sfb+mcWT5WKlyOa+Hd33WSbirTVUer0wjS1Hv/Hzr07Uv1ULIV9DkimZKNyOwXn593c+h8lsDQPQ==", "dev": true, "license": "MIT", "dependencies": { - "@commitlint/config-validator": "^19.5.0", - "@commitlint/execute-rule": "^19.5.0", - "@commitlint/resolve-extends": "^19.5.0", - "@commitlint/types": "^19.5.0", + "@commitlint/config-validator": "^19.8.0", + "@commitlint/execute-rule": "^19.8.0", + "@commitlint/resolve-extends": "^19.8.0", + "@commitlint/types": "^19.8.0", "chalk": "^5.3.0", "cosmiconfig": "^9.0.0", "cosmiconfig-typescript-loader": "^6.1.0", @@ -610,9 +610,9 @@ } }, "node_modules/@commitlint/message": { - "version": "19.5.0", - "resolved": "https://registry.npmjs.org/@commitlint/message/-/message-19.5.0.tgz", - "integrity": "sha512-R7AM4YnbxN1Joj1tMfCyBryOC5aNJBdxadTZkuqtWi3Xj0kMdutq16XQwuoGbIzL2Pk62TALV1fZDCv36+JhTQ==", + "version": "19.8.0", + "resolved": "https://registry.npmjs.org/@commitlint/message/-/message-19.8.0.tgz", + "integrity": 
"sha512-qs/5Vi9bYjf+ZV40bvdCyBn5DvbuelhR6qewLE8Bh476F7KnNyLfdM/ETJ4cp96WgeeHo6tesA2TMXS0sh5X4A==", "dev": true, "license": "MIT", "engines": { @@ -620,13 +620,13 @@ } }, "node_modules/@commitlint/parse": { - "version": "19.5.0", - "resolved": "https://registry.npmjs.org/@commitlint/parse/-/parse-19.5.0.tgz", - "integrity": "sha512-cZ/IxfAlfWYhAQV0TwcbdR1Oc0/r0Ik1GEessDJ3Lbuma/MRO8FRQX76eurcXtmhJC//rj52ZSZuXUg0oIX0Fw==", + "version": "19.8.0", + "resolved": "https://registry.npmjs.org/@commitlint/parse/-/parse-19.8.0.tgz", + "integrity": "sha512-YNIKAc4EXvNeAvyeEnzgvm1VyAe0/b3Wax7pjJSwXuhqIQ1/t2hD3OYRXb6D5/GffIvaX82RbjD+nWtMZCLL7Q==", "dev": true, "license": "MIT", "dependencies": { - "@commitlint/types": "^19.5.0", + "@commitlint/types": "^19.8.0", "conventional-changelog-angular": "^7.0.0", "conventional-commits-parser": "^5.0.0" }, @@ -635,14 +635,14 @@ } }, "node_modules/@commitlint/read": { - "version": "19.5.0", - "resolved": "https://registry.npmjs.org/@commitlint/read/-/read-19.5.0.tgz", - "integrity": "sha512-TjS3HLPsLsxFPQj6jou8/CZFAmOP2y+6V4PGYt3ihbQKTY1Jnv0QG28WRKl/d1ha6zLODPZqsxLEov52dhR9BQ==", + "version": "19.8.0", + "resolved": "https://registry.npmjs.org/@commitlint/read/-/read-19.8.0.tgz", + "integrity": "sha512-6ywxOGYajcxK1y1MfzrOnwsXO6nnErna88gRWEl3qqOOP8MDu/DTeRkGLXBFIZuRZ7mm5yyxU5BmeUvMpNte5w==", "dev": true, "license": "MIT", "dependencies": { - "@commitlint/top-level": "^19.5.0", - "@commitlint/types": "^19.5.0", + "@commitlint/top-level": "^19.8.0", + "@commitlint/types": "^19.8.0", "git-raw-commits": "^4.0.0", "minimist": "^1.2.8", "tinyexec": "^0.3.0" @@ -652,14 +652,14 @@ } }, "node_modules/@commitlint/resolve-extends": { - "version": "19.5.0", - "resolved": "https://registry.npmjs.org/@commitlint/resolve-extends/-/resolve-extends-19.5.0.tgz", - "integrity": "sha512-CU/GscZhCUsJwcKTJS9Ndh3AKGZTNFIOoQB2n8CmFnizE0VnEuJoum+COW+C1lNABEeqk6ssfc1Kkalm4bDklA==", + "version": "19.8.0", + "resolved": "https://registry.npmjs.org/@commitlint/resolve-extends/-/resolve-extends-19.8.0.tgz", + "integrity": "sha512-CLanRQwuG2LPfFVvrkTrBR/L/DMy3+ETsgBqW1OvRxmzp/bbVJW0Xw23LnnExgYcsaFtos967lul1CsbsnJlzQ==", "dev": true, "license": "MIT", "dependencies": { - "@commitlint/config-validator": "^19.5.0", - "@commitlint/types": "^19.5.0", + "@commitlint/config-validator": "^19.8.0", + "@commitlint/types": "^19.8.0", "global-directory": "^4.0.1", "import-meta-resolve": "^4.0.0", "lodash.mergewith": "^4.6.2", @@ -670,25 +670,25 @@ } }, "node_modules/@commitlint/rules": { - "version": "19.6.0", - "resolved": "https://registry.npmjs.org/@commitlint/rules/-/rules-19.6.0.tgz", - "integrity": "sha512-1f2reW7lbrI0X0ozZMesS/WZxgPa4/wi56vFuJENBmed6mWq5KsheN/nxqnl/C23ioxpPO/PL6tXpiiFy5Bhjw==", + "version": "19.8.0", + "resolved": "https://registry.npmjs.org/@commitlint/rules/-/rules-19.8.0.tgz", + "integrity": "sha512-IZ5IE90h6DSWNuNK/cwjABLAKdy8tP8OgGVGbXe1noBEX5hSsu00uRlLu6JuruiXjWJz2dZc+YSw3H0UZyl/mA==", "dev": true, "license": "MIT", "dependencies": { - "@commitlint/ensure": "^19.5.0", - "@commitlint/message": "^19.5.0", - "@commitlint/to-lines": "^19.5.0", - "@commitlint/types": "^19.5.0" + "@commitlint/ensure": "^19.8.0", + "@commitlint/message": "^19.8.0", + "@commitlint/to-lines": "^19.8.0", + "@commitlint/types": "^19.8.0" }, "engines": { "node": ">=v18" } }, "node_modules/@commitlint/to-lines": { - "version": "19.5.0", - "resolved": "https://registry.npmjs.org/@commitlint/to-lines/-/to-lines-19.5.0.tgz", - "integrity": 
"sha512-R772oj3NHPkodOSRZ9bBVNq224DOxQtNef5Pl8l2M8ZnkkzQfeSTr4uxawV2Sd3ui05dUVzvLNnzenDBO1KBeQ==", + "version": "19.8.0", + "resolved": "https://registry.npmjs.org/@commitlint/to-lines/-/to-lines-19.8.0.tgz", + "integrity": "sha512-3CKLUw41Cur8VMjh16y8LcsOaKbmQjAKCWlXx6B0vOUREplp6em9uIVhI8Cv934qiwkbi2+uv+mVZPnXJi1o9A==", "dev": true, "license": "MIT", "engines": { @@ -696,9 +696,9 @@ } }, "node_modules/@commitlint/top-level": { - "version": "19.5.0", - "resolved": "https://registry.npmjs.org/@commitlint/top-level/-/top-level-19.5.0.tgz", - "integrity": "sha512-IP1YLmGAk0yWrImPRRc578I3dDUI5A2UBJx9FbSOjxe9sTlzFiwVJ+zeMLgAtHMtGZsC8LUnzmW1qRemkFU4ng==", + "version": "19.8.0", + "resolved": "https://registry.npmjs.org/@commitlint/top-level/-/top-level-19.8.0.tgz", + "integrity": "sha512-Rphgoc/omYZisoNkcfaBRPQr4myZEHhLPx2/vTXNLjiCw4RgfPR1wEgUpJ9OOmDCiv5ZyIExhprNLhteqH4FuQ==", "dev": true, "license": "MIT", "dependencies": { @@ -709,9 +709,9 @@ } }, "node_modules/@commitlint/types": { - "version": "19.5.0", - "resolved": "https://registry.npmjs.org/@commitlint/types/-/types-19.5.0.tgz", - "integrity": "sha512-DSHae2obMSMkAtTBSOulg5X7/z+rGLxcXQIkg3OmWvY6wifojge5uVMydfhUvs7yQj+V7jNmRZ2Xzl8GJyqRgg==", + "version": "19.8.0", + "resolved": "https://registry.npmjs.org/@commitlint/types/-/types-19.8.0.tgz", + "integrity": "sha512-LRjP623jPyf3Poyfb0ohMj8I3ORyBDOwXAgxxVPbSD0unJuW2mJWeiRfaQinjtccMqC5Wy1HOMfa4btKjbNxbg==", "dev": true, "license": "MIT", "dependencies": { @@ -840,6 +840,18 @@ "node": ">= 4.0.0" } }, + "node_modules/@emnapi/core": { + "version": "1.3.1", + "resolved": "https://registry.npmjs.org/@emnapi/core/-/core-1.3.1.tgz", + "integrity": "sha512-pVGjBIt1Y6gg3EJN8jTcfpP/+uuRksIo055oE/OBkDNcjZqVbfkWCksG1Jp4yZnj3iKWyWX8fdG/j6UDYPbFog==", + "dev": true, + "license": "MIT", + "optional": true, + "dependencies": { + "@emnapi/wasi-threads": "1.0.1", + "tslib": "^2.4.0" + } + }, "node_modules/@emnapi/runtime": { "version": "1.3.1", "resolved": "https://registry.npmjs.org/@emnapi/runtime/-/runtime-1.3.1.tgz", @@ -851,6 +863,17 @@ "tslib": "^2.4.0" } }, + "node_modules/@emnapi/wasi-threads": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/@emnapi/wasi-threads/-/wasi-threads-1.0.1.tgz", + "integrity": "sha512-iIBu7mwkq4UQGeMEM8bLwNK962nXdhodeScX4slfQnRhEMMzvYivHhutCIk8uojvmASXXPC2WNEjwxFWk72Oqw==", + "dev": true, + "license": "MIT", + "optional": true, + "dependencies": { + "tslib": "^2.4.0" + } + }, "node_modules/@es-joy/jsdoccomment": { "version": "0.49.0", "resolved": "https://registry.npmjs.org/@es-joy/jsdoccomment/-/jsdoccomment-0.49.0.tgz", @@ -1300,9 +1323,9 @@ } }, "node_modules/@eslint/compat": { - "version": "1.2.5", - "resolved": "https://registry.npmjs.org/@eslint/compat/-/compat-1.2.5.tgz", - "integrity": "sha512-5iuG/StT+7OfvhoBHPlmxkPA9om6aDUFgmD4+mWKAGsYt4vCe8rypneG03AuseyRHBmcCLXQtIH5S26tIoggLg==", + "version": "1.2.7", + "resolved": "https://registry.npmjs.org/@eslint/compat/-/compat-1.2.7.tgz", + "integrity": "sha512-xvv7hJE32yhegJ8xNAnb62ggiAwTYHBpUCWhRxEj/ksvgDJuSXfoDkBcRYaYNFiJ+jH0IE3K16hd+xXzhBgNbg==", "dev": true, "license": "Apache-2.0", "engines": { @@ -1318,13 +1341,13 @@ } }, "node_modules/@eslint/config-array": { - "version": "0.19.1", - "resolved": "https://registry.npmjs.org/@eslint/config-array/-/config-array-0.19.1.tgz", - "integrity": "sha512-fo6Mtm5mWyKjA/Chy1BYTdn5mGJoDNjC7C64ug20ADsRDGrA85bN3uK3MaKbeRkRuuIEAR5N33Jr1pbm411/PA==", + "version": "0.19.2", + "resolved": "https://registry.npmjs.org/@eslint/config-array/-/config-array-0.19.2.tgz", + 
"integrity": "sha512-GNKqxfHG2ySmJOBSHg7LxeUx4xpuCoFjacmlCoYWEbaPXLwvfIjixRI12xCQZeULksQb23uiA8F40w5TojpV7w==", "dev": true, "license": "Apache-2.0", "dependencies": { - "@eslint/object-schema": "^2.1.5", + "@eslint/object-schema": "^2.1.6", "debug": "^4.3.1", "minimatch": "^3.1.2" }, @@ -1356,10 +1379,20 @@ "node": "*" } }, + "node_modules/@eslint/config-helpers": { + "version": "0.2.0", + "resolved": "https://registry.npmjs.org/@eslint/config-helpers/-/config-helpers-0.2.0.tgz", + "integrity": "sha512-yJLLmLexii32mGrhW29qvU3QBVTu0GUmEf/J4XsBtVhp4JkIUFN/BjWqTF63yRvGApIDpZm5fa97LtYtINmfeQ==", + "dev": true, + "license": "Apache-2.0", + "engines": { + "node": "^18.18.0 || ^20.9.0 || >=21.1.0" + } + }, "node_modules/@eslint/core": { - "version": "0.10.0", - "resolved": "https://registry.npmjs.org/@eslint/core/-/core-0.10.0.tgz", - "integrity": "sha512-gFHJ+xBOo4G3WRlR1e/3G8A6/KZAH6zcE/hkLRCZTi/B9avAG365QhFA8uOGzTMqgTghpn7/fSnscW++dpMSAw==", + "version": "0.12.0", + "resolved": "https://registry.npmjs.org/@eslint/core/-/core-0.12.0.tgz", + "integrity": "sha512-cmrR6pytBuSMTaBweKoGMwu3EiHiEC+DoyupPmlZ0HxBJBtIxwe+j/E4XPIKNx+Q74c8lXKPwYawBf5glsTkHg==", "dev": true, "license": "Apache-2.0", "dependencies": { @@ -1370,9 +1403,9 @@ } }, "node_modules/@eslint/eslintrc": { - "version": "3.2.0", - "resolved": "https://registry.npmjs.org/@eslint/eslintrc/-/eslintrc-3.2.0.tgz", - "integrity": "sha512-grOjVNN8P3hjJn/eIETF1wwd12DdnwFDoyceUJLYYdkpbwq3nLi+4fqrTAONx7XDALqlL220wC/RHSC/QTI/0w==", + "version": "3.3.1", + "resolved": "https://registry.npmjs.org/@eslint/eslintrc/-/eslintrc-3.3.1.tgz", + "integrity": "sha512-gtF186CXhIl1p4pJNGZw8Yc6RlshoePRvE0X91oPGb3vZ8pM3qOS9W9NGPat9LziaBV7XrJWGylNQXkGcnM3IQ==", "dev": true, "license": "MIT", "dependencies": { @@ -1452,9 +1485,9 @@ } }, "node_modules/@eslint/js": { - "version": "9.19.0", - "resolved": "https://registry.npmjs.org/@eslint/js/-/js-9.19.0.tgz", - "integrity": "sha512-rbq9/g38qjfqFLOVPvwjIvFFdNziEC5S65jmjPw5r6A//QH+W91akh9irMwjDN8zKUTak6W9EsAv4m/7Wnw0UQ==", + "version": "9.23.0", + "resolved": "https://registry.npmjs.org/@eslint/js/-/js-9.23.0.tgz", + "integrity": "sha512-35MJ8vCPU0ZMxo7zfev2pypqTwWTofFZO6m4KAtdoFhRpLJUpHTZZ+KB3C7Hb1d7bULYwO4lJXGCi5Se+8OMbw==", "dev": true, "license": "MIT", "engines": { @@ -1462,9 +1495,9 @@ } }, "node_modules/@eslint/object-schema": { - "version": "2.1.5", - "resolved": "https://registry.npmjs.org/@eslint/object-schema/-/object-schema-2.1.5.tgz", - "integrity": "sha512-o0bhxnL89h5Bae5T318nFoFzGy+YE5i/gGkoPAgkmTVdRKTiv3p8JHevPiPaMwoloKfEiiaHlawCqaZMqRm+XQ==", + "version": "2.1.6", + "resolved": "https://registry.npmjs.org/@eslint/object-schema/-/object-schema-2.1.6.tgz", + "integrity": "sha512-RBMg5FRL0I0gs51M/guSAj5/e14VQ4tpZnQNWwuDT66P14I43ItmPfIZRhO9fUVIPOAQXU47atlywZ/czoqFPA==", "dev": true, "license": "Apache-2.0", "engines": { @@ -1472,13 +1505,13 @@ } }, "node_modules/@eslint/plugin-kit": { - "version": "0.2.5", - "resolved": "https://registry.npmjs.org/@eslint/plugin-kit/-/plugin-kit-0.2.5.tgz", - "integrity": "sha512-lB05FkqEdUg2AA0xEbUz0SnkXT1LcCTa438W4IWTUh4hdOnVbQyOJ81OrDXsJk/LSiJHubgGEFoR5EHq1NsH1A==", + "version": "0.2.7", + "resolved": "https://registry.npmjs.org/@eslint/plugin-kit/-/plugin-kit-0.2.7.tgz", + "integrity": "sha512-JubJ5B2pJ4k4yGxaNLdbjrnk9d/iDz6/q8wOilpIowd6PJPgaxCuHBnBszq7Ce2TyMrywm5r4PnKm6V3iiZF+g==", "dev": true, "license": "Apache-2.0", "dependencies": { - "@eslint/core": "^0.10.0", + "@eslint/core": "^0.12.0", "levn": "^0.4.1" }, "engines": { @@ -1513,22 +1546,47 @@ "license": 
"MIT" }, "node_modules/@fontsource/inter": { - "version": "5.1.1", - "resolved": "https://registry.npmjs.org/@fontsource/inter/-/inter-5.1.1.tgz", - "integrity": "sha512-weN3E+rq0Xb3Z93VHJ+Rc7WOQX9ETJPTAJ+gDcaMHtjft67L58sfS65rAjC5tZUXQ2FdZ/V1/sSzCwZ6v05kJw==", + "version": "5.2.5", + "resolved": "https://registry.npmjs.org/@fontsource/inter/-/inter-5.2.5.tgz", + "integrity": "sha512-kbsPKj0S4p44JdYRFiW78Td8Ge2sBVxi/PIBwmih+RpSXUdvS9nbs1HIiuUSPtRMi14CqLEZ/fbk7dj7vni1Sg==", "dev": true, - "license": "OFL-1.1" + "license": "OFL-1.1", + "funding": { + "url": "https://github.com/sponsors/ayuhito" + } }, "node_modules/@gerrit0/mini-shiki": { - "version": "1.26.1", - "resolved": "https://registry.npmjs.org/@gerrit0/mini-shiki/-/mini-shiki-1.26.1.tgz", - "integrity": "sha512-gHFUvv9f1fU2Piou/5Y7Sx5moYxcERbC7CXc6rkDLQTUBg5Dgg9L4u29/nHqfoQ3Y9R0h0BcOhd14uOEZIBP7Q==", + "version": "3.2.1", + "resolved": "https://registry.npmjs.org/@gerrit0/mini-shiki/-/mini-shiki-3.2.1.tgz", + "integrity": "sha512-HbzRC6MKB6U8kQhczz0APKPIzFHTrcqhaC7es2EXInq1SpjPVnpVSIsBe6hNoLWqqCx1n5VKiPXq6PfXnHZKOQ==", "dev": true, "license": "MIT", "dependencies": { - "@shikijs/engine-oniguruma": "^1.26.1", - "@shikijs/types": "^1.26.1", - "@shikijs/vscode-textmate": "^10.0.1" + "@shikijs/engine-oniguruma": "^3.2.1", + "@shikijs/types": "^3.2.1", + "@shikijs/vscode-textmate": "^10.0.2" + } + }, + "node_modules/@gerrit0/mini-shiki/node_modules/@shikijs/engine-oniguruma": { + "version": "3.2.1", + "resolved": "https://registry.npmjs.org/@shikijs/engine-oniguruma/-/engine-oniguruma-3.2.1.tgz", + "integrity": "sha512-wZZAkayEn6qu2+YjenEoFqj0OyQI64EWsNR6/71d1EkG4sxEOFooowKivsWPpaWNBu3sxAG+zPz5kzBL/SsreQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "@shikijs/types": "3.2.1", + "@shikijs/vscode-textmate": "^10.0.2" + } + }, + "node_modules/@gerrit0/mini-shiki/node_modules/@shikijs/types": { + "version": "3.2.1", + "resolved": "https://registry.npmjs.org/@shikijs/types/-/types-3.2.1.tgz", + "integrity": "sha512-/NTWAk4KE2M8uac0RhOsIhYQf4pdU0OywQuYDGIGAJ6Mjunxl2cGiuLkvu4HLCMn+OTTLRWkjZITp+aYJv60yA==", + "dev": true, + "license": "MIT", + "dependencies": { + "@shikijs/vscode-textmate": "^10.0.2", + "@types/hast": "^3.0.4" } }, "node_modules/@huggingface/jinja": { @@ -1593,9 +1651,9 @@ } }, "node_modules/@humanwhocodes/retry": { - "version": "0.4.1", - "resolved": "https://registry.npmjs.org/@humanwhocodes/retry/-/retry-0.4.1.tgz", - "integrity": "sha512-c7hNEllBlenFTHBky65mhq8WD2kbN9Q6gk0bTk8lSBvc554jpXSkST1iePudpt7+A/AQvuHs9EMqjHDXMY1lrA==", + "version": "0.4.2", + "resolved": "https://registry.npmjs.org/@humanwhocodes/retry/-/retry-0.4.2.tgz", + "integrity": "sha512-xeO57FpIu4p1Ri3Jq/EXq4ClRm86dVF2z/+kvFnyqVYRavTZmaFaUBbWCOuuTh0o/g7DSsk6kc2vrS4Vl5oPOQ==", "dev": true, "license": "Apache-2.0", "engines": { @@ -1607,9 +1665,9 @@ } }, "node_modules/@iconify-json/octicon": { - "version": "1.2.2", - "resolved": "https://registry.npmjs.org/@iconify-json/octicon/-/octicon-1.2.2.tgz", - "integrity": "sha512-qEPkP9DMMay5uILzyaSmVksSMxRw9i2wSDREfB8OK20mPdSadusjLqD/u69GzpFpw6894c+WNmoq7WzN5KAPeg==", + "version": "1.2.5", + "resolved": "https://registry.npmjs.org/@iconify-json/octicon/-/octicon-1.2.5.tgz", + "integrity": "sha512-FVFjjmAC1R1Feis9k+COO3WvL3yumk0E7RxNvOEGT9K8CDp05k3Qg2lLlynBzZrSfuT1DhReJUUXFAj60ZHkpA==", "dev": true, "license": "MIT", "dependencies": { @@ -2134,6 +2192,19 @@ "integrity": "sha512-GaHYm+c0O9MjZRu0ongGBRbinu8gVAMd2UZjji6jVmqKtZluZnptXGWhz1E8j8D2HJ3f/yMxKAUC0b+57wncIw==", "license": "MIT" }, + 
"node_modules/@napi-rs/wasm-runtime": { + "version": "0.2.7", + "resolved": "https://registry.npmjs.org/@napi-rs/wasm-runtime/-/wasm-runtime-0.2.7.tgz", + "integrity": "sha512-5yximcFK5FNompXfJFoWanu5l8v1hNGqNHh9du1xETp9HWk/B/PzvchX55WYOPaIeNglG8++68AAiauBAtbnzw==", + "dev": true, + "license": "MIT", + "optional": true, + "dependencies": { + "@emnapi/core": "^1.3.1", + "@emnapi/runtime": "^1.3.1", + "@tybys/wasm-util": "^0.9.0" + } + }, "node_modules/@nodelib/fs.scandir": { "version": "2.1.5", "resolved": "https://registry.npmjs.org/@nodelib/fs.scandir/-/fs.scandir-2.1.5.tgz", @@ -2173,43 +2244,47 @@ } }, "node_modules/@nolebase/ui": { - "version": "2.12.1", - "resolved": "https://registry.npmjs.org/@nolebase/ui/-/ui-2.12.1.tgz", - "integrity": "sha512-nXjKfBzG9x+2Mp9+gQ1xUzTzGmJEHiCbBDzbnkALjIdoBPgwHC44GdpJL4WWwFdkHSQ9EuNOMD2Qn+7BosbYWQ==", + "version": "2.15.1", + "resolved": "https://registry.npmjs.org/@nolebase/ui/-/ui-2.15.1.tgz", + "integrity": "sha512-IiKX2HQ8Ey7M3Nd27GZdoTrURTIq6DtkGOXc6mxrZV2REbMTYqYlby1Xwx6sfyOcSsDD7Ge62PlZze/yVIZsWg==", "dev": true, "license": "MIT", "dependencies": { - "@iconify-json/octicon": "^1.2.2", - "less": "^4.2.1", - "vitepress": "^1.5.0", + "@iconify-json/octicon": "^1.2.5", + "less": "^4.2.2", "vue": "^3.5.13" + }, + "peerDependencies": { + "vitepress": "^1.5.0 || ^2.0.0-alpha.1" } }, "node_modules/@nolebase/vitepress-plugin-git-changelog": { - "version": "2.12.1", - "resolved": "https://registry.npmjs.org/@nolebase/vitepress-plugin-git-changelog/-/vitepress-plugin-git-changelog-2.12.1.tgz", - "integrity": "sha512-trKaDEYXliIGYJ028vGQePU5jhrzzxI8KWC48YnJv3AAZ/jQlk4BVRybD4jumc0miFzLamHe+80KIPJ5KZHV4g==", + "version": "2.15.1", + "resolved": "https://registry.npmjs.org/@nolebase/vitepress-plugin-git-changelog/-/vitepress-plugin-git-changelog-2.15.1.tgz", + "integrity": "sha512-CG2rQNTvil+DztNe2/Z5tMLi+vNLIN0A/HE6YbyWdUup9965FqzdNxv5XKO1qVfb6hqCIMmPeIRTUJIq/SZfxw==", "dev": true, "license": "MIT", "dependencies": { - "@iconify-json/octicon": "^1.2.2", - "@nolebase/ui": "^2.12.1", + "@iconify-json/octicon": "^1.2.5", + "@nolebase/ui": "^2.15.1", "colorette": "^2.0.20", "date-fns": "^4.1.0", "defu": "^6.1.4", - "es-toolkit": "^1.31.0", + "es-toolkit": "^1.33.0", "execa": "^9.5.2", - "globby": "^14.0.2", + "globby": "^14.1.0", "gray-matter": "^4.0.3", - "less": "^4.2.1", - "uncrypto": "^0.1.3", - "vitepress": "^1.5.0" + "less": "^4.2.2", + "uncrypto": "^0.1.3" + }, + "peerDependencies": { + "vitepress": "^1.5.0 || ^2.0.0-alpha.1" } }, "node_modules/@nolebase/vitepress-plugin-og-image": { - "version": "2.12.1", - "resolved": "https://registry.npmjs.org/@nolebase/vitepress-plugin-og-image/-/vitepress-plugin-og-image-2.12.1.tgz", - "integrity": "sha512-C/L9qXfVIu2dvX0L+886KV++t3eqg2GuzyzBt4JISBwjW5s07QOFo4Ahn3924xaUtJdW+X49mwiAd1cNvAb/pw==", + "version": "2.15.1", + "resolved": "https://registry.npmjs.org/@nolebase/vitepress-plugin-og-image/-/vitepress-plugin-og-image-2.15.1.tgz", + "integrity": "sha512-QWfHVzXtxwCRJtYKpQsXSx8uZWGPIjlM6acjxugZ+uVQtvOGej1MKAkIrhb27lfgYpndaNchryBjKmNKL3XPQA==", "dev": true, "license": "MIT", "dependencies": { @@ -2217,58 +2292,50 @@ "colorette": "^2.0.20", "defu": "^6.1.4", "emoji-regex": "^10.4.0", - "fs-extra": "^11.2.0", + "fs-extra": "^11.3.0", "gray-matter": "^4.0.3", - "ora": "^8.1.1", + "ora": "^8.2.0", "rehype": "^13.0.2", "rehype-meta": "^4.0.1", "rehype-parse": "^9.0.1", "rehype-stringify": "^10.0.1", - "tinyglobby": "^0.2.10", + "tinyglobby": "^0.2.12", "unified": "^11.0.5", - "unist-util-visit": "^5.0.0", - 
"vitepress": "^1.5.0" - } - }, - "node_modules/@nolyfill/is-core-module": { - "version": "1.0.39", - "resolved": "https://registry.npmjs.org/@nolyfill/is-core-module/-/is-core-module-1.0.39.tgz", - "integrity": "sha512-nn5ozdjYQpUCZlWGuxcJY/KpxkWQs4DcbMCmKojjyrYDEAGy4Ce19NN4v5MduafTwJlbKc99UA8YhSVqq9yPZA==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=12.4.0" + "unist-util-visit": "^5.0.0" + }, + "peerDependencies": { + "vitepress": "^1.5.0 || ^2.0.0-alpha.1" } }, "node_modules/@octokit/app": { - "version": "15.1.2", - "resolved": "https://registry.npmjs.org/@octokit/app/-/app-15.1.2.tgz", - "integrity": "sha512-6aKmKvqnJKoVK+kx0mLlBMKmQYoziPw4Rd/PWr0j65QVQlrDXlu6hGU8fmTXt7tNkf/DsubdIaTT4fkoWzCh5g==", + "version": "15.1.5", + "resolved": "https://registry.npmjs.org/@octokit/app/-/app-15.1.5.tgz", + "integrity": "sha512-6cxLT9U8x7GGQ7lNWsKtFr4ccg9oLkGvowk373sX9HvX5U37kql5d55SzaQUxPE8PwgX2cqkzDm5NF5aPKevqg==", "license": "MIT", "dependencies": { - "@octokit/auth-app": "^7.1.4", - "@octokit/auth-unauthenticated": "^6.1.1", - "@octokit/core": "^6.1.3", - "@octokit/oauth-app": "^7.1.5", - "@octokit/plugin-paginate-rest": "^11.3.6", - "@octokit/types": "^13.6.2", - "@octokit/webhooks": "^13.4.2" + "@octokit/auth-app": "^7.1.5", + "@octokit/auth-unauthenticated": "^6.1.2", + "@octokit/core": "^6.1.4", + "@octokit/oauth-app": "^7.1.6", + "@octokit/plugin-paginate-rest": "^11.4.2", + "@octokit/types": "^13.8.0", + "@octokit/webhooks": "^13.6.1" }, "engines": { "node": ">= 18" } }, "node_modules/@octokit/auth-app": { - "version": "7.1.4", - "resolved": "https://registry.npmjs.org/@octokit/auth-app/-/auth-app-7.1.4.tgz", - "integrity": "sha512-5F+3l/maq9JfWQ4bV28jT2G/K8eu9OJ317yzXPTGe4Kw+lKDhFaS4dQ3Ltmb6xImKxfCQdqDqMXODhc9YLipLw==", + "version": "7.1.5", + "resolved": "https://registry.npmjs.org/@octokit/auth-app/-/auth-app-7.1.5.tgz", + "integrity": "sha512-boklS4E6LpbA3nRx+SU2fRKRGZJdOGoSZne/i3Y0B5rfHOcGwFgcXrwDLdtbv4igfDSnAkZaoNBv1GYjPDKRNw==", "license": "MIT", "dependencies": { - "@octokit/auth-oauth-app": "^8.1.2", - "@octokit/auth-oauth-user": "^5.1.2", - "@octokit/request": "^9.1.4", - "@octokit/request-error": "^6.1.6", - "@octokit/types": "^13.6.2", + "@octokit/auth-oauth-app": "^8.1.3", + "@octokit/auth-oauth-user": "^5.1.3", + "@octokit/request": "^9.2.1", + "@octokit/request-error": "^6.1.7", + "@octokit/types": "^13.8.0", "toad-cache": "^3.7.0", "universal-github-app-jwt": "^2.2.0", "universal-user-agent": "^7.0.0" @@ -2278,14 +2345,14 @@ } }, "node_modules/@octokit/auth-oauth-app": { - "version": "8.1.2", - "resolved": "https://registry.npmjs.org/@octokit/auth-oauth-app/-/auth-oauth-app-8.1.2.tgz", - "integrity": "sha512-3woNZgq5/S6RS+9ZTq+JdymxVr7E0s4EYxF20ugQvgX3pomdPUL5r/XdTY9wALoBM2eHVy4ettr5fKpatyTyHw==", + "version": "8.1.3", + "resolved": "https://registry.npmjs.org/@octokit/auth-oauth-app/-/auth-oauth-app-8.1.3.tgz", + "integrity": "sha512-4e6OjVe5rZ8yBe8w7byBjpKtSXFuro7gqeGAAZc7QYltOF8wB93rJl2FE0a4U1Mt88xxPv/mS+25/0DuLk0Ewg==", "license": "MIT", "dependencies": { - "@octokit/auth-oauth-device": "^7.1.2", - "@octokit/auth-oauth-user": "^5.1.2", - "@octokit/request": "^9.1.4", + "@octokit/auth-oauth-device": "^7.1.3", + "@octokit/auth-oauth-user": "^5.1.3", + "@octokit/request": "^9.2.1", "@octokit/types": "^13.6.2", "universal-user-agent": "^7.0.0" }, @@ -2294,13 +2361,13 @@ } }, "node_modules/@octokit/auth-oauth-device": { - "version": "7.1.2", - "resolved": "https://registry.npmjs.org/@octokit/auth-oauth-device/-/auth-oauth-device-7.1.2.tgz", - "integrity": 
"sha512-gTOIzDeV36OhVfxCl69FmvJix7tJIiU6dlxuzLVAzle7fYfO8UDyddr9B+o4CFQVaMBLMGZ9ak2CWMYcGeZnPw==", + "version": "7.1.4", + "resolved": "https://registry.npmjs.org/@octokit/auth-oauth-device/-/auth-oauth-device-7.1.4.tgz", + "integrity": "sha512-yK35I9VGDGjYxu0NPZ9Rl+zXM/+DO/Hu1VR5FUNz+ZsU6i8B8oQ43TPwci9nuH8bAF6rQrKDNR9F0r0+kzYJhA==", "license": "MIT", "dependencies": { - "@octokit/oauth-methods": "^5.1.3", - "@octokit/request": "^9.1.4", + "@octokit/oauth-methods": "^5.1.4", + "@octokit/request": "^9.2.1", "@octokit/types": "^13.6.2", "universal-user-agent": "^7.0.0" }, @@ -2309,14 +2376,14 @@ } }, "node_modules/@octokit/auth-oauth-user": { - "version": "5.1.2", - "resolved": "https://registry.npmjs.org/@octokit/auth-oauth-user/-/auth-oauth-user-5.1.2.tgz", - "integrity": "sha512-PgVDDPJgZYb3qSEXK4moksA23tfn68zwSAsQKZ1uH6IV9IaNEYx35OXXI80STQaLYnmEE86AgU0tC1YkM4WjsA==", + "version": "5.1.3", + "resolved": "https://registry.npmjs.org/@octokit/auth-oauth-user/-/auth-oauth-user-5.1.3.tgz", + "integrity": "sha512-zNPByPn9K7TC+OOHKGxU+MxrE9SZAN11UHYEFLsK2NRn3akJN2LHRl85q+Eypr3tuB2GrKx3rfj2phJdkYCvzw==", "license": "MIT", "dependencies": { - "@octokit/auth-oauth-device": "^7.1.2", - "@octokit/oauth-methods": "^5.1.2", - "@octokit/request": "^9.1.4", + "@octokit/auth-oauth-device": "^7.1.3", + "@octokit/oauth-methods": "^5.1.3", + "@octokit/request": "^9.2.1", "@octokit/types": "^13.6.2", "universal-user-agent": "^7.0.0" }, @@ -2334,12 +2401,12 @@ } }, "node_modules/@octokit/auth-unauthenticated": { - "version": "6.1.1", - "resolved": "https://registry.npmjs.org/@octokit/auth-unauthenticated/-/auth-unauthenticated-6.1.1.tgz", - "integrity": "sha512-bGXqdN6RhSFHvpPq46SL8sN+F3odQ6oMNLWc875IgoqcC3qus+fOL2th6Tkl94wvdSTy8/OeHzWy/lZebmnhog==", + "version": "6.1.2", + "resolved": "https://registry.npmjs.org/@octokit/auth-unauthenticated/-/auth-unauthenticated-6.1.2.tgz", + "integrity": "sha512-07DlUGcz/AAVdzu3EYfi/dOyMSHp9YsOxPl/MPmtlVXWiD//GlV8HgZsPhud94DEyx+RfrW0wSl46Lx+AWbOlg==", "license": "MIT", "dependencies": { - "@octokit/request-error": "^6.1.6", + "@octokit/request-error": "^6.1.7", "@octokit/types": "^13.6.2" }, "engines": { @@ -2347,15 +2414,15 @@ } }, "node_modules/@octokit/core": { - "version": "6.1.3", - "resolved": "https://registry.npmjs.org/@octokit/core/-/core-6.1.3.tgz", - "integrity": "sha512-z+j7DixNnfpdToYsOutStDgeRzJSMnbj8T1C/oQjB6Aa+kRfNjs/Fn7W6c8bmlt6mfy3FkgeKBRnDjxQow5dow==", + "version": "6.1.4", + "resolved": "https://registry.npmjs.org/@octokit/core/-/core-6.1.4.tgz", + "integrity": "sha512-lAS9k7d6I0MPN+gb9bKDt7X8SdxknYqAMh44S5L+lNqIN2NuV8nvv3g8rPp7MuRxcOpxpUIATWprO0C34a8Qmg==", "license": "MIT", "dependencies": { "@octokit/auth-token": "^5.0.0", "@octokit/graphql": "^8.1.2", - "@octokit/request": "^9.1.4", - "@octokit/request-error": "^6.1.6", + "@octokit/request": "^9.2.1", + "@octokit/request-error": "^6.1.7", "@octokit/types": "^13.6.2", "before-after-hook": "^3.0.2", "universal-user-agent": "^7.0.0" @@ -2365,9 +2432,9 @@ } }, "node_modules/@octokit/endpoint": { - "version": "10.1.2", - "resolved": "https://registry.npmjs.org/@octokit/endpoint/-/endpoint-10.1.2.tgz", - "integrity": "sha512-XybpFv9Ms4hX5OCHMZqyODYqGTZ3H6K6Vva+M9LR7ib/xr1y1ZnlChYv9H680y77Vd/i/k+thXApeRASBQkzhA==", + "version": "10.1.3", + "resolved": "https://registry.npmjs.org/@octokit/endpoint/-/endpoint-10.1.3.tgz", + "integrity": "sha512-nBRBMpKPhQUxCsQQeW+rCJ/OPSMcj3g0nfHn01zGYZXuNDvvXudF/TYY6APj5THlurerpFN4a/dQAIAaM6BYhA==", "license": "MIT", "dependencies": { "@octokit/types": "^13.6.2", @@ 
-2392,17 +2459,17 @@ } }, "node_modules/@octokit/oauth-app": { - "version": "7.1.5", - "resolved": "https://registry.npmjs.org/@octokit/oauth-app/-/oauth-app-7.1.5.tgz", - "integrity": "sha512-/Y2MiwWDlGUK4blKKfjJiwjzu/FzwKTTTfTZAAQ0QbdBIDEGJPWhOFH6muSN86zaa4tNheB4YS3oWIR2e4ydzA==", + "version": "7.1.6", + "resolved": "https://registry.npmjs.org/@octokit/oauth-app/-/oauth-app-7.1.6.tgz", + "integrity": "sha512-OMcMzY2WFARg80oJNFwWbY51TBUfLH4JGTy119cqiDawSFXSIBujxmpXiKbGWQlvfn0CxE6f7/+c6+Kr5hI2YA==", "license": "MIT", "dependencies": { - "@octokit/auth-oauth-app": "^8.1.2", - "@octokit/auth-oauth-user": "^5.1.2", - "@octokit/auth-unauthenticated": "^6.1.1", - "@octokit/core": "^6.1.3", + "@octokit/auth-oauth-app": "^8.1.3", + "@octokit/auth-oauth-user": "^5.1.3", + "@octokit/auth-unauthenticated": "^6.1.2", + "@octokit/core": "^6.1.4", "@octokit/oauth-authorization-url": "^7.1.1", - "@octokit/oauth-methods": "^5.1.3", + "@octokit/oauth-methods": "^5.1.4", "@types/aws-lambda": "^8.10.83", "universal-user-agent": "^7.0.0" }, @@ -2420,14 +2487,14 @@ } }, "node_modules/@octokit/oauth-methods": { - "version": "5.1.3", - "resolved": "https://registry.npmjs.org/@octokit/oauth-methods/-/oauth-methods-5.1.3.tgz", - "integrity": "sha512-M+bDBi5H8FnH0xhCTg0m9hvcnppdDnxUqbZyOkxlLblKpLAR+eT2nbDPvJDp0eLrvJWA1I8OX0KHf/sBMQARRA==", + "version": "5.1.4", + "resolved": "https://registry.npmjs.org/@octokit/oauth-methods/-/oauth-methods-5.1.4.tgz", + "integrity": "sha512-Jc/ycnePClOvO1WL7tlC+TRxOFtyJBGuTDsL4dzXNiVZvzZdrPuNw7zHI3qJSUX2n6RLXE5L0SkFmYyNaVUFoQ==", "license": "MIT", "dependencies": { "@octokit/oauth-authorization-url": "^7.0.0", - "@octokit/request": "^9.1.4", - "@octokit/request-error": "^6.1.6", + "@octokit/request": "^9.2.1", + "@octokit/request-error": "^6.1.7", "@octokit/types": "^13.6.2" }, "engines": { @@ -2435,15 +2502,15 @@ } }, "node_modules/@octokit/openapi-types": { - "version": "23.0.1", - "resolved": "https://registry.npmjs.org/@octokit/openapi-types/-/openapi-types-23.0.1.tgz", - "integrity": "sha512-izFjMJ1sir0jn0ldEKhZ7xegCTj/ObmEDlEfpFrx4k/JyZSMRHbO3/rBwgE7f3m2DHt+RrNGIVw4wSmwnm3t/g==", + "version": "24.2.0", + "resolved": "https://registry.npmjs.org/@octokit/openapi-types/-/openapi-types-24.2.0.tgz", + "integrity": "sha512-9sIH3nSUttelJSXUrmGzl7QUBFul0/mB8HRYl3fOlgHbIWG+WnYDXU3v/2zMtAvuzZ/ed00Ei6on975FhBfzrg==", "license": "MIT" }, "node_modules/@octokit/openapi-webhooks-types": { - "version": "8.5.1", - "resolved": "https://registry.npmjs.org/@octokit/openapi-webhooks-types/-/openapi-webhooks-types-8.5.1.tgz", - "integrity": "sha512-i3h1b5zpGSB39ffBbYdSGuAd0NhBAwPyA3QV3LYi/lx4lsbZiu7u2UHgXVUR6EpvOI8REOuVh1DZTRfHoJDvuQ==", + "version": "10.3.0", + "resolved": "https://registry.npmjs.org/@octokit/openapi-webhooks-types/-/openapi-webhooks-types-10.3.0.tgz", + "integrity": "sha512-Dog+FB4pvsfcGZ9CFyYwb7I6K4QkzCDuN0H3hh5Nr9mMQ4wbBceqUKyJNKN2A0SQ6j4UU3CYNuvfoX9mQlhLUQ==", "license": "MIT" }, "node_modules/@octokit/plugin-paginate-graphql": { @@ -2459,12 +2526,12 @@ } }, "node_modules/@octokit/plugin-paginate-rest": { - "version": "11.4.0", - "resolved": "https://registry.npmjs.org/@octokit/plugin-paginate-rest/-/plugin-paginate-rest-11.4.0.tgz", - "integrity": "sha512-ttpGck5AYWkwMkMazNCZMqxKqIq1fJBNxBfsFwwfyYKTf914jKkLF0POMS3YkPBwp5g1c2Y4L79gDz01GhSr1g==", + "version": "11.6.0", + "resolved": "https://registry.npmjs.org/@octokit/plugin-paginate-rest/-/plugin-paginate-rest-11.6.0.tgz", + "integrity": 
"sha512-n5KPteiF7pWKgBIBJSk8qzoZWcUkza2O6A0za97pMGVrGfPdltxrfmfF5GucHYvHGZD8BdaZmmHGz5cX/3gdpw==", "license": "MIT", "dependencies": { - "@octokit/types": "^13.7.0" + "@octokit/types": "^13.10.0" }, "engines": { "node": ">= 18" @@ -2474,12 +2541,12 @@ } }, "node_modules/@octokit/plugin-rest-endpoint-methods": { - "version": "13.3.0", - "resolved": "https://registry.npmjs.org/@octokit/plugin-rest-endpoint-methods/-/plugin-rest-endpoint-methods-13.3.0.tgz", - "integrity": "sha512-LUm44shlmkp/6VC+qQgHl3W5vzUP99ZM54zH6BuqkJK4DqfFLhegANd+fM4YRLapTvPm4049iG7F3haANKMYvQ==", + "version": "13.5.0", + "resolved": "https://registry.npmjs.org/@octokit/plugin-rest-endpoint-methods/-/plugin-rest-endpoint-methods-13.5.0.tgz", + "integrity": "sha512-9Pas60Iv9ejO3WlAX3maE1+38c5nqbJXV5GrncEfkndIpZrJ/WPMRd2xYDcPPEt5yzpxcjw9fWNoPhsSGzqKqw==", "license": "MIT", "dependencies": { - "@octokit/types": "^13.7.0" + "@octokit/types": "^13.10.0" }, "engines": { "node": ">= 18" @@ -2489,12 +2556,12 @@ } }, "node_modules/@octokit/plugin-retry": { - "version": "7.1.3", - "resolved": "https://registry.npmjs.org/@octokit/plugin-retry/-/plugin-retry-7.1.3.tgz", - "integrity": "sha512-8nKOXvYWnzv89gSyIvgFHmCBAxfQAOPRlkacUHL9r5oWtp5Whxl8Skb2n3ACZd+X6cYijD6uvmrQuPH/UCL5zQ==", + "version": "7.2.0", + "resolved": "https://registry.npmjs.org/@octokit/plugin-retry/-/plugin-retry-7.2.0.tgz", + "integrity": "sha512-psMbEYb/Fh+V+ZaFo8J16QiFz4sVTv3GntCSU+hYqzHiMdc3P+hhHLVv+dJt0PGIPAGoIA5u+J2DCJdK6lEPsQ==", "license": "MIT", "dependencies": { - "@octokit/request-error": "^6.1.6", + "@octokit/request-error": "^6.1.7", "@octokit/types": "^13.6.2", "bottleneck": "^2.15.3" }, @@ -2522,13 +2589,13 @@ } }, "node_modules/@octokit/request": { - "version": "9.1.4", - "resolved": "https://registry.npmjs.org/@octokit/request/-/request-9.1.4.tgz", - "integrity": "sha512-tMbOwGm6wDII6vygP3wUVqFTw3Aoo0FnVQyhihh8vVq12uO3P+vQZeo2CKMpWtPSogpACD0yyZAlVlQnjW71DA==", + "version": "9.2.2", + "resolved": "https://registry.npmjs.org/@octokit/request/-/request-9.2.2.tgz", + "integrity": "sha512-dZl0ZHx6gOQGcffgm1/Sf6JfEpmh34v3Af2Uci02vzUYz6qEN6zepoRtmybWXIGXFIK8K9ylE3b+duCWqhArtg==", "license": "MIT", "dependencies": { - "@octokit/endpoint": "^10.0.0", - "@octokit/request-error": "^6.0.1", + "@octokit/endpoint": "^10.1.3", + "@octokit/request-error": "^6.1.7", "@octokit/types": "^13.6.2", "fast-content-type-parse": "^2.0.0", "universal-user-agent": "^7.0.2" @@ -2538,9 +2605,9 @@ } }, "node_modules/@octokit/request-error": { - "version": "6.1.6", - "resolved": "https://registry.npmjs.org/@octokit/request-error/-/request-error-6.1.6.tgz", - "integrity": "sha512-pqnVKYo/at0NuOjinrgcQYpEbv4snvP3bKMRqHaD9kIsk9u1LCpb2smHZi8/qJfgeNqLo5hNW4Z7FezNdEo0xg==", + "version": "6.1.7", + "resolved": "https://registry.npmjs.org/@octokit/request-error/-/request-error-6.1.7.tgz", + "integrity": "sha512-69NIppAwaauwZv6aOzb+VVLwt+0havz9GT5YplkeJv7fG7a40qpLt/yZKyiDxAhgz0EtgNdNcb96Z0u+Zyuy2g==", "license": "MIT", "dependencies": { "@octokit/types": "^13.6.2" @@ -2550,32 +2617,32 @@ } }, "node_modules/@octokit/types": { - "version": "13.7.0", - "resolved": "https://registry.npmjs.org/@octokit/types/-/types-13.7.0.tgz", - "integrity": "sha512-BXfRP+3P3IN6fd4uF3SniaHKOO4UXWBfkdR3vA8mIvaoO/wLjGN5qivUtW0QRitBHHMcfC41SLhNVYIZZE+wkA==", + "version": "13.10.0", + "resolved": "https://registry.npmjs.org/@octokit/types/-/types-13.10.0.tgz", + "integrity": "sha512-ifLaO34EbbPj0Xgro4G5lP5asESjwHracYJvVaPIyXMuiuXLlhic3S47cBdTb+jfODkTE5YtGCLt3Ay3+J97sA==", "license": "MIT", "dependencies": { - 
"@octokit/openapi-types": "^23.0.1" + "@octokit/openapi-types": "^24.2.0" } }, "node_modules/@octokit/webhooks": { - "version": "13.4.3", - "resolved": "https://registry.npmjs.org/@octokit/webhooks/-/webhooks-13.4.3.tgz", - "integrity": "sha512-Brjp2TJiTvLV8unrGWC46QgsnH641fTvH32Vp88L6Y5GkMcKMp+YLdRz/EP722GxKM0S4vbeNqAc6QeEUr8oWA==", + "version": "13.7.5", + "resolved": "https://registry.npmjs.org/@octokit/webhooks/-/webhooks-13.7.5.tgz", + "integrity": "sha512-qmmu4cfKmm58RWyDPUDoI7ls9JWw88qbqPzEi+TDBaSirHQPsixxdPU4OjPDcnDnjee7JXv525yv4qNN3BRlyg==", "license": "MIT", "dependencies": { - "@octokit/openapi-webhooks-types": "8.5.1", - "@octokit/request-error": "^6.1.6", - "@octokit/webhooks-methods": "^5.0.0" + "@octokit/openapi-webhooks-types": "10.3.0", + "@octokit/request-error": "^6.1.7", + "@octokit/webhooks-methods": "^5.1.1" }, "engines": { "node": ">= 18" } }, "node_modules/@octokit/webhooks-methods": { - "version": "5.1.0", - "resolved": "https://registry.npmjs.org/@octokit/webhooks-methods/-/webhooks-methods-5.1.0.tgz", - "integrity": "sha512-yFZa3UH11VIxYnnoOYCVoJ3q4ChuSOk2IVBBQ0O3xtKX4x9bmKb/1t+Mxixv2iUhzMdOl1qeWJqEhouXXzB3rQ==", + "version": "5.1.1", + "resolved": "https://registry.npmjs.org/@octokit/webhooks-methods/-/webhooks-methods-5.1.1.tgz", + "integrity": "sha512-NGlEHZDseJTCj8TMMFehzwa9g7On4KJMPVHDSrHxCQumL6uSQR8wIkP/qesv52fXqV1BPf4pTxwtS31ldAt9Xg==", "license": "MIT", "engines": { "node": ">= 18" @@ -3389,142 +3456,67 @@ } }, "node_modules/@semantic-release/error": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/@semantic-release/error/-/error-3.0.0.tgz", - "integrity": "sha512-5hiM4Un+tpl4cKw3lV4UgzJj+SmfNIDCLLw0TepzQxz9ZGV5ixnqkzIVF+3tp0ZHgcMKE+VNGHJjEeyFG2dcSw==", + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/@semantic-release/error/-/error-4.0.0.tgz", + "integrity": "sha512-mgdxrHTLOjOddRVYIYDo0fR3/v61GNN1YGkfbrjuIKg/uMgCd+Qzo3UAXJ+woLQQpos4pl5Esuw5A7AoNlzjUQ==", "dev": true, "license": "MIT", "engines": { - "node": ">=14.17" + "node": ">=18" } }, "node_modules/@semantic-release/exec": { - "version": "6.0.3", - "resolved": "https://registry.npmjs.org/@semantic-release/exec/-/exec-6.0.3.tgz", - "integrity": "sha512-bxAq8vLOw76aV89vxxICecEa8jfaWwYITw6X74zzlO0mc/Bgieqx9kBRz9z96pHectiTAtsCwsQcUyLYWnp3VQ==", + "version": "7.0.3", + "resolved": "https://registry.npmjs.org/@semantic-release/exec/-/exec-7.0.3.tgz", + "integrity": "sha512-uNWwPNtWi3WTcTm3fWfFQEuj8otOvwoS5m9yo2jSVHuvqdZNsOWmuL0/FqcVyZnCI32fxyYV0G7PPb/TzCH6jw==", "dev": true, "license": "MIT", "dependencies": { - "@semantic-release/error": "^3.0.0", + "@semantic-release/error": "^4.0.0", "aggregate-error": "^3.0.0", "debug": "^4.0.0", - "execa": "^5.0.0", - "lodash": "^4.17.4", - "parse-json": "^5.0.0" + "execa": "^9.0.0", + "lodash-es": "^4.17.21", + "parse-json": "^8.0.0" }, "engines": { - "node": ">=14.17" + "node": ">=20.8.1" }, "peerDependencies": { - "semantic-release": ">=18.0.0" - } - }, - "node_modules/@semantic-release/exec/node_modules/execa": { - "version": "5.1.1", - "resolved": "https://registry.npmjs.org/execa/-/execa-5.1.1.tgz", - "integrity": "sha512-8uSpZZocAZRBAPIEINJj3Lo9HyGitllczc27Eh5YYojjMFMn8yHMDMaUHE2Jqfq05D/wucwI4JGURyXt1vchyg==", - "dev": true, - "license": "MIT", - "dependencies": { - "cross-spawn": "^7.0.3", - "get-stream": "^6.0.0", - "human-signals": "^2.1.0", - "is-stream": "^2.0.0", - "merge-stream": "^2.0.0", - "npm-run-path": "^4.0.1", - "onetime": "^5.1.2", - "signal-exit": "^3.0.3", - "strip-final-newline": "^2.0.0" - }, - "engines": { - 
"node": ">=10" - }, - "funding": { - "url": "https://github.com/sindresorhus/execa?sponsor=1" - } - }, - "node_modules/@semantic-release/exec/node_modules/get-stream": { - "version": "6.0.1", - "resolved": "https://registry.npmjs.org/get-stream/-/get-stream-6.0.1.tgz", - "integrity": "sha512-ts6Wi+2j3jQjqi70w5AlN8DFnkSwC+MqmxEzdEALB2qXZYV3X/b1CTfgPLGJNMeAWxdPfU8FO1ms3NUfaHCPYg==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=10" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/@semantic-release/exec/node_modules/human-signals": { - "version": "2.1.0", - "resolved": "https://registry.npmjs.org/human-signals/-/human-signals-2.1.0.tgz", - "integrity": "sha512-B4FFZ6q/T2jhhksgkbEW3HBvWIfDW85snkQgawt07S7J5QXTk6BkNV+0yAeZrM5QpMAdYlocGoljn0sJ/WQkFw==", - "dev": true, - "license": "Apache-2.0", - "engines": { - "node": ">=10.17.0" + "semantic-release": ">=24.1.0" } }, - "node_modules/@semantic-release/exec/node_modules/is-stream": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/is-stream/-/is-stream-2.0.1.tgz", - "integrity": "sha512-hFoiJiTl63nn+kstHGBtewWSKnQLpyb155KHheA1l39uvtO9nWIop1p3udqPcUd/xbF1VLMO4n7OI6p7RbngDg==", + "node_modules/@semantic-release/exec/node_modules/index-to-position": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/index-to-position/-/index-to-position-1.0.0.tgz", + "integrity": "sha512-sCO7uaLVhRJ25vz1o8s9IFM3nVS4DkuQnyjMwiQPKvQuBYBDmb8H7zx8ki7nVh4HJQOdVWebyvLE0qt+clruxA==", "dev": true, "license": "MIT", "engines": { - "node": ">=8" + "node": ">=18" }, "funding": { "url": "https://github.com/sponsors/sindresorhus" } }, - "node_modules/@semantic-release/exec/node_modules/npm-run-path": { - "version": "4.0.1", - "resolved": "https://registry.npmjs.org/npm-run-path/-/npm-run-path-4.0.1.tgz", - "integrity": "sha512-S48WzZW777zhNIrn7gxOlISNAqi9ZC/uQFnRdbeIHhZhCA6UqpkOT8T1G7BvfdgP4Er8gF4sUbaS0i7QvIfCWw==", - "dev": true, - "license": "MIT", - "dependencies": { - "path-key": "^3.0.0" - }, - "engines": { - "node": ">=8" - } - }, - "node_modules/@semantic-release/exec/node_modules/onetime": { - "version": "5.1.2", - "resolved": "https://registry.npmjs.org/onetime/-/onetime-5.1.2.tgz", - "integrity": "sha512-kbpaSSGJTWdAY5KPVeMOKXSrPtr8C8C7wodJbcsd51jRnmD+GZu8Y0VoU6Dm5Z4vWr0Ig/1NKuWRKf7j5aaYSg==", + "node_modules/@semantic-release/exec/node_modules/parse-json": { + "version": "8.2.0", + "resolved": "https://registry.npmjs.org/parse-json/-/parse-json-8.2.0.tgz", + "integrity": "sha512-eONBZy4hm2AgxjNFd8a4nyDJnzUAH0g34xSQAwWEVGCjdZ4ZL7dKZBfq267GWP/JaS9zW62Xs2FeAdDvpHHJGQ==", "dev": true, "license": "MIT", "dependencies": { - "mimic-fn": "^2.1.0" + "@babel/code-frame": "^7.26.2", + "index-to-position": "^1.0.0", + "type-fest": "^4.37.0" }, "engines": { - "node": ">=6" + "node": ">=18" }, "funding": { "url": "https://github.com/sponsors/sindresorhus" } }, - "node_modules/@semantic-release/exec/node_modules/signal-exit": { - "version": "3.0.7", - "resolved": "https://registry.npmjs.org/signal-exit/-/signal-exit-3.0.7.tgz", - "integrity": "sha512-wnD2ZE+l+SPC/uoS0vXeE9L1+0wuaMqKlfz9AMUo38JsyLSBWSFcHR1Rri62LZc12vLr1gb3jl7iwQhgwpAbGQ==", - "dev": true, - "license": "ISC" - }, - "node_modules/@semantic-release/exec/node_modules/strip-final-newline": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/strip-final-newline/-/strip-final-newline-2.0.0.tgz", - "integrity": "sha512-BrpvfNAE3dcvq7ll3xVumzjKjZQ5tI1sEUIKr3Uoks0XUl45St3FlatVqef9prk4jRDzhW6WZg+3bk93y6pLjA==", - "dev": 
true, - "license": "MIT", - "engines": { - "node": ">=6" - } - }, "node_modules/@semantic-release/github": { "version": "11.0.1", "resolved": "https://registry.npmjs.org/@semantic-release/github/-/github-11.0.1.tgz", @@ -3556,16 +3548,6 @@ "semantic-release": ">=24.1.0" } }, - "node_modules/@semantic-release/github/node_modules/@semantic-release/error": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/@semantic-release/error/-/error-4.0.0.tgz", - "integrity": "sha512-mgdxrHTLOjOddRVYIYDo0fR3/v61GNN1YGkfbrjuIKg/uMgCd+Qzo3UAXJ+woLQQpos4pl5Esuw5A7AoNlzjUQ==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=18" - } - }, "node_modules/@semantic-release/github/node_modules/aggregate-error": { "version": "5.0.0", "resolved": "https://registry.npmjs.org/aggregate-error/-/aggregate-error-5.0.0.tgz", @@ -3653,16 +3635,6 @@ "semantic-release": ">=20.1.0" } }, - "node_modules/@semantic-release/npm/node_modules/@semantic-release/error": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/@semantic-release/error/-/error-4.0.0.tgz", - "integrity": "sha512-mgdxrHTLOjOddRVYIYDo0fR3/v61GNN1YGkfbrjuIKg/uMgCd+Qzo3UAXJ+woLQQpos4pl5Esuw5A7AoNlzjUQ==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=18" - } - }, "node_modules/@semantic-release/npm/node_modules/aggregate-error": { "version": "5.0.0", "resolved": "https://registry.npmjs.org/aggregate-error/-/aggregate-error-5.0.0.tgz", @@ -3978,27 +3950,165 @@ } }, "node_modules/@shikijs/vitepress-twoslash": { - "version": "2.2.0", - "resolved": "https://registry.npmjs.org/@shikijs/vitepress-twoslash/-/vitepress-twoslash-2.2.0.tgz", - "integrity": "sha512-B9bWj6IzSY5lmvzN1DwmrU/4D4Ay6Ds9JAodVGRI8PcpT0DQ+5FpECg9B8ybyz4DWTo+4h/8IsMDrbM8TVHMQQ==", + "version": "3.2.1", + "resolved": "https://registry.npmjs.org/@shikijs/vitepress-twoslash/-/vitepress-twoslash-3.2.1.tgz", + "integrity": "sha512-lQNmw3v9gBFYdxfCW5upcDwfQu8BTJz4D4VPN72XElBA6gW82PKNmzZDoHhVcEmgm/cRQpPjpiY6CYChtv/AEw==", "dev": true, "license": "MIT", "dependencies": { "@shikijs/twoslash": "", "floating-vue": "^5.2.2", "mdast-util-from-markdown": "^2.0.2", - "mdast-util-gfm": "^3.0.0", + "mdast-util-gfm": "^3.1.0", "mdast-util-to-hast": "^13.2.0", - "shiki": "2.2.0", - "twoslash": "^0.2.12", - "twoslash-vue": "^0.2.12", + "shiki": "3.2.1", + "twoslash": "^0.3.1", + "twoslash-vue": "^0.3.1", "vue": "^3.5.13" } }, + "node_modules/@shikijs/vitepress-twoslash/node_modules/@shikijs/core": { + "version": "3.2.1", + "resolved": "https://registry.npmjs.org/@shikijs/core/-/core-3.2.1.tgz", + "integrity": "sha512-FhsdxMWYu/C11sFisEp7FMGBtX/OSSbnXZDMBhGuUDBNTdsoZlMSgQv5f90rwvzWAdWIW6VobD+G3IrazxA6dQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "@shikijs/types": "3.2.1", + "@shikijs/vscode-textmate": "^10.0.2", + "@types/hast": "^3.0.4", + "hast-util-to-html": "^9.0.5" + } + }, + "node_modules/@shikijs/vitepress-twoslash/node_modules/@shikijs/engine-javascript": { + "version": "3.2.1", + "resolved": "https://registry.npmjs.org/@shikijs/engine-javascript/-/engine-javascript-3.2.1.tgz", + "integrity": "sha512-eMdcUzN3FMQYxOmRf2rmU8frikzoSHbQDFH2hIuXsrMO+IBOCI9BeeRkCiBkcLDHeRKbOCtYMJK3D6U32ooU9Q==", + "dev": true, + "license": "MIT", + "dependencies": { + "@shikijs/types": "3.2.1", + "@shikijs/vscode-textmate": "^10.0.2", + "oniguruma-to-es": "^4.1.0" + } + }, + "node_modules/@shikijs/vitepress-twoslash/node_modules/@shikijs/engine-oniguruma": { + "version": "3.2.1", + "resolved": 
"https://registry.npmjs.org/@shikijs/engine-oniguruma/-/engine-oniguruma-3.2.1.tgz", + "integrity": "sha512-wZZAkayEn6qu2+YjenEoFqj0OyQI64EWsNR6/71d1EkG4sxEOFooowKivsWPpaWNBu3sxAG+zPz5kzBL/SsreQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "@shikijs/types": "3.2.1", + "@shikijs/vscode-textmate": "^10.0.2" + } + }, + "node_modules/@shikijs/vitepress-twoslash/node_modules/@shikijs/langs": { + "version": "3.2.1", + "resolved": "https://registry.npmjs.org/@shikijs/langs/-/langs-3.2.1.tgz", + "integrity": "sha512-If0iDHYRSGbihiA8+7uRsgb1er1Yj11pwpX1c6HLYnizDsKAw5iaT3JXj5ZpaimXSWky/IhxTm7C6nkiYVym+A==", + "dev": true, + "license": "MIT", + "dependencies": { + "@shikijs/types": "3.2.1" + } + }, + "node_modules/@shikijs/vitepress-twoslash/node_modules/@shikijs/themes": { + "version": "3.2.1", + "resolved": "https://registry.npmjs.org/@shikijs/themes/-/themes-3.2.1.tgz", + "integrity": "sha512-k5DKJUT8IldBvAm8WcrDT5+7GA7se6lLksR+2E3SvyqGTyFMzU2F9Gb7rmD+t+Pga1MKrYFxDIeyWjMZWM6uBQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "@shikijs/types": "3.2.1" + } + }, + "node_modules/@shikijs/vitepress-twoslash/node_modules/@shikijs/types": { + "version": "3.2.1", + "resolved": "https://registry.npmjs.org/@shikijs/types/-/types-3.2.1.tgz", + "integrity": "sha512-/NTWAk4KE2M8uac0RhOsIhYQf4pdU0OywQuYDGIGAJ6Mjunxl2cGiuLkvu4HLCMn+OTTLRWkjZITp+aYJv60yA==", + "dev": true, + "license": "MIT", + "dependencies": { + "@shikijs/vscode-textmate": "^10.0.2", + "@types/hast": "^3.0.4" + } + }, + "node_modules/@shikijs/vitepress-twoslash/node_modules/oniguruma-to-es": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/oniguruma-to-es/-/oniguruma-to-es-4.1.0.tgz", + "integrity": "sha512-SNwG909cSLo4vPyyPbU/VJkEc9WOXqu2ycBlfd1UCXLqk1IijcQktSBb2yRQ2UFPsDhpkaf+C1dtT3PkLK/yWA==", + "dev": true, + "license": "MIT", + "dependencies": { + "emoji-regex-xs": "^1.0.0", + "oniguruma-parser": "^0.5.4", + "regex": "^6.0.1", + "regex-recursion": "^6.0.2" + } + }, + "node_modules/@shikijs/vitepress-twoslash/node_modules/regex": { + "version": "6.0.1", + "resolved": "https://registry.npmjs.org/regex/-/regex-6.0.1.tgz", + "integrity": "sha512-uorlqlzAKjKQZ5P+kTJr3eeJGSVroLKoHmquUj4zHWuR+hEyNqlXsSKlYYF5F4NI6nl7tWCs0apKJ0lmfsXAPA==", + "dev": true, + "license": "MIT", + "dependencies": { + "regex-utilities": "^2.3.0" + } + }, + "node_modules/@shikijs/vitepress-twoslash/node_modules/regex-recursion": { + "version": "6.0.2", + "resolved": "https://registry.npmjs.org/regex-recursion/-/regex-recursion-6.0.2.tgz", + "integrity": "sha512-0YCaSCq2VRIebiaUviZNs0cBz1kg5kVS2UKUfNIx8YVs1cN3AV7NTctO5FOKBA+UT2BPJIWZauYHPqJODG50cg==", + "dev": true, + "license": "MIT", + "dependencies": { + "regex-utilities": "^2.3.0" + } + }, + "node_modules/@shikijs/vitepress-twoslash/node_modules/shiki": { + "version": "3.2.1", + "resolved": "https://registry.npmjs.org/shiki/-/shiki-3.2.1.tgz", + "integrity": "sha512-VML/2o1/KGYkEf/stJJ+s9Ypn7jUKQPomGLGYso4JJFMFxVDyPNsjsI3MB3KLjlMOeH44gyaPdXC6rik2WXvUQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "@shikijs/core": "3.2.1", + "@shikijs/engine-javascript": "3.2.1", + "@shikijs/engine-oniguruma": "3.2.1", + "@shikijs/langs": "3.2.1", + "@shikijs/themes": "3.2.1", + "@shikijs/types": "3.2.1", + "@shikijs/vscode-textmate": "^10.0.2", + "@types/hast": "^3.0.4" + } + }, + "node_modules/@shikijs/vitepress-twoslash/node_modules/twoslash": { + "version": "0.3.1", + "resolved": "https://registry.npmjs.org/twoslash/-/twoslash-0.3.1.tgz", + "integrity": 
"sha512-OGqMTGvqXTcb92YQdwGfEdK0nZJA64Aj/ChLOelbl3TfYch2IoBST0Yx4C0LQ7Lzyqm9RpgcpgDxeXQIz4p2Kg==", + "dev": true, + "license": "MIT", + "dependencies": { + "@typescript/vfs": "^1.6.1", + "twoslash-protocol": "0.3.1" + }, + "peerDependencies": { + "typescript": "^5.5.0" + } + }, + "node_modules/@shikijs/vitepress-twoslash/node_modules/twoslash-protocol": { + "version": "0.3.1", + "resolved": "https://registry.npmjs.org/twoslash-protocol/-/twoslash-protocol-0.3.1.tgz", + "integrity": "sha512-BMePTL9OkuNISSyyMclBBhV2s9++DiOCyhhCoV5Kaht6eaWLwVjCCUJHY33eZJPsyKeZYS8Wzz0h+XI01VohVw==", + "dev": true, + "license": "MIT" + }, "node_modules/@shikijs/vscode-textmate": { - "version": "10.0.1", - "resolved": "https://registry.npmjs.org/@shikijs/vscode-textmate/-/vscode-textmate-10.0.1.tgz", - "integrity": "sha512-fTIQwLF+Qhuws31iw7Ncl1R3HUDtGwIipiJ9iU+UsDUwMhegFcQKQHd51nZjb7CArq0MvON8rbgCGQYWHUKAdg==", + "version": "10.0.2", + "resolved": "https://registry.npmjs.org/@shikijs/vscode-textmate/-/vscode-textmate-10.0.2.tgz", + "integrity": "sha512-83yeghZ2xxin3Nj8z1NMd/NCuca+gsYXswywDy5bHvwlWL8tpTQmzGeUuHd9FC3E/SBEMvzJRwWEOz5gGes9Qg==", "dev": true, "license": "MIT" }, @@ -4029,13 +4139,13 @@ } }, "node_modules/@stylistic/eslint-plugin": { - "version": "3.0.1", - "resolved": "https://registry.npmjs.org/@stylistic/eslint-plugin/-/eslint-plugin-3.0.1.tgz", - "integrity": "sha512-rQ3tcT5N2cynofJfbjUsnL4seoewTaOVBLyUEwtNldo7iNMPo3h/GUQk+Cl3iHEWwRxjq2wuH6q0FufQrbVL1A==", + "version": "4.2.0", + "resolved": "https://registry.npmjs.org/@stylistic/eslint-plugin/-/eslint-plugin-4.2.0.tgz", + "integrity": "sha512-8hXezgz7jexGHdo5WN6JBEIPHCSFyyU4vgbxevu4YLVS5vl+sxqAAGyXSzfNDyR6xMNSH5H1x67nsXcYMOHtZA==", "dev": true, "license": "MIT", "dependencies": { - "@typescript-eslint/utils": "^8.13.0", + "@typescript-eslint/utils": "^8.23.0", "eslint-visitor-keys": "^4.2.0", "espree": "^10.3.0", "estraverse": "^5.3.0", @@ -4045,7 +4155,7 @@ "node": "^18.18.0 || ^20.9.0 || >=21.1.0" }, "peerDependencies": { - "eslint": ">=8.40.0" + "eslint": ">=9.0.0" } }, "node_modules/@szmarczak/http-timer": { @@ -4074,6 +4184,17 @@ "url": "https://github.com/tinyhttp/tinyhttp?sponsor=1" } }, + "node_modules/@tybys/wasm-util": { + "version": "0.9.0", + "resolved": "https://registry.npmjs.org/@tybys/wasm-util/-/wasm-util-0.9.0.tgz", + "integrity": "sha512-6+7nlbMVX/PVDCwaIQ8nTOPveOcFLSt8GcXdx8hD0bt39uWxYT88uXzqTd4fTvqta7oeUJqudepapKNt2DYJFw==", + "dev": true, + "license": "MIT", + "optional": true, + "dependencies": { + "tslib": "^2.4.0" + } + }, "node_modules/@types/async-retry": { "version": "1.4.9", "resolved": "https://registry.npmjs.org/@types/async-retry/-/async-retry-1.4.9.tgz", @@ -4252,9 +4373,9 @@ "license": "MIT" }, "node_modules/@types/node": { - "version": "22.12.0", - "resolved": "https://registry.npmjs.org/@types/node/-/node-22.12.0.tgz", - "integrity": "sha512-Fll2FZ1riMjNmlmJOdAyY5pUbkftXslB5DgEzlIuNaiWhXd00FhWxVC/r4yV/4wBb9JfImTu+jiSvXTkJ7F/gA==", + "version": "22.13.11", + "resolved": "https://registry.npmjs.org/@types/node/-/node-22.13.11.tgz", + "integrity": "sha512-iEUCUJoU0i3VnrCmgoWCXttklWcvoCIx4jzcP22fioIVSdTmjgoEvmAO/QPw6TcS9k5FrNgn4w7q5lGOd1CT5g==", "dev": true, "license": "MIT", "dependencies": { @@ -4359,21 +4480,21 @@ } }, "node_modules/@typescript-eslint/eslint-plugin": { - "version": "8.22.0", - "resolved": "https://registry.npmjs.org/@typescript-eslint/eslint-plugin/-/eslint-plugin-8.22.0.tgz", - "integrity": "sha512-4Uta6REnz/xEJMvwf72wdUnC3rr4jAQf5jnTkeRQ9b6soxLxhDEbS/pfMPoJLDfFPNVRdryqWUIV/2GZzDJFZw==", + 
"version": "8.27.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/eslint-plugin/-/eslint-plugin-8.27.0.tgz", + "integrity": "sha512-4henw4zkePi5p252c8ncBLzLce52SEUz2Ebj8faDnuUXz2UuHEONYcJ+G0oaCF+bYCWVZtrGzq3FD7YXetmnSA==", "dev": true, "license": "MIT", "dependencies": { "@eslint-community/regexpp": "^4.10.0", - "@typescript-eslint/scope-manager": "8.22.0", - "@typescript-eslint/type-utils": "8.22.0", - "@typescript-eslint/utils": "8.22.0", - "@typescript-eslint/visitor-keys": "8.22.0", + "@typescript-eslint/scope-manager": "8.27.0", + "@typescript-eslint/type-utils": "8.27.0", + "@typescript-eslint/utils": "8.27.0", + "@typescript-eslint/visitor-keys": "8.27.0", "graphemer": "^1.4.0", "ignore": "^5.3.1", "natural-compare": "^1.4.0", - "ts-api-utils": "^2.0.0" + "ts-api-utils": "^2.0.1" }, "engines": { "node": "^18.18.0 || ^20.9.0 || >=21.1.0" @@ -4385,7 +4506,7 @@ "peerDependencies": { "@typescript-eslint/parser": "^8.0.0 || ^8.0.0-alpha.0", "eslint": "^8.57.0 || ^9.0.0", - "typescript": ">=4.8.4 <5.8.0" + "typescript": ">=4.8.4 <5.9.0" } }, "node_modules/@typescript-eslint/eslint-plugin/node_modules/ignore": { @@ -4399,16 +4520,16 @@ } }, "node_modules/@typescript-eslint/parser": { - "version": "8.22.0", - "resolved": "https://registry.npmjs.org/@typescript-eslint/parser/-/parser-8.22.0.tgz", - "integrity": "sha512-MqtmbdNEdoNxTPzpWiWnqNac54h8JDAmkWtJExBVVnSrSmi9z+sZUt0LfKqk9rjqmKOIeRhO4fHHJ1nQIjduIQ==", + "version": "8.27.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/parser/-/parser-8.27.0.tgz", + "integrity": "sha512-XGwIabPallYipmcOk45DpsBSgLC64A0yvdAkrwEzwZ2viqGqRUJ8eEYoPz0CWnutgAFbNMPdsGGvzjSmcWVlEA==", "dev": true, "license": "MIT", "dependencies": { - "@typescript-eslint/scope-manager": "8.22.0", - "@typescript-eslint/types": "8.22.0", - "@typescript-eslint/typescript-estree": "8.22.0", - "@typescript-eslint/visitor-keys": "8.22.0", + "@typescript-eslint/scope-manager": "8.27.0", + "@typescript-eslint/types": "8.27.0", + "@typescript-eslint/typescript-estree": "8.27.0", + "@typescript-eslint/visitor-keys": "8.27.0", "debug": "^4.3.4" }, "engines": { @@ -4420,18 +4541,18 @@ }, "peerDependencies": { "eslint": "^8.57.0 || ^9.0.0", - "typescript": ">=4.8.4 <5.8.0" + "typescript": ">=4.8.4 <5.9.0" } }, "node_modules/@typescript-eslint/scope-manager": { - "version": "8.22.0", - "resolved": "https://registry.npmjs.org/@typescript-eslint/scope-manager/-/scope-manager-8.22.0.tgz", - "integrity": "sha512-/lwVV0UYgkj7wPSw0o8URy6YI64QmcOdwHuGuxWIYznO6d45ER0wXUbksr9pYdViAofpUCNJx/tAzNukgvaaiQ==", + "version": "8.27.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/scope-manager/-/scope-manager-8.27.0.tgz", + "integrity": "sha512-8oI9GwPMQmBryaaxG1tOZdxXVeMDte6NyJA4i7/TWa4fBwgnAXYlIQP+uYOeqAaLJ2JRxlG9CAyL+C+YE9Xknw==", "dev": true, "license": "MIT", "dependencies": { - "@typescript-eslint/types": "8.22.0", - "@typescript-eslint/visitor-keys": "8.22.0" + "@typescript-eslint/types": "8.27.0", + "@typescript-eslint/visitor-keys": "8.27.0" }, "engines": { "node": "^18.18.0 || ^20.9.0 || >=21.1.0" @@ -4442,16 +4563,16 @@ } }, "node_modules/@typescript-eslint/type-utils": { - "version": "8.22.0", - "resolved": "https://registry.npmjs.org/@typescript-eslint/type-utils/-/type-utils-8.22.0.tgz", - "integrity": "sha512-NzE3aB62fDEaGjaAYZE4LH7I1MUwHooQ98Byq0G0y3kkibPJQIXVUspzlFOmOfHhiDLwKzMlWxaNv+/qcZurJA==", + "version": "8.27.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/type-utils/-/type-utils-8.27.0.tgz", + "integrity": 
"sha512-wVArTVcz1oJOIEJxui/nRhV0TXzD/zMSOYi/ggCfNq78EIszddXcJb7r4RCp/oBrjt8n9A0BSxRMKxHftpDxDA==", "dev": true, "license": "MIT", "dependencies": { - "@typescript-eslint/typescript-estree": "8.22.0", - "@typescript-eslint/utils": "8.22.0", + "@typescript-eslint/typescript-estree": "8.27.0", + "@typescript-eslint/utils": "8.27.0", "debug": "^4.3.4", - "ts-api-utils": "^2.0.0" + "ts-api-utils": "^2.0.1" }, "engines": { "node": "^18.18.0 || ^20.9.0 || >=21.1.0" @@ -4462,15 +4583,84 @@ }, "peerDependencies": { "eslint": "^8.57.0 || ^9.0.0", - "typescript": ">=4.8.4 <5.8.0" + "typescript": ">=4.8.4 <5.9.0" } }, "node_modules/@typescript-eslint/types": { - "version": "8.22.0", - "resolved": "https://registry.npmjs.org/@typescript-eslint/types/-/types-8.22.0.tgz", - "integrity": "sha512-0S4M4baNzp612zwpD4YOieP3VowOARgK2EkN/GBn95hpyF8E2fbMT55sRHWBq+Huaqk3b3XK+rxxlM8sPgGM6A==", + "version": "8.27.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/types/-/types-8.27.0.tgz", + "integrity": "sha512-/6cp9yL72yUHAYq9g6DsAU+vVfvQmd1a8KyA81uvfDE21O2DwQ/qxlM4AR8TSdAu+kJLBDrEHKC5/W2/nxsY0A==", + "dev": true, + "license": "MIT", + "engines": { + "node": "^18.18.0 || ^20.9.0 || >=21.1.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/typescript-eslint" + } + }, + "node_modules/@typescript-eslint/typescript-estree": { + "version": "8.27.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/typescript-estree/-/typescript-estree-8.27.0.tgz", + "integrity": "sha512-BnKq8cqPVoMw71O38a1tEb6iebEgGA80icSxW7g+kndx0o6ot6696HjG7NdgfuAVmVEtwXUr3L8R9ZuVjoQL6A==", + "dev": true, + "license": "MIT", + "dependencies": { + "@typescript-eslint/types": "8.27.0", + "@typescript-eslint/visitor-keys": "8.27.0", + "debug": "^4.3.4", + "fast-glob": "^3.3.2", + "is-glob": "^4.0.3", + "minimatch": "^9.0.4", + "semver": "^7.6.0", + "ts-api-utils": "^2.0.1" + }, + "engines": { + "node": "^18.18.0 || ^20.9.0 || >=21.1.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/typescript-eslint" + }, + "peerDependencies": { + "typescript": ">=4.8.4 <5.9.0" + } + }, + "node_modules/@typescript-eslint/utils": { + "version": "8.27.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/utils/-/utils-8.27.0.tgz", + "integrity": "sha512-njkodcwH1yvmo31YWgRHNb/x1Xhhq4/m81PhtvmRngD8iHPehxffz1SNCO+kwaePhATC+kOa/ggmvPoPza5i0Q==", + "dev": true, + "license": "MIT", + "dependencies": { + "@eslint-community/eslint-utils": "^4.4.0", + "@typescript-eslint/scope-manager": "8.27.0", + "@typescript-eslint/types": "8.27.0", + "@typescript-eslint/typescript-estree": "8.27.0" + }, + "engines": { + "node": "^18.18.0 || ^20.9.0 || >=21.1.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/typescript-eslint" + }, + "peerDependencies": { + "eslint": "^8.57.0 || ^9.0.0", + "typescript": ">=4.8.4 <5.9.0" + } + }, + "node_modules/@typescript-eslint/visitor-keys": { + "version": "8.27.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/visitor-keys/-/visitor-keys-8.27.0.tgz", + "integrity": "sha512-WsXQwMkILJvffP6z4U3FYJPlbf/j07HIxmDjZpbNvBJkMfvwXj5ACRkkHwBDvLBbDbtX5TdU64/rcvKJ/vuInQ==", "dev": true, "license": "MIT", + "dependencies": { + "@typescript-eslint/types": "8.27.0", + "eslint-visitor-keys": "^4.2.0" + }, "engines": { "node": "^18.18.0 || ^20.9.0 || >=21.1.0" }, @@ -4479,94 +4669,182 @@ "url": "https://opencollective.com/typescript-eslint" } }, - "node_modules/@typescript-eslint/typescript-estree": { - 
"version": "8.22.0", - "resolved": "https://registry.npmjs.org/@typescript-eslint/typescript-estree/-/typescript-estree-8.22.0.tgz", - "integrity": "sha512-SJX99NAS2ugGOzpyhMza/tX+zDwjvwAtQFLsBo3GQxiGcvaKlqGBkmZ+Y1IdiSi9h4Q0Lr5ey+Cp9CGWNY/F/w==", + "node_modules/@typescript/vfs": { + "version": "1.6.1", + "resolved": "https://registry.npmjs.org/@typescript/vfs/-/vfs-1.6.1.tgz", + "integrity": "sha512-JwoxboBh7Oz1v38tPbkrZ62ZXNHAk9bJ7c9x0eI5zBfBnBYGhURdbnh7Z4smN/MV48Y5OCcZb58n972UtbazsA==", + "dev": true, + "license": "MIT", + "dependencies": { + "debug": "^4.1.1" + }, + "peerDependencies": { + "typescript": "*" + } + }, + "node_modules/@ungap/structured-clone": { + "version": "1.2.1", + "resolved": "https://registry.npmjs.org/@ungap/structured-clone/-/structured-clone-1.2.1.tgz", + "integrity": "sha512-fEzPV3hSkSMltkw152tJKNARhOupqbH96MZWyRjNaYZOMIzbrTeQDG+MTc6Mr2pgzFQzFxAfmhGDNP5QK++2ZA==", + "dev": true, + "license": "ISC" + }, + "node_modules/@unrs/rspack-resolver-binding-darwin-arm64": { + "version": "1.2.2", + "resolved": "https://registry.npmjs.org/@unrs/rspack-resolver-binding-darwin-arm64/-/rspack-resolver-binding-darwin-arm64-1.2.2.tgz", + "integrity": "sha512-i7z0B+C0P8Q63O/5PXJAzeFtA1ttY3OR2VSJgGv18S+PFNwD98xHgAgPOT1H5HIV6jlQP8Avzbp09qxJUdpPNw==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "darwin" + ] + }, + "node_modules/@unrs/rspack-resolver-binding-darwin-x64": { + "version": "1.2.2", + "resolved": "https://registry.npmjs.org/@unrs/rspack-resolver-binding-darwin-x64/-/rspack-resolver-binding-darwin-x64-1.2.2.tgz", + "integrity": "sha512-YEdFzPjIbDUCfmehC6eS+AdJYtFWY35YYgWUnqqTM2oe/N58GhNy5yRllxYhxwJ9GcfHoNc6Ubze1yjkNv+9Qg==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "darwin" + ] + }, + "node_modules/@unrs/rspack-resolver-binding-freebsd-x64": { + "version": "1.2.2", + "resolved": "https://registry.npmjs.org/@unrs/rspack-resolver-binding-freebsd-x64/-/rspack-resolver-binding-freebsd-x64-1.2.2.tgz", + "integrity": "sha512-TU4ntNXDgPN2giQyyzSnGWf/dVCem5lvwxg0XYvsvz35h5H19WrhTmHgbrULMuypCB3aHe1enYUC9rPLDw45mA==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "freebsd" + ] + }, + "node_modules/@unrs/rspack-resolver-binding-linux-arm-gnueabihf": { + "version": "1.2.2", + "resolved": "https://registry.npmjs.org/@unrs/rspack-resolver-binding-linux-arm-gnueabihf/-/rspack-resolver-binding-linux-arm-gnueabihf-1.2.2.tgz", + "integrity": "sha512-ik3w4/rU6RujBvNWiDnKdXi1smBhqxEDhccNi/j2rHaMjm0Fk49KkJ6XKsoUnD2kZ5xaMJf9JjailW/okfUPIw==", + "cpu": [ + "arm" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@unrs/rspack-resolver-binding-linux-arm64-gnu": { + "version": "1.2.2", + "resolved": "https://registry.npmjs.org/@unrs/rspack-resolver-binding-linux-arm64-gnu/-/rspack-resolver-binding-linux-arm64-gnu-1.2.2.tgz", + "integrity": "sha512-fp4Azi8kHz6TX8SFmKfyScZrMLfp++uRm2srpqRjsRZIIBzH74NtSkdEUHImR4G7f7XJ+sVZjCc6KDDK04YEpQ==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@unrs/rspack-resolver-binding-linux-arm64-musl": { + "version": "1.2.2", + "resolved": "https://registry.npmjs.org/@unrs/rspack-resolver-binding-linux-arm64-musl/-/rspack-resolver-binding-linux-arm64-musl-1.2.2.tgz", + "integrity": "sha512-gMiG3DCFioJxdGBzhlL86KcFgt9HGz0iDhw0YVYPsShItpN5pqIkNrI+L/Q/0gfDiGrfcE0X3VANSYIPmqEAlQ==", + "cpu": [ + 
"arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@unrs/rspack-resolver-binding-linux-x64-gnu": { + "version": "1.2.2", + "resolved": "https://registry.npmjs.org/@unrs/rspack-resolver-binding-linux-x64-gnu/-/rspack-resolver-binding-linux-x64-gnu-1.2.2.tgz", + "integrity": "sha512-n/4n2CxaUF9tcaJxEaZm+lqvaw2gflfWQ1R9I7WQgYkKEKbRKbpG/R3hopYdUmLSRI4xaW1Cy0Bz40eS2Yi4Sw==", + "cpu": [ + "x64" + ], "dev": true, "license": "MIT", - "dependencies": { - "@typescript-eslint/types": "8.22.0", - "@typescript-eslint/visitor-keys": "8.22.0", - "debug": "^4.3.4", - "fast-glob": "^3.3.2", - "is-glob": "^4.0.3", - "minimatch": "^9.0.4", - "semver": "^7.6.0", - "ts-api-utils": "^2.0.0" - }, - "engines": { - "node": "^18.18.0 || ^20.9.0 || >=21.1.0" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/typescript-eslint" - }, - "peerDependencies": { - "typescript": ">=4.8.4 <5.8.0" - } + "optional": true, + "os": [ + "linux" + ] }, - "node_modules/@typescript-eslint/utils": { - "version": "8.22.0", - "resolved": "https://registry.npmjs.org/@typescript-eslint/utils/-/utils-8.22.0.tgz", - "integrity": "sha512-T8oc1MbF8L+Bk2msAvCUzjxVB2Z2f+vXYfcucE2wOmYs7ZUwco5Ep0fYZw8quNwOiw9K8GYVL+Kgc2pETNTLOg==", + "node_modules/@unrs/rspack-resolver-binding-linux-x64-musl": { + "version": "1.2.2", + "resolved": "https://registry.npmjs.org/@unrs/rspack-resolver-binding-linux-x64-musl/-/rspack-resolver-binding-linux-x64-musl-1.2.2.tgz", + "integrity": "sha512-cHyhAr6rlYYbon1L2Ag449YCj3p6XMfcYTP0AQX+KkQo025d1y/VFtPWvjMhuEsE2lLvtHm7GdJozj6BOMtzVg==", + "cpu": [ + "x64" + ], "dev": true, "license": "MIT", - "dependencies": { - "@eslint-community/eslint-utils": "^4.4.0", - "@typescript-eslint/scope-manager": "8.22.0", - "@typescript-eslint/types": "8.22.0", - "@typescript-eslint/typescript-estree": "8.22.0" - }, - "engines": { - "node": "^18.18.0 || ^20.9.0 || >=21.1.0" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/typescript-eslint" - }, - "peerDependencies": { - "eslint": "^8.57.0 || ^9.0.0", - "typescript": ">=4.8.4 <5.8.0" - } + "optional": true, + "os": [ + "linux" + ] }, - "node_modules/@typescript-eslint/visitor-keys": { - "version": "8.22.0", - "resolved": "https://registry.npmjs.org/@typescript-eslint/visitor-keys/-/visitor-keys-8.22.0.tgz", - "integrity": "sha512-AWpYAXnUgvLNabGTy3uBylkgZoosva/miNd1I8Bz3SjotmQPbVqhO4Cczo8AsZ44XVErEBPr/CRSgaj8sG7g0w==", + "node_modules/@unrs/rspack-resolver-binding-wasm32-wasi": { + "version": "1.2.2", + "resolved": "https://registry.npmjs.org/@unrs/rspack-resolver-binding-wasm32-wasi/-/rspack-resolver-binding-wasm32-wasi-1.2.2.tgz", + "integrity": "sha512-eogDKuICghDLGc32FtP+WniG38IB1RcGOGz0G3z8406dUdjJvxfHGuGs/dSlM9YEp/v0lEqhJ4mBu6X2nL9pog==", + "cpu": [ + "wasm32" + ], "dev": true, "license": "MIT", + "optional": true, "dependencies": { - "@typescript-eslint/types": "8.22.0", - "eslint-visitor-keys": "^4.2.0" + "@napi-rs/wasm-runtime": "^0.2.7" }, "engines": { - "node": "^18.18.0 || ^20.9.0 || >=21.1.0" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/typescript-eslint" + "node": ">=14.0.0" } }, - "node_modules/@typescript/vfs": { - "version": "1.6.0", - "resolved": "https://registry.npmjs.org/@typescript/vfs/-/vfs-1.6.0.tgz", - "integrity": "sha512-hvJUjNVeBMp77qPINuUvYXj4FyWeeMMKZkxEATEU3hqBAQ7qdTBCUFT7Sp0Zu0faeEtFf+ldXxMEDr/bk73ISg==", + "node_modules/@unrs/rspack-resolver-binding-win32-arm64-msvc": { + "version": 
"1.2.2", + "resolved": "https://registry.npmjs.org/@unrs/rspack-resolver-binding-win32-arm64-msvc/-/rspack-resolver-binding-win32-arm64-msvc-1.2.2.tgz", + "integrity": "sha512-7sWRJumhpXSi2lccX8aQpfFXHsSVASdWndLv8AmD8nDRA/5PBi8IplQVZNx2mYRx6+Bp91Z00kuVqpXO9NfCTg==", + "cpu": [ + "arm64" + ], "dev": true, "license": "MIT", - "dependencies": { - "debug": "^4.1.1" - }, - "peerDependencies": { - "typescript": "*" - } + "optional": true, + "os": [ + "win32" + ] }, - "node_modules/@ungap/structured-clone": { - "version": "1.2.1", - "resolved": "https://registry.npmjs.org/@ungap/structured-clone/-/structured-clone-1.2.1.tgz", - "integrity": "sha512-fEzPV3hSkSMltkw152tJKNARhOupqbH96MZWyRjNaYZOMIzbrTeQDG+MTc6Mr2pgzFQzFxAfmhGDNP5QK++2ZA==", + "node_modules/@unrs/rspack-resolver-binding-win32-x64-msvc": { + "version": "1.2.2", + "resolved": "https://registry.npmjs.org/@unrs/rspack-resolver-binding-win32-x64-msvc/-/rspack-resolver-binding-win32-x64-msvc-1.2.2.tgz", + "integrity": "sha512-hewo/UMGP1a7O6FG/ThcPzSJdm/WwrYDNkdGgWl6M18H6K6MSitklomWpT9MUtT5KGj++QJb06va/14QBC4pvw==", + "cpu": [ + "x64" + ], "dev": true, - "license": "ISC" + "license": "MIT", + "optional": true, + "os": [ + "win32" + ] }, "node_modules/@vitejs/plugin-vue": { "version": "5.2.1", @@ -4583,9 +4861,9 @@ } }, "node_modules/@vitest/coverage-v8": { - "version": "3.0.4", - "resolved": "https://registry.npmjs.org/@vitest/coverage-v8/-/coverage-v8-3.0.4.tgz", - "integrity": "sha512-f0twgRCHgbs24Dp8cLWagzcObXMcuKtAwgxjJV/nnysPAJJk1JiKu/W0gIehZLmkljhJXU/E0/dmuQzsA/4jhA==", + "version": "3.0.9", + "resolved": "https://registry.npmjs.org/@vitest/coverage-v8/-/coverage-v8-3.0.9.tgz", + "integrity": "sha512-15OACZcBtQ34keIEn19JYTVuMFTlFrClclwWjHo/IRPg/8ELpkgNTl0o7WLP9WO9XGH6+tip9CPYtEOrIDJvBA==", "dev": true, "license": "MIT", "dependencies": { @@ -4606,8 +4884,8 @@ "url": "https://opencollective.com/vitest" }, "peerDependencies": { - "@vitest/browser": "3.0.4", - "vitest": "3.0.4" + "@vitest/browser": "3.0.9", + "vitest": "3.0.9" }, "peerDependenciesMeta": { "@vitest/browser": { @@ -4616,15 +4894,15 @@ } }, "node_modules/@vitest/expect": { - "version": "3.0.4", - "resolved": "https://registry.npmjs.org/@vitest/expect/-/expect-3.0.4.tgz", - "integrity": "sha512-Nm5kJmYw6P2BxhJPkO3eKKhGYKRsnqJqf+r0yOGRKpEP+bSCBDsjXgiu1/5QFrnPMEgzfC38ZEjvCFgaNBC0Eg==", + "version": "3.0.9", + "resolved": "https://registry.npmjs.org/@vitest/expect/-/expect-3.0.9.tgz", + "integrity": "sha512-5eCqRItYgIML7NNVgJj6TVCmdzE7ZVgJhruW0ziSQV4V7PvLkDL1bBkBdcTs/VuIz0IxPb5da1IDSqc1TR9eig==", "dev": true, "license": "MIT", "dependencies": { - "@vitest/spy": "3.0.4", - "@vitest/utils": "3.0.4", - "chai": "^5.1.2", + "@vitest/spy": "3.0.9", + "@vitest/utils": "3.0.9", + "chai": "^5.2.0", "tinyrainbow": "^2.0.0" }, "funding": { @@ -4632,13 +4910,13 @@ } }, "node_modules/@vitest/mocker": { - "version": "3.0.4", - "resolved": "https://registry.npmjs.org/@vitest/mocker/-/mocker-3.0.4.tgz", - "integrity": "sha512-gEef35vKafJlfQbnyOXZ0Gcr9IBUsMTyTLXsEQwuyYAerpHqvXhzdBnDFuHLpFqth3F7b6BaFr4qV/Cs1ULx5A==", + "version": "3.0.9", + "resolved": "https://registry.npmjs.org/@vitest/mocker/-/mocker-3.0.9.tgz", + "integrity": "sha512-ryERPIBOnvevAkTq+L1lD+DTFBRcjueL9lOUfXsLfwP92h4e+Heb+PjiqS3/OURWPtywfafK0kj++yDFjWUmrA==", "dev": true, "license": "MIT", "dependencies": { - "@vitest/spy": "3.0.4", + "@vitest/spy": "3.0.9", "estree-walker": "^3.0.3", "magic-string": "^0.30.17" }, @@ -4669,9 +4947,9 @@ } }, "node_modules/@vitest/pretty-format": { - "version": "3.0.4", - "resolved": 
"https://registry.npmjs.org/@vitest/pretty-format/-/pretty-format-3.0.4.tgz", - "integrity": "sha512-ts0fba+dEhK2aC9PFuZ9LTpULHpY/nd6jhAQ5IMU7Gaj7crPCTdCFfgvXxruRBLFS+MLraicCuFXxISEq8C93g==", + "version": "3.0.9", + "resolved": "https://registry.npmjs.org/@vitest/pretty-format/-/pretty-format-3.0.9.tgz", + "integrity": "sha512-OW9F8t2J3AwFEwENg3yMyKWweF7oRJlMyHOMIhO5F3n0+cgQAJZBjNgrF8dLwFTEXl5jUqBLXd9QyyKv8zEcmA==", "dev": true, "license": "MIT", "dependencies": { @@ -4682,38 +4960,38 @@ } }, "node_modules/@vitest/runner": { - "version": "3.0.4", - "resolved": "https://registry.npmjs.org/@vitest/runner/-/runner-3.0.4.tgz", - "integrity": "sha512-dKHzTQ7n9sExAcWH/0sh1elVgwc7OJ2lMOBrAm73J7AH6Pf9T12Zh3lNE1TETZaqrWFXtLlx3NVrLRb5hCK+iw==", + "version": "3.0.9", + "resolved": "https://registry.npmjs.org/@vitest/runner/-/runner-3.0.9.tgz", + "integrity": "sha512-NX9oUXgF9HPfJSwl8tUZCMP1oGx2+Sf+ru6d05QjzQz4OwWg0psEzwY6VexP2tTHWdOkhKHUIZH+fS6nA7jfOw==", "dev": true, "license": "MIT", "dependencies": { - "@vitest/utils": "3.0.4", - "pathe": "^2.0.2" + "@vitest/utils": "3.0.9", + "pathe": "^2.0.3" }, "funding": { "url": "https://opencollective.com/vitest" } }, "node_modules/@vitest/snapshot": { - "version": "3.0.4", - "resolved": "https://registry.npmjs.org/@vitest/snapshot/-/snapshot-3.0.4.tgz", - "integrity": "sha512-+p5knMLwIk7lTQkM3NonZ9zBewzVp9EVkVpvNta0/PlFWpiqLaRcF4+33L1it3uRUCh0BGLOaXPPGEjNKfWb4w==", + "version": "3.0.9", + "resolved": "https://registry.npmjs.org/@vitest/snapshot/-/snapshot-3.0.9.tgz", + "integrity": "sha512-AiLUiuZ0FuA+/8i19mTYd+re5jqjEc2jZbgJ2up0VY0Ddyyxg/uUtBDpIFAy4uzKaQxOW8gMgBdAJJ2ydhu39A==", "dev": true, "license": "MIT", "dependencies": { - "@vitest/pretty-format": "3.0.4", + "@vitest/pretty-format": "3.0.9", "magic-string": "^0.30.17", - "pathe": "^2.0.2" + "pathe": "^2.0.3" }, "funding": { "url": "https://opencollective.com/vitest" } }, "node_modules/@vitest/spy": { - "version": "3.0.4", - "resolved": "https://registry.npmjs.org/@vitest/spy/-/spy-3.0.4.tgz", - "integrity": "sha512-sXIMF0oauYyUy2hN49VFTYodzEAu744MmGcPR3ZBsPM20G+1/cSW/n1U+3Yu/zHxX2bIDe1oJASOkml+osTU6Q==", + "version": "3.0.9", + "resolved": "https://registry.npmjs.org/@vitest/spy/-/spy-3.0.9.tgz", + "integrity": "sha512-/CcK2UDl0aQ2wtkp3YVWldrpLRNCfVcIOFGlVGKO4R5eajsH393Z1yiXLVQ7vWsj26JOEjeZI0x5sm5P4OGUNQ==", "dev": true, "license": "MIT", "dependencies": { @@ -4724,36 +5002,36 @@ } }, "node_modules/@vitest/ui": { - "version": "3.0.4", - "resolved": "https://registry.npmjs.org/@vitest/ui/-/ui-3.0.4.tgz", - "integrity": "sha512-e+s2F9e9FUURkZ5aFIe1Fi3Y8M7UF6gEuShcaV/ur7y/Ldri+1tzWQ1TJq9Vas42NXnXvCAIrU39Z4U2RyET6g==", + "version": "3.0.9", + "resolved": "https://registry.npmjs.org/@vitest/ui/-/ui-3.0.9.tgz", + "integrity": "sha512-FpZD4aIv/qNpwkV3XbLV6xldWFHMgoNWAJEgg5GmpObmAOLAErpYjew9dDwXdYdKOS3iZRKdwI+P3JOJcYeUBg==", "dev": true, "license": "MIT", "dependencies": { - "@vitest/utils": "3.0.4", + "@vitest/utils": "3.0.9", "fflate": "^0.8.2", - "flatted": "^3.3.2", - "pathe": "^2.0.2", - "sirv": "^3.0.0", - "tinyglobby": "^0.2.10", + "flatted": "^3.3.3", + "pathe": "^2.0.3", + "sirv": "^3.0.1", + "tinyglobby": "^0.2.12", "tinyrainbow": "^2.0.0" }, "funding": { "url": "https://opencollective.com/vitest" }, "peerDependencies": { - "vitest": "3.0.4" + "vitest": "3.0.9" } }, "node_modules/@vitest/utils": { - "version": "3.0.4", - "resolved": "https://registry.npmjs.org/@vitest/utils/-/utils-3.0.4.tgz", - "integrity": 
"sha512-8BqC1ksYsHtbWH+DfpOAKrFw3jl3Uf9J7yeFh85Pz52IWuh1hBBtyfEbRNNZNjl8H8A5yMLH9/t+k7HIKzQcZQ==", + "version": "3.0.9", + "resolved": "https://registry.npmjs.org/@vitest/utils/-/utils-3.0.9.tgz", + "integrity": "sha512-ilHM5fHhZ89MCp5aAaM9uhfl1c2JdxVxl3McqsdVyVNN6JffnEen8UMCdRTzOhGXNQGo5GNL9QugHrz727Wnng==", "dev": true, "license": "MIT", "dependencies": { - "@vitest/pretty-format": "3.0.4", - "loupe": "^3.1.2", + "@vitest/pretty-format": "3.0.9", + "loupe": "^3.1.3", "tinyrainbow": "^2.0.0" }, "funding": { @@ -4761,19 +5039,19 @@ } }, "node_modules/@volar/language-core": { - "version": "2.4.11", - "resolved": "https://registry.npmjs.org/@volar/language-core/-/language-core-2.4.11.tgz", - "integrity": "sha512-lN2C1+ByfW9/JRPpqScuZt/4OrUUse57GLI6TbLgTIqBVemdl1wNcZ1qYGEo2+Gw8coYLgCy7SuKqn6IrQcQgg==", + "version": "2.4.12", + "resolved": "https://registry.npmjs.org/@volar/language-core/-/language-core-2.4.12.tgz", + "integrity": "sha512-RLrFdXEaQBWfSnYGVxvR2WrO6Bub0unkdHYIdC31HzIEqATIuuhRRzYu76iGPZ6OtA4Au1SnW0ZwIqPP217YhA==", "dev": true, "license": "MIT", "dependencies": { - "@volar/source-map": "2.4.11" + "@volar/source-map": "2.4.12" } }, "node_modules/@volar/source-map": { - "version": "2.4.11", - "resolved": "https://registry.npmjs.org/@volar/source-map/-/source-map-2.4.11.tgz", - "integrity": "sha512-ZQpmafIGvaZMn/8iuvCFGrW3smeqkq/IIh9F1SdSx9aUl0J4Iurzd6/FhmjNO5g2ejF3rT45dKskgXWiofqlZQ==", + "version": "2.4.12", + "resolved": "https://registry.npmjs.org/@volar/source-map/-/source-map-2.4.12.tgz", + "integrity": "sha512-bUFIKvn2U0AWojOaqf63ER0N/iHIBYZPpNGogfLPQ68F5Eet6FnLlyho7BS0y2HJ1jFhSif7AcuTx1TqsCzRzw==", "dev": true, "license": "MIT" }, @@ -4879,17 +5157,17 @@ } }, "node_modules/@vue/language-core": { - "version": "2.1.10", - "resolved": "https://registry.npmjs.org/@vue/language-core/-/language-core-2.1.10.tgz", - "integrity": "sha512-DAI289d0K3AB5TUG3xDp9OuQ71CnrujQwJrQnfuZDwo6eGNf0UoRlPuaVNO+Zrn65PC3j0oB2i7mNmVPggeGeQ==", + "version": "2.2.4", + "resolved": "https://registry.npmjs.org/@vue/language-core/-/language-core-2.2.4.tgz", + "integrity": "sha512-eGGdw7eWUwdIn9Fy/irJ7uavCGfgemuHQABgJ/hU1UgZFnbTg9VWeXvHQdhY+2SPQZWJqWXvRWIg67t4iWEa+Q==", "dev": true, "license": "MIT", "dependencies": { - "@volar/language-core": "~2.4.8", + "@volar/language-core": "~2.4.11", "@vue/compiler-dom": "^3.5.0", "@vue/compiler-vue2": "^2.7.16", "@vue/shared": "^3.5.0", - "alien-signals": "^0.2.0", + "alien-signals": "^1.0.3", "minimatch": "^9.0.3", "muggle-string": "^0.4.1", "path-browserify": "^1.0.1" @@ -5154,9 +5432,9 @@ } }, "node_modules/alien-signals": { - "version": "0.2.2", - "resolved": "https://registry.npmjs.org/alien-signals/-/alien-signals-0.2.2.tgz", - "integrity": "sha512-cZIRkbERILsBOXTQmMrxc9hgpxglstn69zm+F1ARf4aPAzdAFYd6sBq87ErO0Fj3DV94tglcyHG5kQz9nDC/8A==", + "version": "1.0.6", + "resolved": "https://registry.npmjs.org/alien-signals/-/alien-signals-1.0.6.tgz", + "integrity": "sha512-aITl4ODHNX9mqBqwZWr5oTYP74hemqVGV4KRLSQacjoZIdwNxbedHF656+c4zuGLtRtcowitoXdIfyrXgzniVg==", "dev": true, "license": "MIT" }, @@ -5678,9 +5956,9 @@ } }, "node_modules/chai": { - "version": "5.1.2", - "resolved": "https://registry.npmjs.org/chai/-/chai-5.1.2.tgz", - "integrity": "sha512-aGtmf24DW6MLHHG5gCx4zaI3uBq3KRtxeVs0DjFH6Z0rDNbsvTxFASFvdj79pxjxZ8/5u3PIiN3IwEIQkiiuPw==", + "version": "5.2.0", + "resolved": "https://registry.npmjs.org/chai/-/chai-5.2.0.tgz", + "integrity": "sha512-mCuXncKXk5iCLhfhwTc0izo0gtEmpz5CtG2y8GiOINBlMVS6v8TMRc5TaLWKS6692m9+dVVfzgeVxR5UxWHTYw==", "dev": true, "license": "MIT", 
"dependencies": { @@ -6638,9 +6916,9 @@ } }, "node_modules/decode-named-character-reference": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/decode-named-character-reference/-/decode-named-character-reference-1.0.2.tgz", - "integrity": "sha512-O8x12RzrUF8xyVcY0KJowWsmaJxQbmy0/EtnNtHRpsOcT7dFk5W598coHqBVpmWo1oQQfsCqfCmkZN5DJrZVdg==", + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/decode-named-character-reference/-/decode-named-character-reference-1.1.0.tgz", + "integrity": "sha512-Wy+JTSbFThEOXQIR2L6mxJvEs+veIzpmqD7ynWxMXGpnk3smkHQOp6forLdHsKpAMW9iJpaBBIxz285t1n1C3w==", "dev": true, "license": "MIT", "dependencies": { @@ -6942,15 +7220,15 @@ "license": "MIT" }, "node_modules/electron": { - "version": "34.0.2", - "resolved": "https://registry.npmjs.org/electron/-/electron-34.0.2.tgz", - "integrity": "sha512-u3F+DSUlg9NaGS+9qnYmSRN8VjAnc3LJDDk1ye1uISJnh4gjG76y3681qLowsPMx4obvCP2eBINnmbLo0yT5WA==", + "version": "35.0.3", + "resolved": "https://registry.npmjs.org/electron/-/electron-35.0.3.tgz", + "integrity": "sha512-kjQAYEWXSr2TyK19IZoF85dzFIBaYuX7Yp/C+34b5Y/jmI2z270CGie+RjmEGMMitsy0G8YJKftukhYMuWlK6g==", "dev": true, "hasInstallScript": true, "license": "MIT", "dependencies": { "@electron/get": "^2.0.0", - "@types/node": "^20.9.0", + "@types/node": "^22.7.7", "extract-zip": "^2.0.1" }, "bin": { @@ -6960,23 +7238,6 @@ "node": ">= 12.20.55" } }, - "node_modules/electron/node_modules/@types/node": { - "version": "20.17.12", - "resolved": "https://registry.npmjs.org/@types/node/-/node-20.17.12.tgz", - "integrity": "sha512-vo/wmBgMIiEA23A/knMfn/cf37VnuF52nZh5ZoW0GWt4e4sxNquibrMRJ7UQsA06+MBx9r/H1jsI9grYjQCQlw==", - "dev": true, - "license": "MIT", - "dependencies": { - "undici-types": "~6.19.2" - } - }, - "node_modules/electron/node_modules/undici-types": { - "version": "6.19.8", - "resolved": "https://registry.npmjs.org/undici-types/-/undici-types-6.19.8.tgz", - "integrity": "sha512-ve2KP6f/JnbPBFyobGHuerC9g1FYGn/F8n1LWTwNxCEzd6IfqTwUQcNXgEtmmQ6DlRrC1hrSrBnCZPokRrDHjw==", - "dev": true, - "license": "MIT" - }, "node_modules/emoji-regex": { "version": "10.4.0", "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-10.4.0.tgz", @@ -7386,9 +7647,9 @@ } }, "node_modules/es-toolkit": { - "version": "1.31.0", - "resolved": "https://registry.npmjs.org/es-toolkit/-/es-toolkit-1.31.0.tgz", - "integrity": "sha512-vwS0lv/tzjM2/t4aZZRAgN9I9TP0MSkWuvt6By+hEXfG/uLs8yg2S1/ayRXH/x3pinbLgVJYT+eppueg3cM6tg==", + "version": "1.33.0", + "resolved": "https://registry.npmjs.org/es-toolkit/-/es-toolkit-1.33.0.tgz", + "integrity": "sha512-X13Q/ZSc+vsO1q600bvNK4bxgXMkHcf//RxCmYDaRY5DAcT+eoXjY5hoAPGMdRnWQjvyLEcyauG3b6hz76LNqg==", "dev": true, "license": "MIT", "workspaces": [ @@ -7466,22 +7727,23 @@ } }, "node_modules/eslint": { - "version": "9.19.0", - "resolved": "https://registry.npmjs.org/eslint/-/eslint-9.19.0.tgz", - "integrity": "sha512-ug92j0LepKlbbEv6hD911THhoRHmbdXt2gX+VDABAW/Ir7D3nqKdv5Pf5vtlyY6HQMTEP2skXY43ueqTCWssEA==", + "version": "9.23.0", + "resolved": "https://registry.npmjs.org/eslint/-/eslint-9.23.0.tgz", + "integrity": "sha512-jV7AbNoFPAY1EkFYpLq5bslU9NLNO8xnEeQXwErNibVryjk67wHVmddTBilc5srIttJDBrB0eMHKZBFbSIABCw==", "dev": true, "license": "MIT", "dependencies": { "@eslint-community/eslint-utils": "^4.2.0", "@eslint-community/regexpp": "^4.12.1", - "@eslint/config-array": "^0.19.0", - "@eslint/core": "^0.10.0", - "@eslint/eslintrc": "^3.2.0", - "@eslint/js": "9.19.0", - "@eslint/plugin-kit": "^0.2.5", + "@eslint/config-array": "^0.19.2", + 
"@eslint/config-helpers": "^0.2.0", + "@eslint/core": "^0.12.0", + "@eslint/eslintrc": "^3.3.1", + "@eslint/js": "9.23.0", + "@eslint/plugin-kit": "^0.2.7", "@humanfs/node": "^0.16.6", "@humanwhocodes/module-importer": "^1.0.1", - "@humanwhocodes/retry": "^0.4.1", + "@humanwhocodes/retry": "^0.4.2", "@types/estree": "^1.0.6", "@types/json-schema": "^7.0.15", "ajv": "^6.12.4", @@ -7489,7 +7751,7 @@ "cross-spawn": "^7.0.6", "debug": "^4.3.2", "escape-string-regexp": "^4.0.0", - "eslint-scope": "^8.2.0", + "eslint-scope": "^8.3.0", "eslint-visitor-keys": "^4.2.0", "espree": "^10.3.0", "esquery": "^1.5.0", @@ -7564,23 +7826,20 @@ } }, "node_modules/eslint-import-resolver-typescript": { - "version": "3.7.0", - "resolved": "https://registry.npmjs.org/eslint-import-resolver-typescript/-/eslint-import-resolver-typescript-3.7.0.tgz", - "integrity": "sha512-Vrwyi8HHxY97K5ebydMtffsWAn1SCR9eol49eCd5fJS4O1WV7PaAjbcjmbfJJSMz/t4Mal212Uz/fQZrOB8mow==", + "version": "4.2.2", + "resolved": "https://registry.npmjs.org/eslint-import-resolver-typescript/-/eslint-import-resolver-typescript-4.2.2.tgz", + "integrity": "sha512-Rg1YEsb9UKLQ8BOv27cS3TZ6LhEAKQVgVOXArcE/sQrlnX8+FjmJRSC29ij1qrn+eurFuMsCFUcs7/+27T0vqQ==", "dev": true, "license": "ISC", "dependencies": { - "@nolyfill/is-core-module": "1.0.39", - "debug": "^4.3.7", - "enhanced-resolve": "^5.15.0", - "fast-glob": "^3.3.2", - "get-tsconfig": "^4.7.5", - "is-bun-module": "^1.0.2", - "is-glob": "^4.0.3", - "stable-hash": "^0.0.4" + "debug": "^4.4.0", + "get-tsconfig": "^4.10.0", + "rspack-resolver": "^1.2.2", + "stable-hash": "^0.0.5", + "tinyglobby": "^0.2.12" }, "engines": { - "node": "^14.18.0 || >=16.0.0" + "node": "^16.17.0 || >=18.6.0" }, "funding": { "url": "https://opencollective.com/unts/projects/eslint-import-resolver-ts" @@ -7588,7 +7847,8 @@ "peerDependencies": { "eslint": "*", "eslint-plugin-import": "*", - "eslint-plugin-import-x": "*" + "eslint-plugin-import-x": "*", + "is-bun-module": "*" }, "peerDependenciesMeta": { "eslint-plugin-import": { @@ -7596,6 +7856,9 @@ }, "eslint-plugin-import-x": { "optional": true + }, + "is-bun-module": { + "optional": true } } }, @@ -7728,9 +7991,9 @@ } }, "node_modules/eslint-plugin-jsdoc": { - "version": "50.6.3", - "resolved": "https://registry.npmjs.org/eslint-plugin-jsdoc/-/eslint-plugin-jsdoc-50.6.3.tgz", - "integrity": "sha512-NxbJyt1M5zffPcYZ8Nb53/8nnbIScmiLAMdoe0/FAszwb7lcSiX3iYBTsuF7RV84dZZJC8r3NghomrUXsmWvxQ==", + "version": "50.6.8", + "resolved": "https://registry.npmjs.org/eslint-plugin-jsdoc/-/eslint-plugin-jsdoc-50.6.8.tgz", + "integrity": "sha512-PPZVqhoXaalMQwDGzcQrJtPSPIPOYsSMtvkjYAdsIazOW20yhYtVX4+jLL+XznD4zYTXyZbPWPRKkNev4D4lyw==", "dev": true, "license": "BSD-3-Clause", "dependencies": { @@ -7754,9 +8017,9 @@ } }, "node_modules/eslint-plugin-n": { - "version": "17.15.1", - "resolved": "https://registry.npmjs.org/eslint-plugin-n/-/eslint-plugin-n-17.15.1.tgz", - "integrity": "sha512-KFw7x02hZZkBdbZEFQduRGH4VkIH4MW97ClsbAM4Y4E6KguBJWGfWG1P4HEIpZk2bkoWf0bojpnjNAhYQP8beA==", + "version": "17.16.2", + "resolved": "https://registry.npmjs.org/eslint-plugin-n/-/eslint-plugin-n-17.16.2.tgz", + "integrity": "sha512-iQM5Oj+9o0KaeLoObJC/uxNGpktZCkYiTTBo8PkRWq3HwNcRxwpvSDFjBhQ5+HLJzBTy+CLDC5+bw0Z5GyhlOQ==", "dev": true, "license": "MIT", "dependencies": { @@ -7803,9 +8066,9 @@ } }, "node_modules/eslint-scope": { - "version": "8.2.0", - "resolved": "https://registry.npmjs.org/eslint-scope/-/eslint-scope-8.2.0.tgz", - "integrity": 
"sha512-PHlWUfG6lvPc3yvP5A4PNyBL1W8fkDUccmI21JUu/+GKZBoH/W5u6usENXUrWFRsyoW5ACUjFGgAFQp5gUlb/A==", + "version": "8.3.0", + "resolved": "https://registry.npmjs.org/eslint-scope/-/eslint-scope-8.3.0.tgz", + "integrity": "sha512-pUNxi75F8MJ/GdeKtVLSbYg4ZI34J6C0C7sbL4YOp2exGwen7ZsuBqKzUhXd0qMQ362yET3z+uPwKeg/0C2XCQ==", "dev": true, "license": "BSD-2-Clause", "dependencies": { @@ -8130,9 +8393,9 @@ } }, "node_modules/expect-type": { - "version": "1.1.0", - "resolved": "https://registry.npmjs.org/expect-type/-/expect-type-1.1.0.tgz", - "integrity": "sha512-bFi65yM+xZgk+u/KRIpekdSYkTB5W1pEf0Lt8Q8Msh7b+eQ7LXVtIB1Bkm4fvclDEL1b2CZkMhv2mOeF8tMdkA==", + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/expect-type/-/expect-type-1.2.0.tgz", + "integrity": "sha512-80F22aiJ3GLyVnS/B3HzgR6RelZVumzj9jkL0Rhz4h0xYbNW9PjlQz5h3J/SShErbXBc295vseR4/MIbVmUbeA==", "dev": true, "license": "Apache-2.0", "engines": { @@ -8264,9 +8527,9 @@ "license": "MIT" }, "node_modules/fast-uri": { - "version": "3.0.5", - "resolved": "https://registry.npmjs.org/fast-uri/-/fast-uri-3.0.5.tgz", - "integrity": "sha512-5JnBCWpFlMo0a3ciDy/JckMzzv1U9coZrIhedq+HXxxUfDTAiS0LA8OKVao4G9BxmCVck/jtA5r3KAtRWEyD8Q==", + "version": "3.0.6", + "resolved": "https://registry.npmjs.org/fast-uri/-/fast-uri-3.0.6.tgz", + "integrity": "sha512-Atfo14OibSv5wAp4VWNsFYE1AchQRTv9cBGWET4pZWHzYshFSS9NQI6I57rdKn9croWVMbYFbLhJ+yJvmZIIHw==", "dev": true, "funding": [ { @@ -8301,9 +8564,9 @@ } }, "node_modules/fdir": { - "version": "6.4.2", - "resolved": "https://registry.npmjs.org/fdir/-/fdir-6.4.2.tgz", - "integrity": "sha512-KnhMXsKSPZlAhp7+IjUkRZKPb4fUyccpDrdFXbi4QL1qkmFh9kVY09Yox+n4MaOb3lHZ1Tv829C3oaaXoMYPDQ==", + "version": "6.4.3", + "resolved": "https://registry.npmjs.org/fdir/-/fdir-6.4.3.tgz", + "integrity": "sha512-PMXmW2y1hDDfTSRc9gaXIuCCRpuoz3Kaz8cUelp3smouvfT632ozg2vrT6lJsHKKOF59YLbOGfAWGUcKEfRMQw==", "dev": true, "license": "MIT", "peerDependencies": { @@ -8467,9 +8730,9 @@ } }, "node_modules/flatted": { - "version": "3.3.2", - "resolved": "https://registry.npmjs.org/flatted/-/flatted-3.3.2.tgz", - "integrity": "sha512-AiwGJM8YcNOaobumgtng+6NHuOqC3A7MixFeDafM3X9cIUM+xUXoS5Vfgf+OihAYe20fxqNM9yPBXJzRtZ/4eA==", + "version": "3.3.3", + "resolved": "https://registry.npmjs.org/flatted/-/flatted-3.3.3.tgz", + "integrity": "sha512-GX+ysw4PBCz0PzosHDepZGANEuFCMLrnRTiEy9McGjmkCQYwRq4A/X786G/fjM/+OjsWSU1ZrY5qyARZmO/uwg==", "dev": true, "license": "ISC" }, @@ -8865,9 +9128,9 @@ } }, "node_modules/get-tsconfig": { - "version": "4.8.1", - "resolved": "https://registry.npmjs.org/get-tsconfig/-/get-tsconfig-4.8.1.tgz", - "integrity": "sha512-k9PN+cFBmaLWtVz29SkUoqU5O0slLuHJXt/2P+tMVFT+phsSGXGkp9t3rQIqdz0e+06EHNGs3oM6ZX1s2zHxRg==", + "version": "4.10.0", + "resolved": "https://registry.npmjs.org/get-tsconfig/-/get-tsconfig-4.10.0.tgz", + "integrity": "sha512-kGzZ3LWWQcGIAmg6iWvXn0ei6WDtV26wzHRMwDSzmAbcXrTEXxHy6IehI6/4eT6VRKyMP1eF1VqwrVUmE/LR7A==", "dev": true, "license": "MIT", "dependencies": { @@ -9039,18 +9302,18 @@ } }, "node_modules/globby": { - "version": "14.0.2", - "resolved": "https://registry.npmjs.org/globby/-/globby-14.0.2.tgz", - "integrity": "sha512-s3Fq41ZVh7vbbe2PN3nrW7yC7U7MFVc5c98/iTl9c2GawNMKx/J648KQRW6WKkuU8GIbbh2IXfIRQjOZnXcTnw==", + "version": "14.1.0", + "resolved": "https://registry.npmjs.org/globby/-/globby-14.1.0.tgz", + "integrity": "sha512-0Ia46fDOaT7k4og1PDW4YbodWWr3scS2vAr2lTbsplOt2WkKp0vQbkI9wKis/T5LV/dqPjO3bpS/z6GTJB82LA==", "dev": true, "license": "MIT", "dependencies": { "@sindresorhus/merge-streams": "^2.1.0", - 
"fast-glob": "^3.3.2", - "ignore": "^5.2.4", - "path-type": "^5.0.0", + "fast-glob": "^3.3.3", + "ignore": "^7.0.3", + "path-type": "^6.0.0", "slash": "^5.1.0", - "unicorn-magic": "^0.1.0" + "unicorn-magic": "^0.3.0" }, "engines": { "node": ">=18" @@ -9072,24 +9335,27 @@ "url": "https://github.com/sponsors/sindresorhus" } }, - "node_modules/globby/node_modules/ignore": { - "version": "5.3.2", - "resolved": "https://registry.npmjs.org/ignore/-/ignore-5.3.2.tgz", - "integrity": "sha512-hsBTNUqQTDwkWtcdYI2i06Y/nUBEsNEDJKjWdigLvegy8kDuJAS8uRlpkkcQpyEXL0Z/pjDy5HBmMjRCJ2gq+g==", + "node_modules/globby/node_modules/path-type": { + "version": "6.0.0", + "resolved": "https://registry.npmjs.org/path-type/-/path-type-6.0.0.tgz", + "integrity": "sha512-Vj7sf++t5pBD637NSfkxpHSMfWaeig5+DKWLhcqIYx6mWQz5hdJTGDVMQiJcw1ZYkhs7AazKDGpRVji1LJCZUQ==", "dev": true, "license": "MIT", "engines": { - "node": ">= 4" + "node": ">=18" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" } }, - "node_modules/globby/node_modules/path-type": { - "version": "5.0.0", - "resolved": "https://registry.npmjs.org/path-type/-/path-type-5.0.0.tgz", - "integrity": "sha512-5HviZNaZcfqP95rwpv+1HDgUamezbqdSYTyzjTvwtJSnIH+3vnbmWsItli8OFEndS984VT55M3jduxZbX351gg==", + "node_modules/globby/node_modules/unicorn-magic": { + "version": "0.3.0", + "resolved": "https://registry.npmjs.org/unicorn-magic/-/unicorn-magic-0.3.0.tgz", + "integrity": "sha512-+QBBXBCvifc56fsbuxZQ6Sic3wqqc3WWaqxs58gvJrcOuN83HGTCwz3oS5phzU9LthRNE9VrJCFCLUgHeeFnfA==", "dev": true, "license": "MIT", "engines": { - "node": ">=12" + "node": ">=18" }, "funding": { "url": "https://github.com/sponsors/sindresorhus" @@ -9430,9 +9696,9 @@ } }, "node_modules/hast-util-to-html": { - "version": "9.0.4", - "resolved": "https://registry.npmjs.org/hast-util-to-html/-/hast-util-to-html-9.0.4.tgz", - "integrity": "sha512-wxQzXtdbhiwGAUKrnQJXlOPmHnEehzphwkK7aluUPQ+lEc1xefC8pblMgpp2w5ldBTEfveRIrADcrhGIWrlTDA==", + "version": "9.0.5", + "resolved": "https://registry.npmjs.org/hast-util-to-html/-/hast-util-to-html-9.0.5.tgz", + "integrity": "sha512-OguPdidb+fbHQSU4Q4ZiLKnzWo8Wwsf5bZfbvu7//a9oTYoqD/fWpe96NuHkoS9h0ccGOTe0C4NGXdtS0iObOw==", "dev": true, "license": "MIT", "dependencies": { @@ -9443,7 +9709,7 @@ "hast-util-whitespace": "^3.0.0", "html-void-elements": "^3.0.0", "mdast-util-to-hast": "^13.0.0", - "property-information": "^6.0.0", + "property-information": "^7.0.0", "space-separated-tokens": "^2.0.0", "stringify-entities": "^4.0.0", "zwitch": "^2.0.4" @@ -9453,6 +9719,17 @@ "url": "https://opencollective.com/unified" } }, + "node_modules/hast-util-to-html/node_modules/property-information": { + "version": "7.0.0", + "resolved": "https://registry.npmjs.org/property-information/-/property-information-7.0.0.tgz", + "integrity": "sha512-7D/qOz/+Y4X/rzSB6jKxKUsQnphO046ei8qxG59mtM3RG3DHgTK81HrxrmoDVINJb8NKT5ZsRbwHvQ6B68Iyhg==", + "dev": true, + "license": "MIT", + "funding": { + "type": "github", + "url": "https://github.com/sponsors/wooorm" + } + }, "node_modules/hast-util-to-string": { "version": "3.0.1", "resolved": "https://registry.npmjs.org/hast-util-to-string/-/hast-util-to-string-3.0.1.tgz", @@ -9660,9 +9937,9 @@ } }, "node_modules/ignore": { - "version": "7.0.0", - "resolved": "https://registry.npmjs.org/ignore/-/ignore-7.0.0.tgz", - "integrity": "sha512-lcX8PNQygAa22u/0BysEY8VhaFRzlOkvdlKczDPnJvrkJD1EuqzEky5VYYKM2iySIuaVIDv9N190DfSreSLw2A==", + "version": "7.0.3", + "resolved": "https://registry.npmjs.org/ignore/-/ignore-7.0.3.tgz", + "integrity": 
"sha512-bAH5jbK/F3T3Jls4I0SO1hmPR0dKU0a7+SY6n1yzRtG54FLO8d6w/nxLFX2Nb7dBu6cCWXPaAME6cYqFUMmuCA==", "license": "MIT", "engines": { "node": ">= 4" @@ -9971,6 +10248,8 @@ "integrity": "sha512-DgXeu5UWI0IsMQundYb5UAOzm6G2eVnarJ0byP6Tm55iZNKceD59LNPA2L4VvsScTtHcw0yEkVwSf7PC+QoLSA==", "dev": true, "license": "MIT", + "optional": true, + "peer": true, "dependencies": { "semver": "^7.6.3" } @@ -10656,9 +10935,9 @@ } }, "node_modules/less": { - "version": "4.2.1", - "resolved": "https://registry.npmjs.org/less/-/less-4.2.1.tgz", - "integrity": "sha512-CasaJidTIhWmjcqv0Uj5vccMI7pJgfD9lMkKtlnTHAdJdYK/7l8pM9tumLyJ0zhbD4KJLo/YvTj+xznQd5NBhg==", + "version": "4.2.2", + "resolved": "https://registry.npmjs.org/less/-/less-4.2.2.tgz", + "integrity": "sha512-tkuLHQlvWUTeQ3doAqnHbNn8T6WX1KA8yvbKG9x4VtKtIjHsVKQZCH11zRgAfbDAXC2UNIg/K9BYAAcEzUIrNg==", "dev": true, "license": "Apache-2.0", "dependencies": { @@ -10816,13 +11095,6 @@ "url": "https://github.com/sponsors/sindresorhus" } }, - "node_modules/lodash": { - "version": "4.17.21", - "resolved": "https://registry.npmjs.org/lodash/-/lodash-4.17.21.tgz", - "integrity": "sha512-v2kDEe57lecTulaDIuNTPy3Ry4gLGJ6Z1O3vE1krgXZNrsQ+LFTGHVxVjcXPs17LhbZVGedAJv8XZ1tvj5FvSg==", - "dev": true, - "license": "MIT" - }, "node_modules/lodash-es": { "version": "4.17.21", "resolved": "https://registry.npmjs.org/lodash-es/-/lodash-es-4.17.21.tgz", @@ -11188,9 +11460,9 @@ } }, "node_modules/mdast-util-gfm": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/mdast-util-gfm/-/mdast-util-gfm-3.0.0.tgz", - "integrity": "sha512-dgQEX5Amaq+DuUqf26jJqSK9qgixgd6rYDHAv4aTBuA92cTknZlKpPfa86Z/s8Dj8xsAQpFfBmPUHWJBWqS4Bw==", + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/mdast-util-gfm/-/mdast-util-gfm-3.1.0.tgz", + "integrity": "sha512-0ulfdQOM3ysHhCJ1p06l0b0VKlhU0wuQs3thxZQagjcjPrlFRqY215uZGHHJan9GEAXd9MbfPjFJz+qMkVR6zQ==", "dev": true, "license": "MIT", "dependencies": { @@ -11226,9 +11498,9 @@ } }, "node_modules/mdast-util-gfm-footnote": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/mdast-util-gfm-footnote/-/mdast-util-gfm-footnote-2.0.0.tgz", - "integrity": "sha512-5jOT2boTSVkMnQ7LTrd6n/18kqwjmuYqo7JUPe+tRCY6O7dAuTFMtTPauYYrMPpox9hlN0uOx/FL8XvEfG9/mQ==", + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/mdast-util-gfm-footnote/-/mdast-util-gfm-footnote-2.1.0.tgz", + "integrity": "sha512-sqpDWlsHn7Ac9GNZQMeUzPQSMzR6Wv0WKRNvQRg0KqHh02fpTz69Qc1QSseNX29bhz1ROIyNyxExfawVKTm1GQ==", "dev": true, "license": "MIT", "dependencies": { @@ -11414,9 +11686,9 @@ } }, "node_modules/micromark": { - "version": "4.0.1", - "resolved": "https://registry.npmjs.org/micromark/-/micromark-4.0.1.tgz", - "integrity": "sha512-eBPdkcoCNvYcxQOAKAlceo5SNdzZWfF+FcSupREAzdAh9rRmE239CEQAiTwIgblwnoM8zzj35sZ5ZwvSEOF6Kw==", + "version": "4.0.2", + "resolved": "https://registry.npmjs.org/micromark/-/micromark-4.0.2.tgz", + "integrity": "sha512-zpe98Q6kvavpCr1NPVSCMebCKfD7CA2NqZ+rykeNhONIJBpc1tFKt9hucLGwha3jNTNI8lHpctWJWoimVF4PfA==", "dev": true, "funding": [ { @@ -11450,9 +11722,9 @@ } }, "node_modules/micromark-core-commonmark": { - "version": "2.0.2", - "resolved": "https://registry.npmjs.org/micromark-core-commonmark/-/micromark-core-commonmark-2.0.2.tgz", - "integrity": "sha512-FKjQKbxd1cibWMM1P9N+H8TwlgGgSkWZMmfuVucLCHaYqeSvJ0hFeHsIa65pA2nYbes0f8LDHPMrd9X7Ujxg9w==", + "version": "2.0.3", + "resolved": "https://registry.npmjs.org/micromark-core-commonmark/-/micromark-core-commonmark-2.0.3.tgz", + "integrity": 
"sha512-RDBrHEMSxVFLg6xvnXmb1Ayr2WzLAWjeSATAoxwKYJV94TeNavgoIdA0a9ytzDSVzBy2YKFK+emCPOEibLeCrg==", "dev": true, "funding": [ { @@ -11820,9 +12092,9 @@ } }, "node_modules/micromark-util-subtokenize": { - "version": "2.0.3", - "resolved": "https://registry.npmjs.org/micromark-util-subtokenize/-/micromark-util-subtokenize-2.0.3.tgz", - "integrity": "sha512-VXJJuNxYWSoYL6AJ6OQECCFGhIU2GGHMw8tahogePBrjkG8aCCas3ibkp7RnVOSTClg2is05/R7maAhF1XyQMg==", + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/micromark-util-subtokenize/-/micromark-util-subtokenize-2.1.0.tgz", + "integrity": "sha512-XQLu552iSctvnEcgXw6+Sx75GflAPNED1qx7eBJ+wydBb2KCbRZe+NwvIEEMM83uml1+2WSXpBAcp9IUCgCYWA==", "dev": true, "funding": [ { @@ -11940,16 +12212,6 @@ "node": ">= 0.6" } }, - "node_modules/mimic-fn": { - "version": "2.1.0", - "resolved": "https://registry.npmjs.org/mimic-fn/-/mimic-fn-2.1.0.tgz", - "integrity": "sha512-OqbOk5oEQeAZ8WXWydlu9HJjz9WVdEIvamMCcXmuqUYjTknH/sqsWvhQ3vgwKFRR1HpjvNBKQ37nbJgYzGqGcg==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=6" - } - }, "node_modules/mimic-function": { "version": "5.0.1", "resolved": "https://registry.npmjs.org/mimic-function/-/mimic-function-5.0.1.tgz", @@ -12059,9 +12321,9 @@ } }, "node_modules/mrmime": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/mrmime/-/mrmime-2.0.0.tgz", - "integrity": "sha512-eu38+hdgojoyq63s+yTpN4XMBdt5l8HhMhc4VKLO9KM5caLIBvUm4thi7fFaxyTmCKeNnXZ5pAlBwCUnhA09uw==", + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/mrmime/-/mrmime-2.0.1.tgz", + "integrity": "sha512-Y3wQdFg2Va6etvQ5I82yUhGdsKrcYox6p7FfL1LbK2J4V01F9TGlepTIhnK24t7koZibmg82KGglhA1XK5IsLQ==", "dev": true, "license": "MIT", "engines": { @@ -12094,9 +12356,9 @@ } }, "node_modules/nanoid": { - "version": "5.0.9", - "resolved": "https://registry.npmjs.org/nanoid/-/nanoid-5.0.9.tgz", - "integrity": "sha512-Aooyr6MXU6HpvvWXKoVoXwKMs/KyVakWwg7xQfv5/S/RIgJMy0Ifa45H9qqYy7pTCszrHzP21Uk4PZq2HpEM8Q==", + "version": "5.1.5", + "resolved": "https://registry.npmjs.org/nanoid/-/nanoid-5.1.5.tgz", + "integrity": "sha512-Ir/+ZpE9fDsNH0hQ3C68uyThDXzYcim2EqcZ8zn8Chtt1iylPT9xXJB0kPCnqzgcEGikO9RxSrh63MsmVCU7Fw==", "funding": [ { "type": "github", @@ -12151,9 +12413,9 @@ "license": "MIT" }, "node_modules/node-addon-api": { - "version": "8.3.0", - "resolved": "https://registry.npmjs.org/node-addon-api/-/node-addon-api-8.3.0.tgz", - "integrity": "sha512-8VOpLHFrOQlAH+qA0ZzuGRlALRA6/LVh8QJldbrC4DY0hXoMP0l4Acq8TzFC018HztWiRqyCEj2aTWY2UvnJUg==", + "version": "8.3.1", + "resolved": "https://registry.npmjs.org/node-addon-api/-/node-addon-api-8.3.1.tgz", + "integrity": "sha512-lytcDEdxKjGJPTLEfW4mYMigRezMlyJY8W4wxJK8zE533Jlb8L8dRuObJFWg2P+AuOIxoCgKF+2Oq4d4Zd0OUA==", "license": "MIT", "engines": { "node": "^18 || ^20 || >= 21" @@ -15150,20 +15412,20 @@ } }, "node_modules/octokit": { - "version": "4.1.0", - "resolved": "https://registry.npmjs.org/octokit/-/octokit-4.1.0.tgz", - "integrity": "sha512-/UrQAOSvkc+lUUWKNzy4ByAgYU9KpFzZQt8DnC962YmQuDiZb1SNJ90YukCCK5aMzKqqCA+z1kkAlmzYvdYKag==", + "version": "4.1.2", + "resolved": "https://registry.npmjs.org/octokit/-/octokit-4.1.2.tgz", + "integrity": "sha512-0kcTxJOK3yQrJsRb8wKa28hlTze4QOz4sLuUnfXXnhboDhFKgv8LxS86tFwbsafDW9JZ08ByuVAE8kQbYJIZkA==", "license": "MIT", "dependencies": { - "@octokit/app": "^15.1.2", - "@octokit/core": "^6.1.3", - "@octokit/oauth-app": "^7.1.4", + "@octokit/app": "^15.1.4", + "@octokit/core": "^6.1.4", + "@octokit/oauth-app": "^7.1.6", "@octokit/plugin-paginate-graphql": "^5.2.4", - 
"@octokit/plugin-paginate-rest": "^11.4.0", - "@octokit/plugin-rest-endpoint-methods": "^13.3.0", - "@octokit/plugin-retry": "^7.1.3", + "@octokit/plugin-paginate-rest": "^11.4.2", + "@octokit/plugin-rest-endpoint-methods": "^13.3.1", + "@octokit/plugin-retry": "^7.1.4", "@octokit/plugin-throttling": "^9.4.0", - "@octokit/request-error": "^6.1.6", + "@octokit/request-error": "^6.1.7", "@octokit/types": "^13.7.0" }, "engines": { @@ -15195,6 +15457,13 @@ "url": "https://github.com/sponsors/sindresorhus" } }, + "node_modules/oniguruma-parser": { + "version": "0.5.4", + "resolved": "https://registry.npmjs.org/oniguruma-parser/-/oniguruma-parser-0.5.4.tgz", + "integrity": "sha512-yNxcQ8sKvURiTwP0mV6bLQCYE7NKfKRRWunhbZnXgxSmB1OXa1lHrN3o4DZd+0Si0kU5blidK7BcROO8qv5TZA==", + "dev": true, + "license": "MIT" + }, "node_modules/oniguruma-to-es": { "version": "0.10.0", "resolved": "https://registry.npmjs.org/oniguruma-to-es/-/oniguruma-to-es-0.10.0.tgz", @@ -15226,9 +15495,9 @@ } }, "node_modules/ora": { - "version": "8.1.1", - "resolved": "https://registry.npmjs.org/ora/-/ora-8.1.1.tgz", - "integrity": "sha512-YWielGi1XzG1UTvOaCFaNgEnuhZVMSHYkW/FQ7UX8O26PtlpdM84c0f7wLPlkvx2RfiQmnzd61d/MGxmpQeJPw==", + "version": "8.2.0", + "resolved": "https://registry.npmjs.org/ora/-/ora-8.2.0.tgz", + "integrity": "sha512-weP+BZ8MVNnlCm8c0Qdc1WSWq4Qn7I+9CJGm7Qali6g44e/PUzbjNqJX5NJ9ljlNMosfJvg1fKEGILklK9cwnw==", "license": "MIT", "dependencies": { "chalk": "^5.3.0", @@ -15604,9 +15873,9 @@ } }, "node_modules/pathe": { - "version": "2.0.2", - "resolved": "https://registry.npmjs.org/pathe/-/pathe-2.0.2.tgz", - "integrity": "sha512-15Ztpk+nov8DR524R4BF7uEuzESgzUEAV4Ah7CUMNGXdE5ELuvxElxGXndBl32vMSsWa1jpNf22Z+Er3sKwq+w==", + "version": "2.0.3", + "resolved": "https://registry.npmjs.org/pathe/-/pathe-2.0.3.tgz", + "integrity": "sha512-WUjGcAqP1gQacoQe+OBJsFA7Ld4DyXuUIjZ5cc75cLHvJ7dtNsTugphxIADwspS+AraAUePCKrSVtPLFj/F88w==", "dev": true, "license": "MIT" }, @@ -16440,6 +16709,29 @@ "fsevents": "~2.3.2" } }, + "node_modules/rspack-resolver": { + "version": "1.2.2", + "resolved": "https://registry.npmjs.org/rspack-resolver/-/rspack-resolver-1.2.2.tgz", + "integrity": "sha512-Fwc19jMBA3g+fxDJH2B4WxwZjE0VaaOL7OX/A4Wn5Zv7bOD/vyPZhzXfaO73Xc2GAlfi96g5fGUa378WbIGfFw==", + "dev": true, + "license": "MIT", + "funding": { + "url": "https://github.com/sponsors/JounQin" + }, + "optionalDependencies": { + "@unrs/rspack-resolver-binding-darwin-arm64": "1.2.2", + "@unrs/rspack-resolver-binding-darwin-x64": "1.2.2", + "@unrs/rspack-resolver-binding-freebsd-x64": "1.2.2", + "@unrs/rspack-resolver-binding-linux-arm-gnueabihf": "1.2.2", + "@unrs/rspack-resolver-binding-linux-arm64-gnu": "1.2.2", + "@unrs/rspack-resolver-binding-linux-arm64-musl": "1.2.2", + "@unrs/rspack-resolver-binding-linux-x64-gnu": "1.2.2", + "@unrs/rspack-resolver-binding-linux-x64-musl": "1.2.2", + "@unrs/rspack-resolver-binding-wasm32-wasi": "1.2.2", + "@unrs/rspack-resolver-binding-win32-arm64-msvc": "1.2.2", + "@unrs/rspack-resolver-binding-win32-x64-msvc": "1.2.2" + } + }, "node_modules/run-parallel": { "version": "1.2.0", "resolved": "https://registry.npmjs.org/run-parallel/-/run-parallel-1.2.0.tgz", @@ -16577,9 +16869,9 @@ } }, "node_modules/semantic-release": { - "version": "24.2.1", - "resolved": "https://registry.npmjs.org/semantic-release/-/semantic-release-24.2.1.tgz", - "integrity": "sha512-z0/3cutKNkLQ4Oy0HTi3lubnjTsdjjgOqmxdPjeYWe6lhFqUPfwslZxRHv3HDZlN4MhnZitb9SLihDkZNxOXfQ==", + "version": "24.2.3", + "resolved": 
"https://registry.npmjs.org/semantic-release/-/semantic-release-24.2.3.tgz", + "integrity": "sha512-KRhQG9cUazPavJiJEFIJ3XAMjgfd0fcK3B+T26qOl8L0UG5aZUjeRfREO0KM5InGtYwxqiiytkJrbcYoLDEv0A==", "dev": true, "license": "MIT", "dependencies": { @@ -16620,16 +16912,6 @@ "node": ">=20.8.1" } }, - "node_modules/semantic-release/node_modules/@semantic-release/error": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/@semantic-release/error/-/error-4.0.0.tgz", - "integrity": "sha512-mgdxrHTLOjOddRVYIYDo0fR3/v61GNN1YGkfbrjuIKg/uMgCd+Qzo3UAXJ+woLQQpos4pl5Esuw5A7AoNlzjUQ==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=18" - } - }, "node_modules/semantic-release/node_modules/aggregate-error": { "version": "5.0.0", "resolved": "https://registry.npmjs.org/aggregate-error/-/aggregate-error-5.0.0.tgz", @@ -16716,9 +16998,9 @@ } }, "node_modules/semver": { - "version": "7.7.0", - "resolved": "https://registry.npmjs.org/semver/-/semver-7.7.0.tgz", - "integrity": "sha512-DrfFnPzblFmNrIZzg5RzHegbiRWg7KMR7btwi2yjHwx06zsUbO5g613sVwEV7FTwmzJu+Io0lJe2GJ3LxqpvBQ==", + "version": "7.7.1", + "resolved": "https://registry.npmjs.org/semver/-/semver-7.7.1.tgz", + "integrity": "sha512-hlq8tAfn0m/61p4BVRcPzIGr6LKiMwo4VM6dGi6pt4qcRkmNzTcWq6eCEjEh+qXjkMDvPlOFFSGwQjoEa6gyMA==", "license": "ISC", "bin": { "semver": "bin/semver.js" @@ -17223,9 +17505,9 @@ "license": "MIT" }, "node_modules/sirv": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/sirv/-/sirv-3.0.0.tgz", - "integrity": "sha512-BPwJGUeDaDCHihkORDchNyyTvWFhcusy1XMmhEVTQTwGeybFbp8YEmB+njbPnth1FibULBSBVwCQni25XlCUDg==", + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/sirv/-/sirv-3.0.1.tgz", + "integrity": "sha512-FoqMu0NCGBLCcAkS1qA+XJIQTR6/JHfQXl+uGteNCQ76T91DMUjPa9xfmeqMY3z80nLSg9yQmNjK0Px6RWsH/A==", "dev": true, "license": "MIT", "dependencies": { @@ -17406,9 +17688,9 @@ "optional": true }, "node_modules/stable-hash": { - "version": "0.0.4", - "resolved": "https://registry.npmjs.org/stable-hash/-/stable-hash-0.0.4.tgz", - "integrity": "sha512-LjdcbuBeLcdETCrPn9i8AYAZ1eCtu4ECAWtP7UleOiZ9LzVxRzzUZEoZ8zB24nhkQnDWyET0I+3sWokSDS3E7g==", + "version": "0.0.5", + "resolved": "https://registry.npmjs.org/stable-hash/-/stable-hash-0.0.5.tgz", + "integrity": "sha512-+L3ccpzibovGXFK+Ap/f8LOS0ahMrHTf3xu7mMLSpEGU0EO9ucaysSylKo9eRDFNhWve/y275iPmIZ4z39a9iA==", "dev": true, "license": "MIT" }, @@ -17420,9 +17702,9 @@ "license": "MIT" }, "node_modules/std-env": { - "version": "3.8.0", - "resolved": "https://registry.npmjs.org/std-env/-/std-env-3.8.0.tgz", - "integrity": "sha512-Bc3YwwCB+OzldMxOXJIIvC6cPRWr/LxOp48CdQTOkPyk/t4JWWJbrilwBd7RJzKV8QW7tJkcgAmeuLLJugl5/w==", + "version": "3.8.1", + "resolved": "https://registry.npmjs.org/std-env/-/std-env-3.8.1.tgz", + "integrity": "sha512-vj5lIj3Mwf9D79hBkltk5qmkFI+biIKWS2IBxEyEU3AX1tUf7AoL8nSazCOiiqQsGKIq01SClsKEzweu34uwvA==", "dev": true, "license": "MIT" }, @@ -18243,17 +18525,20 @@ "license": "MIT" }, "node_modules/tinyglobby": { - "version": "0.2.10", - "resolved": "https://registry.npmjs.org/tinyglobby/-/tinyglobby-0.2.10.tgz", - "integrity": "sha512-Zc+8eJlFMvgatPZTl6A9L/yht8QqdmUNtURHaKZLmKBE12hNPSrqNkUp2cs3M/UKmNVVAMFQYSjYIVHDjW5zew==", + "version": "0.2.12", + "resolved": "https://registry.npmjs.org/tinyglobby/-/tinyglobby-0.2.12.tgz", + "integrity": "sha512-qkf4trmKSIiMTs/E63cxH+ojC2unam7rJ0WrauAzpT3ECNTxGRMlaXxVbfxMUC/w0LaYk6jQ4y/nGR9uBO3tww==", "dev": true, "license": "MIT", "dependencies": { - "fdir": "^6.4.2", + "fdir": "^6.4.3", "picomatch": "^4.0.2" }, 
"engines": { "node": ">=12.0.0" + }, + "funding": { + "url": "https://github.com/sponsors/SuperchupuDev" } }, "node_modules/tinypool": { @@ -18354,9 +18639,9 @@ } }, "node_modules/ts-api-utils": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/ts-api-utils/-/ts-api-utils-2.0.0.tgz", - "integrity": "sha512-xCt/TOAc+EOHS1XPnijD3/yzpH6qg2xppZO1YDqGoVsNXfQfzHpOdNuXwrwOU8u4ITXJyDCTyt8w5g1sZv9ynQ==", + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/ts-api-utils/-/ts-api-utils-2.1.0.tgz", + "integrity": "sha512-CUgTZL1irw8u29bzrOD/nH85jqyc74D6SshFgujOIA7osm2Rz7dYH77agkx7H4FBNxDq7Cjf+IjaX/8zwFW+ZQ==", "dev": true, "license": "MIT", "engines": { @@ -18408,23 +18693,44 @@ "license": "MIT" }, "node_modules/twoslash-vue": { - "version": "0.2.12", - "resolved": "https://registry.npmjs.org/twoslash-vue/-/twoslash-vue-0.2.12.tgz", - "integrity": "sha512-kxH60DLn2QBcN2wjqxgMDkyRgmPXsytv7fJIlsyFMDPSkm1/lMrI/UMrNAshNaRHcI+hv8x3h/WBgcvlb2RNAQ==", + "version": "0.3.1", + "resolved": "https://registry.npmjs.org/twoslash-vue/-/twoslash-vue-0.3.1.tgz", + "integrity": "sha512-9/PS0/iL2m8G6N2ILdI18sZ8l6ex+W2nN5jIaTpfFPlnY0MOX2G5UxEVs+AuNimM9SwEnwfiIuDY9ubDCIQpSQ==", "dev": true, "license": "MIT", "dependencies": { - "@vue/language-core": "~2.1.6", - "twoslash": "0.2.12", - "twoslash-protocol": "0.2.12" + "@vue/language-core": "2.2.4", + "twoslash": "0.3.1", + "twoslash-protocol": "0.3.1" }, "funding": { "url": "https://github.com/sponsors/antfu" }, "peerDependencies": { - "typescript": "*" + "typescript": "^5.5.0" + } + }, + "node_modules/twoslash-vue/node_modules/twoslash": { + "version": "0.3.1", + "resolved": "https://registry.npmjs.org/twoslash/-/twoslash-0.3.1.tgz", + "integrity": "sha512-OGqMTGvqXTcb92YQdwGfEdK0nZJA64Aj/ChLOelbl3TfYch2IoBST0Yx4C0LQ7Lzyqm9RpgcpgDxeXQIz4p2Kg==", + "dev": true, + "license": "MIT", + "dependencies": { + "@typescript/vfs": "^1.6.1", + "twoslash-protocol": "0.3.1" + }, + "peerDependencies": { + "typescript": "^5.5.0" } }, + "node_modules/twoslash-vue/node_modules/twoslash-protocol": { + "version": "0.3.1", + "resolved": "https://registry.npmjs.org/twoslash-protocol/-/twoslash-protocol-0.3.1.tgz", + "integrity": "sha512-BMePTL9OkuNISSyyMclBBhV2s9++DiOCyhhCoV5Kaht6eaWLwVjCCUJHY33eZJPsyKeZYS8Wzz0h+XI01VohVw==", + "dev": true, + "license": "MIT" + }, "node_modules/type-check": { "version": "0.4.0", "resolved": "https://registry.npmjs.org/type-check/-/type-check-0.4.0.tgz", @@ -18439,9 +18745,9 @@ } }, "node_modules/type-fest": { - "version": "4.31.0", - "resolved": "https://registry.npmjs.org/type-fest/-/type-fest-4.31.0.tgz", - "integrity": "sha512-yCxltHW07Nkhv/1F6wWBr8kz+5BGMfP+RbRSYFnegVb0qV/UMT0G0ElBloPVerqn4M2ZV80Ir1FtCcYv1cT6vQ==", + "version": "4.37.0", + "resolved": "https://registry.npmjs.org/type-fest/-/type-fest-4.37.0.tgz", + "integrity": "sha512-S/5/0kFftkq27FPNye0XM1e2NsnoD/3FS+pBmbjmmtLT6I+i344KoOf7pvXreaFsDamWeaJX55nczA1m5PsBDg==", "dev": true, "license": "(MIT OR CC0-1.0)", "engines": { @@ -18530,49 +18836,50 @@ } }, "node_modules/typedoc": { - "version": "0.27.6", - "resolved": "https://registry.npmjs.org/typedoc/-/typedoc-0.27.6.tgz", - "integrity": "sha512-oBFRoh2Px6jFx366db0lLlihcalq/JzyCVp7Vaq1yphL/tbgx2e+bkpkCgJPunaPvPwoTOXSwasfklWHm7GfAw==", + "version": "0.28.1", + "resolved": "https://registry.npmjs.org/typedoc/-/typedoc-0.28.1.tgz", + "integrity": "sha512-Mn2VPNMaxoe/hlBiLriG4U55oyAa3Xo+8HbtEwV7F5WEOPXqtxzGuMZhJYHaqFJpajeQ6ZDUC2c990NAtTbdgw==", "dev": true, "license": "Apache-2.0", "dependencies": { - "@gerrit0/mini-shiki": 
"^1.24.0", + "@gerrit0/mini-shiki": "^3.2.1", "lunr": "^2.3.9", "markdown-it": "^14.1.0", "minimatch": "^9.0.5", - "yaml": "^2.6.1" + "yaml": "^2.7.0 " }, "bin": { "typedoc": "bin/typedoc" }, "engines": { - "node": ">= 18" + "node": ">= 18", + "pnpm": ">= 10" }, "peerDependencies": { - "typescript": "5.0.x || 5.1.x || 5.2.x || 5.3.x || 5.4.x || 5.5.x || 5.6.x || 5.7.x" + "typescript": "5.0.x || 5.1.x || 5.2.x || 5.3.x || 5.4.x || 5.5.x || 5.6.x || 5.7.x || 5.8.x" } }, "node_modules/typedoc-plugin-markdown": { - "version": "4.4.1", - "resolved": "https://registry.npmjs.org/typedoc-plugin-markdown/-/typedoc-plugin-markdown-4.4.1.tgz", - "integrity": "sha512-fx23nSCvewI9IR8lzIYtzDphETcgTDuxKcmHKGD4lo36oexC+B1k4NaCOY58Snqb4OlE8OXDAGVcQXYYuLRCNw==", + "version": "4.5.2", + "resolved": "https://registry.npmjs.org/typedoc-plugin-markdown/-/typedoc-plugin-markdown-4.5.2.tgz", + "integrity": "sha512-n0wfkCQU4nts13v8RSWMzIGNMbDo4P+oumHW6JudriknJLJSzx7p19OKJP8rKXvBkA+SFFuT7mW8lkMZZROz4g==", "dev": true, "license": "MIT", "engines": { "node": ">= 18" }, "peerDependencies": { - "typedoc": "0.27.x" + "typedoc": "0.28.x" } }, "node_modules/typedoc-plugin-mdn-links": { - "version": "4.0.10", - "resolved": "https://registry.npmjs.org/typedoc-plugin-mdn-links/-/typedoc-plugin-mdn-links-4.0.10.tgz", - "integrity": "sha512-XlJttXrXLIlkU429vDjcKpTwrBTQ1FPpskEHqBfQca9HrvT3Ohl5lOJWsij23Puk6LsI+6l7RPRoHtL7ZTtROA==", + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/typedoc-plugin-mdn-links/-/typedoc-plugin-mdn-links-5.0.1.tgz", + "integrity": "sha512-eofdcc2nZZpipz/ubjG+7UYMi6Xu95svUwnZ+ClJh6NJdrv7kAOerL9N3iDOpo5kwQeK86GqPWwnv6LUGo5Wrw==", "dev": true, "license": "MIT", "peerDependencies": { - "typedoc": "0.26.x || 0.27.x" + "typedoc": "0.27.x || 0.28.x" } }, "node_modules/typedoc-vitepress-theme": { @@ -18586,9 +18893,9 @@ } }, "node_modules/typescript": { - "version": "5.7.3", - "resolved": "https://registry.npmjs.org/typescript/-/typescript-5.7.3.tgz", - "integrity": "sha512-84MVSjMEHP+FQRPy3pX9sTVV/INIex71s9TL2Gm5FG/WG1SqXeKyZ0k7/blY/4FdOzI12CBy1vGc4og/eus0fw==", + "version": "5.8.2", + "resolved": "https://registry.npmjs.org/typescript/-/typescript-5.8.2.tgz", + "integrity": "sha512-aJn6wq13/afZp/jT9QZmwEjDqqvSGp1VT5GVg+f/t6/oVyrgXM6BY1h9BRh/O5p3PlUPAe+WuiEZOmb/49RqoQ==", "dev": true, "license": "Apache-2.0", "bin": { @@ -18600,15 +18907,15 @@ } }, "node_modules/typescript-eslint": { - "version": "8.22.0", - "resolved": "https://registry.npmjs.org/typescript-eslint/-/typescript-eslint-8.22.0.tgz", - "integrity": "sha512-Y2rj210FW1Wb6TWXzQc5+P+EWI9/zdS57hLEc0gnyuvdzWo8+Y8brKlbj0muejonhMI/xAZCnZZwjbIfv1CkOw==", + "version": "8.27.0", + "resolved": "https://registry.npmjs.org/typescript-eslint/-/typescript-eslint-8.27.0.tgz", + "integrity": "sha512-ZZ/8+Y0rRUMuW1gJaPtLWe4ryHbsPLzzibk5Sq+IFa2aOH1Vo0gPr1fbA6pOnzBke7zC2Da4w8AyCgxKXo3lqA==", "dev": true, "license": "MIT", "dependencies": { - "@typescript-eslint/eslint-plugin": "8.22.0", - "@typescript-eslint/parser": "8.22.0", - "@typescript-eslint/utils": "8.22.0" + "@typescript-eslint/eslint-plugin": "8.27.0", + "@typescript-eslint/parser": "8.27.0", + "@typescript-eslint/utils": "8.27.0" }, "engines": { "node": "^18.18.0 || ^20.9.0 || >=21.1.0" @@ -18619,7 +18926,7 @@ }, "peerDependencies": { "eslint": "^8.57.0 || ^9.0.0", - "typescript": ">=4.8.4 <5.8.0" + "typescript": ">=4.8.4 <5.9.0" } }, "node_modules/uc.micro": { @@ -18809,9 +19116,9 @@ } }, "node_modules/universal-github-app-jwt": { - "version": "2.2.0", - "resolved": 
"https://registry.npmjs.org/universal-github-app-jwt/-/universal-github-app-jwt-2.2.0.tgz", - "integrity": "sha512-G5o6f95b5BggDGuUfKDApKaCgNYy2x7OdHY0zSMF081O0EJobw+1130VONhrA7ezGSV2FNOGyM+KQpQZAr9bIQ==", + "version": "2.2.2", + "resolved": "https://registry.npmjs.org/universal-github-app-jwt/-/universal-github-app-jwt-2.2.2.tgz", + "integrity": "sha512-dcmbeSrOdTnsjGjUfAlqNDJrhxXizjAz94ija9Qw8YkZ1uu0d+GoZzyH+Jb9tIIqvGsadUfwg+22k5aDqqwzbw==", "license": "MIT" }, "node_modules/universal-user-agent": { @@ -18992,16 +19299,16 @@ } }, "node_modules/vite-node": { - "version": "3.0.4", - "resolved": "https://registry.npmjs.org/vite-node/-/vite-node-3.0.4.tgz", - "integrity": "sha512-7JZKEzcYV2Nx3u6rlvN8qdo3QV7Fxyt6hx+CCKz9fbWxdX5IvUOmTWEAxMrWxaiSf7CKGLJQ5rFu8prb/jBjOA==", + "version": "3.0.9", + "resolved": "https://registry.npmjs.org/vite-node/-/vite-node-3.0.9.tgz", + "integrity": "sha512-w3Gdx7jDcuT9cNn9jExXgOyKmf5UOTb6WMHz8LGAm54eS1Elf5OuBhCxl6zJxGhEeIkgsE1WbHuoL0mj/UXqXg==", "dev": true, "license": "MIT", "dependencies": { "cac": "^6.7.14", "debug": "^4.4.0", "es-module-lexer": "^1.6.0", - "pathe": "^2.0.2", + "pathe": "^2.0.3", "vite": "^5.0.0 || ^6.0.0" }, "bin": { @@ -19118,31 +19425,31 @@ } }, "node_modules/vitest": { - "version": "3.0.4", - "resolved": "https://registry.npmjs.org/vitest/-/vitest-3.0.4.tgz", - "integrity": "sha512-6XG8oTKy2gnJIFTHP6LD7ExFeNLxiTkK3CfMvT7IfR8IN+BYICCf0lXUQmX7i7JoxUP8QmeP4mTnWXgflu4yjw==", + "version": "3.0.9", + "resolved": "https://registry.npmjs.org/vitest/-/vitest-3.0.9.tgz", + "integrity": "sha512-BbcFDqNyBlfSpATmTtXOAOj71RNKDDvjBM/uPfnxxVGrG+FSH2RQIwgeEngTaTkuU/h0ScFvf+tRcKfYXzBybQ==", "dev": true, "license": "MIT", "dependencies": { - "@vitest/expect": "3.0.4", - "@vitest/mocker": "3.0.4", - "@vitest/pretty-format": "^3.0.4", - "@vitest/runner": "3.0.4", - "@vitest/snapshot": "3.0.4", - "@vitest/spy": "3.0.4", - "@vitest/utils": "3.0.4", - "chai": "^5.1.2", + "@vitest/expect": "3.0.9", + "@vitest/mocker": "3.0.9", + "@vitest/pretty-format": "^3.0.9", + "@vitest/runner": "3.0.9", + "@vitest/snapshot": "3.0.9", + "@vitest/spy": "3.0.9", + "@vitest/utils": "3.0.9", + "chai": "^5.2.0", "debug": "^4.4.0", "expect-type": "^1.1.0", "magic-string": "^0.30.17", - "pathe": "^2.0.2", + "pathe": "^2.0.3", "std-env": "^3.8.0", "tinybench": "^2.9.0", "tinyexec": "^0.3.2", "tinypool": "^1.0.2", "tinyrainbow": "^2.0.0", "vite": "^5.0.0 || ^6.0.0", - "vite-node": "3.0.4", + "vite-node": "3.0.9", "why-is-node-running": "^2.3.0" }, "bin": { @@ -19158,8 +19465,8 @@ "@edge-runtime/vm": "*", "@types/debug": "^4.1.12", "@types/node": "^18.0.0 || ^20.0.0 || >=22.0.0", - "@vitest/browser": "3.0.4", - "@vitest/ui": "3.0.4", + "@vitest/browser": "3.0.9", + "@vitest/ui": "3.0.9", "happy-dom": "*", "jsdom": "*" }, @@ -19574,9 +19881,9 @@ } }, "node_modules/yocto-queue": { - "version": "1.1.1", - "resolved": "https://registry.npmjs.org/yocto-queue/-/yocto-queue-1.1.1.tgz", - "integrity": "sha512-b4JR1PFR10y1mKjhHY9LaGo6tmrgjit7hxVIeAmyMw3jegXR4dhYqLaQF5zMXZxY7tLpMyJeLjr1C4rLmkVe8g==", + "version": "1.2.1", + "resolved": "https://registry.npmjs.org/yocto-queue/-/yocto-queue-1.2.1.tgz", + "integrity": "sha512-AyeEbWOu/TAXdxlV9wmGcR0+yh2j3vYPGOECcIj2S7MkrLyC7ne+oye2BKTItt0ii2PHk4cDy+95+LshzbXnGg==", "dev": true, "license": "MIT", "engines": { @@ -19610,9 +19917,9 @@ } }, "node_modules/zx": { - "version": "8.3.1", - "resolved": "https://registry.npmjs.org/zx/-/zx-8.3.1.tgz", - "integrity": 
"sha512-MjNXfysB0Rv/lMi8oFa3a3flNC/KKqhguuarqxYM/uz/WFoD/AgHDkDdoDD4Y4nRjaCDphJJBTeeaPAE1P7fhA==", + "version": "8.4.1", + "resolved": "https://registry.npmjs.org/zx/-/zx-8.4.1.tgz", + "integrity": "sha512-1Cb+Tfwt/daKV6wckBeDbB6h3IMauqj9KWp+EcbYzi9doeJeIHCktxp/yWspXOXRdoUzBCQSKoUgm3g8r9fz5A==", "dev": true, "license": "Apache-2.0", "bin": { @@ -19620,10 +19927,6 @@ }, "engines": { "node": ">= 12.17.0" - }, - "optionalDependencies": { - "@types/fs-extra": ">=11", - "@types/node": ">=20" } } } diff --git a/package.json b/package.json index 41653d51..30afc487 100644 --- a/package.json +++ b/package.json @@ -39,10 +39,10 @@ } }, "engines": { - "node": ">=18.0.0" + "node": ">=20.0.0" }, "scripts": { - "prepare": "[ \"$CI\" = true ] || [ -d '.husky/_' ] || husky", + "prepare": "node --experimental-require-module -e \"process.env.CI !== 'true' && console.log(require('husky').default())\"", "postinstall": "cd templates && npm install", "postversion": "vite-node scripts/postVersion.ts", "prebuild": "rimraf ./dist ./tsconfig.tsbuildinfo", @@ -111,6 +111,8 @@ "catai", "mistral", "deepseek", + "qwen", + "qwq", "typescript", "lora", "batching", @@ -131,53 +133,53 @@ }, "homepage": "https://node-llama-cpp.withcat.ai", "devDependencies": { - "@commitlint/cli": "^19.6.1", - "@commitlint/config-conventional": "^19.6.0", - "@eslint/compat": "^1.2.5", - "@fontsource/inter": "^5.1.1", - "@nolebase/vitepress-plugin-git-changelog": "^2.12.1", - "@nolebase/vitepress-plugin-og-image": "^2.12.1", + "@commitlint/cli": "^19.8.0", + "@commitlint/config-conventional": "^19.8.0", + "@eslint/compat": "^1.2.7", + "@fontsource/inter": "^5.2.5", + "@nolebase/vitepress-plugin-git-changelog": "^2.15.1", + "@nolebase/vitepress-plugin-og-image": "^2.15.1", "@resvg/resvg-js": "^2.6.2", - "@semantic-release/exec": "^6.0.3", + "@semantic-release/exec": "^7.0.3", "@semantic-release/github": "11.0.1", "@semantic-release/npm": "12.0.1", - "@shikijs/vitepress-twoslash": "^2.2.0", - "@stylistic/eslint-plugin": "^3.0.1", + "@shikijs/vitepress-twoslash": "^3.2.1", + "@stylistic/eslint-plugin": "^4.2.0", "@types/async-retry": "^1.4.9", "@types/bytes": "^3.1.5", "@types/cross-spawn": "^6.0.6", "@types/fs-extra": "^11.0.4", - "@types/node": "^22.12.0", + "@types/node": "^22.13.11", "@types/proper-lockfile": "^4.1.4", "@types/semver": "^7.5.8", "@types/validate-npm-package-name": "^4.0.2", "@types/which": "^3.0.4", "@types/yargs": "^17.0.33", - "@vitest/coverage-v8": "^3.0.4", - "@vitest/ui": "^3.0.4", - "electron": "^34.0.2", - "eslint": "^9.19.0", - "eslint-import-resolver-typescript": "^3.7.0", + "@vitest/coverage-v8": "^3.0.9", + "@vitest/ui": "^3.0.9", + "electron": "^35.0.3", + "eslint": "^9.23.0", + "eslint-import-resolver-typescript": "^4.2.2", "eslint-plugin-import": "^2.31.0", - "eslint-plugin-jsdoc": "^50.6.3", - "eslint-plugin-n": "^17.15.1", + "eslint-plugin-jsdoc": "^50.6.8", + "eslint-plugin-n": "^17.16.2", "feed": "^4.2.2", "husky": "^9.1.7", "rehype": "^13.0.2", "rimraf": "^6.0.1", - "semantic-release": "^24.2.1", + "semantic-release": "^24.2.3", "sharp": "^0.33.5", "tslib": "^2.8.1", - "typedoc": "^0.27.6", - "typedoc-plugin-markdown": "^4.4.1", - "typedoc-plugin-mdn-links": "^4.0.10", + "typedoc": "^0.28.1", + "typedoc-plugin-markdown": "^4.5.2", + "typedoc-plugin-mdn-links": "^5.0.1", "typedoc-vitepress-theme": "^1.1.2", - "typescript": "^5.7.3", - "typescript-eslint": "^8.22.0", - "vite-node": "^3.0.4", + "typescript": "^5.8.2", + "typescript-eslint": "^8.27.0", + "vite-node": "^3.0.9", "vitepress": "^1.6.3", - 
"vitest": "^3.0.4", - "zx": "^8.3.1" + "vitest": "^3.0.9", + "zx": "^8.4.1" }, "dependencies": { "@huggingface/jinja": "^0.3.3", @@ -196,13 +198,13 @@ "is-unicode-supported": "^2.1.0", "lifecycle-utils": "^2.0.0", "log-symbols": "^7.0.0", - "nanoid": "^5.0.9", - "node-addon-api": "^8.3.0", - "octokit": "^4.1.0", - "ora": "^8.1.1", + "nanoid": "^5.1.5", + "node-addon-api": "^8.3.1", + "octokit": "^4.1.2", + "ora": "^8.2.0", "pretty-ms": "^9.2.0", "proper-lockfile": "^4.1.2", - "semver": "^7.7.0", + "semver": "^7.7.1", "simple-git": "^3.27.0", "slice-ansi": "^7.1.0", "stdout-update": "^4.0.1", diff --git a/packages/@node-llama-cpp/linux-arm64/package-lock.json b/packages/@node-llama-cpp/linux-arm64/package-lock.json index 64811136..3d6fd638 100644 --- a/packages/@node-llama-cpp/linux-arm64/package-lock.json +++ b/packages/@node-llama-cpp/linux-arm64/package-lock.json @@ -19,7 +19,7 @@ "typescript": "^5.2.2" }, "engines": { - "node": ">=18.0.0" + "node": ">=20.0.0" } }, "node_modules/typescript": { diff --git a/packages/@node-llama-cpp/linux-arm64/package.json b/packages/@node-llama-cpp/linux-arm64/package.json index 926f9cf1..40fcef10 100644 --- a/packages/@node-llama-cpp/linux-arm64/package.json +++ b/packages/@node-llama-cpp/linux-arm64/package.json @@ -19,7 +19,7 @@ } }, "engines": { - "node": ">=18.0.0" + "node": ">=20.0.0" }, "os": ["linux"], "cpu": ["arm64", "x64"], diff --git a/packages/@node-llama-cpp/linux-armv7l/package-lock.json b/packages/@node-llama-cpp/linux-armv7l/package-lock.json index 1895d2e3..dfed42ac 100644 --- a/packages/@node-llama-cpp/linux-armv7l/package-lock.json +++ b/packages/@node-llama-cpp/linux-armv7l/package-lock.json @@ -19,7 +19,7 @@ "typescript": "^5.2.2" }, "engines": { - "node": ">=18.0.0" + "node": ">=20.0.0" } }, "node_modules/typescript": { diff --git a/packages/@node-llama-cpp/linux-armv7l/package.json b/packages/@node-llama-cpp/linux-armv7l/package.json index c48bc32d..52c0e376 100644 --- a/packages/@node-llama-cpp/linux-armv7l/package.json +++ b/packages/@node-llama-cpp/linux-armv7l/package.json @@ -19,7 +19,7 @@ } }, "engines": { - "node": ">=18.0.0" + "node": ">=20.0.0" }, "os": ["linux"], "cpu": ["arm", "x64"], diff --git a/packages/@node-llama-cpp/linux-x64-cuda/package-lock.json b/packages/@node-llama-cpp/linux-x64-cuda/package-lock.json index 1e4ad5c5..fc278839 100644 --- a/packages/@node-llama-cpp/linux-x64-cuda/package-lock.json +++ b/packages/@node-llama-cpp/linux-x64-cuda/package-lock.json @@ -18,7 +18,7 @@ "typescript": "^5.2.2" }, "engines": { - "node": ">=18.0.0" + "node": ">=20.0.0" } }, "node_modules/typescript": { diff --git a/packages/@node-llama-cpp/linux-x64-cuda/package.json b/packages/@node-llama-cpp/linux-x64-cuda/package.json index c8ae63f6..ca0e0b74 100644 --- a/packages/@node-llama-cpp/linux-x64-cuda/package.json +++ b/packages/@node-llama-cpp/linux-x64-cuda/package.json @@ -19,7 +19,7 @@ } }, "engines": { - "node": ">=18.0.0" + "node": ">=20.0.0" }, "os": ["linux"], "cpu": ["x64"], diff --git a/packages/@node-llama-cpp/linux-x64-vulkan/package-lock.json b/packages/@node-llama-cpp/linux-x64-vulkan/package-lock.json index 4329124c..e782e92e 100644 --- a/packages/@node-llama-cpp/linux-x64-vulkan/package-lock.json +++ b/packages/@node-llama-cpp/linux-x64-vulkan/package-lock.json @@ -18,7 +18,7 @@ "typescript": "^5.2.2" }, "engines": { - "node": ">=18.0.0" + "node": ">=20.0.0" } }, "node_modules/typescript": { diff --git a/packages/@node-llama-cpp/linux-x64-vulkan/package.json 
b/packages/@node-llama-cpp/linux-x64-vulkan/package.json index 799e7a4f..ec0295c9 100644 --- a/packages/@node-llama-cpp/linux-x64-vulkan/package.json +++ b/packages/@node-llama-cpp/linux-x64-vulkan/package.json @@ -19,7 +19,7 @@ } }, "engines": { - "node": ">=18.0.0" + "node": ">=20.0.0" }, "os": ["linux"], "cpu": ["x64"], diff --git a/packages/@node-llama-cpp/linux-x64/package-lock.json b/packages/@node-llama-cpp/linux-x64/package-lock.json index e7a97869..5ec6f013 100644 --- a/packages/@node-llama-cpp/linux-x64/package-lock.json +++ b/packages/@node-llama-cpp/linux-x64/package-lock.json @@ -18,7 +18,7 @@ "typescript": "^5.2.2" }, "engines": { - "node": ">=18.0.0" + "node": ">=20.0.0" } }, "node_modules/typescript": { diff --git a/packages/@node-llama-cpp/linux-x64/package.json b/packages/@node-llama-cpp/linux-x64/package.json index a01a5454..7afc3ac5 100644 --- a/packages/@node-llama-cpp/linux-x64/package.json +++ b/packages/@node-llama-cpp/linux-x64/package.json @@ -19,7 +19,7 @@ } }, "engines": { - "node": ">=18.0.0" + "node": ">=20.0.0" }, "os": ["linux"], "cpu": ["x64"], diff --git a/packages/@node-llama-cpp/mac-arm64-metal/package-lock.json b/packages/@node-llama-cpp/mac-arm64-metal/package-lock.json index 56ec5729..7c1e50f0 100644 --- a/packages/@node-llama-cpp/mac-arm64-metal/package-lock.json +++ b/packages/@node-llama-cpp/mac-arm64-metal/package-lock.json @@ -19,7 +19,7 @@ "typescript": "^5.2.2" }, "engines": { - "node": ">=18.0.0" + "node": ">=20.0.0" } }, "node_modules/typescript": { diff --git a/packages/@node-llama-cpp/mac-arm64-metal/package.json b/packages/@node-llama-cpp/mac-arm64-metal/package.json index cec0aa6d..a8b775de 100644 --- a/packages/@node-llama-cpp/mac-arm64-metal/package.json +++ b/packages/@node-llama-cpp/mac-arm64-metal/package.json @@ -19,7 +19,7 @@ } }, "engines": { - "node": ">=18.0.0" + "node": ">=20.0.0" }, "os": ["darwin"], "cpu": ["arm64", "x64"], diff --git a/packages/@node-llama-cpp/mac-x64/package-lock.json b/packages/@node-llama-cpp/mac-x64/package-lock.json index 59a2bd2c..a080e6a3 100644 --- a/packages/@node-llama-cpp/mac-x64/package-lock.json +++ b/packages/@node-llama-cpp/mac-x64/package-lock.json @@ -18,7 +18,7 @@ "typescript": "^5.2.2" }, "engines": { - "node": ">=18.0.0" + "node": ">=20.0.0" } }, "node_modules/typescript": { diff --git a/packages/@node-llama-cpp/mac-x64/package.json b/packages/@node-llama-cpp/mac-x64/package.json index 60abd528..578544c1 100644 --- a/packages/@node-llama-cpp/mac-x64/package.json +++ b/packages/@node-llama-cpp/mac-x64/package.json @@ -19,7 +19,7 @@ } }, "engines": { - "node": ">=18.0.0" + "node": ">=20.0.0" }, "os": ["darwin"], "cpu": ["x64"], diff --git a/packages/@node-llama-cpp/win-arm64/package-lock.json b/packages/@node-llama-cpp/win-arm64/package-lock.json index 3de41a54..a7263ce2 100644 --- a/packages/@node-llama-cpp/win-arm64/package-lock.json +++ b/packages/@node-llama-cpp/win-arm64/package-lock.json @@ -19,7 +19,7 @@ "typescript": "^5.2.2" }, "engines": { - "node": ">=18.0.0" + "node": ">=20.0.0" } }, "node_modules/typescript": { diff --git a/packages/@node-llama-cpp/win-arm64/package.json b/packages/@node-llama-cpp/win-arm64/package.json index 14640133..6141f6e5 100644 --- a/packages/@node-llama-cpp/win-arm64/package.json +++ b/packages/@node-llama-cpp/win-arm64/package.json @@ -19,7 +19,7 @@ } }, "engines": { - "node": ">=18.0.0" + "node": ">=20.0.0" }, "os": ["win32"], "cpu": ["arm64", "x64"], diff --git a/packages/@node-llama-cpp/win-x64-cuda/package-lock.json 
b/packages/@node-llama-cpp/win-x64-cuda/package-lock.json index 9d2abece..f258ab6c 100644 --- a/packages/@node-llama-cpp/win-x64-cuda/package-lock.json +++ b/packages/@node-llama-cpp/win-x64-cuda/package-lock.json @@ -18,7 +18,7 @@ "typescript": "^5.2.2" }, "engines": { - "node": ">=18.0.0" + "node": ">=20.0.0" } }, "node_modules/typescript": { diff --git a/packages/@node-llama-cpp/win-x64-cuda/package.json b/packages/@node-llama-cpp/win-x64-cuda/package.json index 3449db2c..7bc17f53 100644 --- a/packages/@node-llama-cpp/win-x64-cuda/package.json +++ b/packages/@node-llama-cpp/win-x64-cuda/package.json @@ -19,7 +19,7 @@ } }, "engines": { - "node": ">=18.0.0" + "node": ">=20.0.0" }, "os": ["win32"], "cpu": ["x64"], diff --git a/packages/@node-llama-cpp/win-x64-vulkan/package-lock.json b/packages/@node-llama-cpp/win-x64-vulkan/package-lock.json index 780c05ba..cae12dd0 100644 --- a/packages/@node-llama-cpp/win-x64-vulkan/package-lock.json +++ b/packages/@node-llama-cpp/win-x64-vulkan/package-lock.json @@ -18,7 +18,7 @@ "typescript": "^5.2.2" }, "engines": { - "node": ">=18.0.0" + "node": ">=20.0.0" } }, "node_modules/typescript": { diff --git a/packages/@node-llama-cpp/win-x64-vulkan/package.json b/packages/@node-llama-cpp/win-x64-vulkan/package.json index bad56ecd..a2166361 100644 --- a/packages/@node-llama-cpp/win-x64-vulkan/package.json +++ b/packages/@node-llama-cpp/win-x64-vulkan/package.json @@ -19,7 +19,7 @@ } }, "engines": { - "node": ">=18.0.0" + "node": ">=20.0.0" }, "os": ["win32"], "cpu": ["x64"], diff --git a/packages/@node-llama-cpp/win-x64/package-lock.json b/packages/@node-llama-cpp/win-x64/package-lock.json index 5c2ab298..b4e0159f 100644 --- a/packages/@node-llama-cpp/win-x64/package-lock.json +++ b/packages/@node-llama-cpp/win-x64/package-lock.json @@ -18,7 +18,7 @@ "typescript": "^5.2.2" }, "engines": { - "node": ">=18.0.0" + "node": ">=20.0.0" } }, "node_modules/typescript": { diff --git a/packages/@node-llama-cpp/win-x64/package.json b/packages/@node-llama-cpp/win-x64/package.json index 3e58c1c1..f8068150 100644 --- a/packages/@node-llama-cpp/win-x64/package.json +++ b/packages/@node-llama-cpp/win-x64/package.json @@ -19,7 +19,7 @@ } }, "engines": { - "node": ">=18.0.0" + "node": ">=20.0.0" }, "os": ["win32"], "cpu": ["x64"], diff --git a/packages/create-node-llama-cpp/package-lock.json b/packages/create-node-llama-cpp/package-lock.json index b9cf550e..bc662b64 100644 --- a/packages/create-node-llama-cpp/package-lock.json +++ b/packages/create-node-llama-cpp/package-lock.json @@ -18,7 +18,7 @@ "typescript": "^5.2.2" }, "engines": { - "node": ">=18.0.0" + "node": ">=20.0.0" }, "funding": { "type": "github", @@ -102,7 +102,7 @@ "zx": "^7.2.3" }, "engines": { - "node": ">=18.0.0" + "node": ">=20.0.0" }, "funding": { "type": "github", diff --git a/packages/create-node-llama-cpp/package.json b/packages/create-node-llama-cpp/package.json index 723568d8..e1e11321 100644 --- a/packages/create-node-llama-cpp/package.json +++ b/packages/create-node-llama-cpp/package.json @@ -23,7 +23,7 @@ } }, "engines": { - "node": ">=18.0.0" + "node": ">=20.0.0" }, "scripts": { "prebuild": "rimraf ./dist ./tsconfig.tsbuildinfo", diff --git a/src/ChatWrapper.ts b/src/ChatWrapper.ts index 7a2e8354..acd8a4e5 100644 --- a/src/ChatWrapper.ts +++ b/src/ChatWrapper.ts @@ -8,6 +8,7 @@ import {ChatModelFunctionsDocumentationGenerator} from "./chatWrappers/utils/Cha import {jsonDumps} from "./chatWrappers/utils/jsonDumps.js"; import {defaultChatSystemPrompt} from "./config.js"; import 
{getChatWrapperSegmentDefinition} from "./utils/getChatWrapperSegmentDefinition.js"; +import type {JinjaTemplateChatWrapperOptions} from "./chatWrappers/generic/JinjaTemplateChatWrapper.js"; export abstract class ChatWrapper { public static defaultSettings: ChatWrapperSettings = { @@ -17,13 +18,15 @@ export abstract class ChatWrapper { optionalPrefixSpace: true, prefix: "||call: ", paramsPrefix: LlamaText(new SpecialTokensText("(")), - suffix: LlamaText(new SpecialTokensText(")")) + suffix: LlamaText(new SpecialTokensText(")")), + emptyCallParamsPlaceholder: "" }, result: { prefix: LlamaText(new SpecialTokensText("\n"), "||result: "), suffix: LlamaText(new SpecialTokensText("\n")) } - } + }, + segments: {} }; public abstract readonly wrapperName: string; @@ -102,13 +105,16 @@ export abstract class ChatWrapper { } public generateFunctionCall(name: string, params: any): LlamaText { + const emptyCallParamsPlaceholder = this.settings.functions.call.emptyCallParamsPlaceholder; return LlamaText([ this.settings.functions.call.prefix, name, this.settings.functions.call.paramsPrefix, ( params === undefined - ? "" + ? (emptyCallParamsPlaceholder === undefined || emptyCallParamsPlaceholder === "") + ? "" + : jsonDumps(emptyCallParamsPlaceholder) : jsonDumps(params) ), this.settings.functions.call.suffix @@ -139,7 +145,7 @@ export abstract class ChatWrapper { ]); } - public generateModelResponseText(modelResponse: ChatModelResponse["response"], useRawCall: boolean = true): LlamaText { + public generateModelResponseText(modelResponse: ChatModelResponse["response"], useRawValues: boolean = true): LlamaText { const res: LlamaText[] = []; const pendingFunctionCalls: ChatModelFunctionCall[] = []; const segmentStack: ChatModelSegmentType[] = []; @@ -150,7 +156,7 @@ export abstract class ChatWrapper { if (pendingFunctionCalls.length === 0) return; - res.push(this.generateFunctionCallsAndResults(pendingFunctionCalls, useRawCall)); + res.push(this.generateFunctionCallsAndResults(pendingFunctionCalls, useRawValues)); pendingFunctionCalls.length = 0; needsToAddSegmentReminder = true; }; @@ -180,11 +186,10 @@ export abstract class ChatWrapper { } else if (isChatModelResponseSegment(response)) { addFunctionCalls(); - if (response.raw != null && useRawCall) + const segmentDefinition = getChatWrapperSegmentDefinition(this.settings, response.segmentType); + if (response.raw != null && useRawValues) res.push(LlamaText.fromJSON(response.raw)); - else { - const segmentDefinition = getChatWrapperSegmentDefinition(this.settings, response.segmentType); - + else res.push( LlamaText([ (segmentStack.length > 0 && segmentStack.at(-1) === response.segmentType) @@ -197,12 +202,15 @@ export abstract class ChatWrapper { ]) ); - lastSegmentEndedWithoutSuffix = response.ended && segmentDefinition?.suffix == null; + lastSegmentEndedWithoutSuffix = response.ended && segmentDefinition?.suffix == null; + + if (!response.ended && segmentStack.at(-1) !== response.segmentType) + segmentStack.push(response.segmentType); + else if (response.ended && segmentStack.at(-1) === response.segmentType) { + segmentStack.pop(); - if (!response.ended) - segmentStack.push(response.segmentType); - else if (segmentStack.at(-1) === response.segmentType) - segmentStack.pop(); + if (segmentStack.length === 0 && segmentDefinition?.suffix == null && this.settings.segments?.closeAllSegments != null) + res.push(LlamaText(this.settings.segments.closeAllSegments)); } continue; @@ -277,9 +285,7 @@ export abstract class ChatWrapper { } /** @internal */ - public 
static _getOptionConfigurationsToTestIfCanSupersedeJinjaTemplate(): ( - Array | [testConfig: Record, applyConfig: Record]> - ) { + public static _getOptionConfigurationsToTestIfCanSupersedeJinjaTemplate(): ChatWrapperJinjaMatchConfiguration { return [{}] satisfies ChatWrapperJinjaMatchConfiguration; } @@ -293,13 +299,9 @@ type FirstItemOfTupleOrFallback = T extends [infer U, export type ChatWrapperJinjaMatchConfiguration = Array< FirstItemOfTupleOrFallback, object> | - [ - testConfig: FirstItemOfTupleOrFallback, object>, - applyConfig: FirstItemOfTupleOrFallback, object> - ] | [ testConfig: FirstItemOfTupleOrFallback, object>, applyConfig: FirstItemOfTupleOrFallback, object>, - testJinjaParameters: Record + testJinjaChatWrapperOptions?: JinjaTemplateChatWrapperOptions ] >; diff --git a/src/bindings/AddonTypes.ts b/src/bindings/AddonTypes.ts index c88c1af7..c6d8a6cd 100644 --- a/src/bindings/AddonTypes.ts +++ b/src/bindings/AddonTypes.ts @@ -143,7 +143,7 @@ export type AddonContext = { // startPos in inclusive, endPos is exclusive shiftSequenceTokenCells(sequenceId: number, startPos: number, endPos: number, shiftDelta: number): void, - getEmbedding(inputTokensLength: number): Float64Array, + getEmbedding(inputTokensLength: number, maxVectorSize?: number): Float64Array, getStateSize(): number, getThreads(): number, setThreads(threads: number): void, diff --git a/src/bindings/Llama.ts b/src/bindings/Llama.ts index e894521d..ad025b09 100644 --- a/src/bindings/Llama.ts +++ b/src/bindings/Llama.ts @@ -638,6 +638,8 @@ function getTransformedLogLevel(level: LlamaLogLevel, message: string): LlamaLog return LlamaLogLevel.log; else if (level === LlamaLogLevel.info && message.startsWith("load_backend: loaded ")) return LlamaLogLevel.log; + else if (level === LlamaLogLevel.warn && message.startsWith("make_cpu_buft_list: disabling extra buffer types")) + return LlamaLogLevel.info; return level; } diff --git a/src/chatWrappers/AlpacaChatWrapper.ts b/src/chatWrappers/AlpacaChatWrapper.ts index 22454587..a1ccd1fb 100644 --- a/src/chatWrappers/AlpacaChatWrapper.ts +++ b/src/chatWrappers/AlpacaChatWrapper.ts @@ -35,10 +35,10 @@ export class AlpacaChatWrapper extends GeneralChatWrapper { } /** @internal */ - public static override _getOptionConfigurationsToTestIfCanSupersedeJinjaTemplate() { + public static override _getOptionConfigurationsToTestIfCanSupersedeJinjaTemplate(): ChatWrapperJinjaMatchConfiguration { return [ {}, {allowSpecialTokensInTitles: true} - ] satisfies ChatWrapperJinjaMatchConfiguration; + ]; } } diff --git a/src/chatWrappers/DeepSeekChatWrapper.ts b/src/chatWrappers/DeepSeekChatWrapper.ts index b65c5385..168a8e38 100644 --- a/src/chatWrappers/DeepSeekChatWrapper.ts +++ b/src/chatWrappers/DeepSeekChatWrapper.ts @@ -348,14 +348,14 @@ export class DeepSeekChatWrapper extends ChatWrapper { } /** @internal */ - public static override _getOptionConfigurationsToTestIfCanSupersedeJinjaTemplate() { + public static override _getOptionConfigurationsToTestIfCanSupersedeJinjaTemplate(): ChatWrapperJinjaMatchConfiguration { return [ - {}, - {keepOnlyLastThought: true}, - {functionCallingSyntax: "simplified"}, - {functionCallingSyntax: "simplified", keepOnlyLastThought: true}, - {functionCallingSyntax: "original"}, - {functionCallingSyntax: "original", keepOnlyLastThought: true} - ] satisfies ChatWrapperJinjaMatchConfiguration; + [undefined, {}, {functionCallMessageTemplate: "noJinja"}], + [undefined, {keepOnlyLastThought: true}, {functionCallMessageTemplate: "noJinja"}], + [undefined, 
{functionCallingSyntax: "simplified"}, {functionCallMessageTemplate: "noJinja"}], + [undefined, {functionCallingSyntax: "simplified", keepOnlyLastThought: true}, {functionCallMessageTemplate: "noJinja"}], + [undefined, {functionCallingSyntax: "original"}, {functionCallMessageTemplate: "noJinja"}], + [undefined, {functionCallingSyntax: "original", keepOnlyLastThought: true}, {functionCallMessageTemplate: "noJinja"}] + ]; } } diff --git a/src/chatWrappers/FalconChatWrapper.ts b/src/chatWrappers/FalconChatWrapper.ts index 50198535..6d593078 100644 --- a/src/chatWrappers/FalconChatWrapper.ts +++ b/src/chatWrappers/FalconChatWrapper.ts @@ -154,10 +154,10 @@ export class FalconChatWrapper extends ChatWrapper { } /** @internal */ - public static override _getOptionConfigurationsToTestIfCanSupersedeJinjaTemplate() { + public static override _getOptionConfigurationsToTestIfCanSupersedeJinjaTemplate(): ChatWrapperJinjaMatchConfiguration { return [ {}, {allowSpecialTokensInTitles: true} - ] satisfies ChatWrapperJinjaMatchConfiguration; + ]; } } diff --git a/src/chatWrappers/FunctionaryChatWrapper.ts b/src/chatWrappers/FunctionaryChatWrapper.ts index b35df672..402939b1 100644 --- a/src/chatWrappers/FunctionaryChatWrapper.ts +++ b/src/chatWrappers/FunctionaryChatWrapper.ts @@ -204,7 +204,7 @@ export class FunctionaryChatWrapper extends ChatWrapper { : LlamaText([ new SpecialTokensText(">>>all\n"), response, - (isLastItem && isLastResponse) + (!isLastResponse || isLastItem) ? LlamaText([]) : new SpecialToken("EOT") ]) @@ -726,12 +726,12 @@ export class FunctionaryChatWrapper extends ChatWrapper { } /** @internal */ - public static override _getOptionConfigurationsToTestIfCanSupersedeJinjaTemplate() { + public static override _getOptionConfigurationsToTestIfCanSupersedeJinjaTemplate(): ChatWrapperJinjaMatchConfiguration { return [ {variation: "v3"}, {variation: "v2.llama3"}, {variation: "v2"} - ] satisfies ChatWrapperJinjaMatchConfiguration; + ]; } } diff --git a/src/chatWrappers/GeneralChatWrapper.ts b/src/chatWrappers/GeneralChatWrapper.ts index bb5d66d4..f7ac31b5 100644 --- a/src/chatWrappers/GeneralChatWrapper.ts +++ b/src/chatWrappers/GeneralChatWrapper.ts @@ -173,10 +173,10 @@ export class GeneralChatWrapper extends ChatWrapper { } /** @internal */ - public static override _getOptionConfigurationsToTestIfCanSupersedeJinjaTemplate() { + public static override _getOptionConfigurationsToTestIfCanSupersedeJinjaTemplate(): ChatWrapperJinjaMatchConfiguration { return [ {}, {allowSpecialTokensInTitles: true} - ] satisfies ChatWrapperJinjaMatchConfiguration; + ]; } } diff --git a/src/chatWrappers/Llama2ChatWrapper.ts b/src/chatWrappers/Llama2ChatWrapper.ts index 3c9ec5a5..f8d9846a 100644 --- a/src/chatWrappers/Llama2ChatWrapper.ts +++ b/src/chatWrappers/Llama2ChatWrapper.ts @@ -115,10 +115,10 @@ export class Llama2ChatWrapper extends ChatWrapper { } /** @internal */ - public static override _getOptionConfigurationsToTestIfCanSupersedeJinjaTemplate() { + public static override _getOptionConfigurationsToTestIfCanSupersedeJinjaTemplate(): ChatWrapperJinjaMatchConfiguration { return [ {addSpaceBeforeEos: false}, {addSpaceBeforeEos: true} - ] satisfies ChatWrapperJinjaMatchConfiguration; + ]; } } diff --git a/src/chatWrappers/Llama3_1ChatWrapper.ts b/src/chatWrappers/Llama3_1ChatWrapper.ts index b1ebf7e5..36290259 100644 --- a/src/chatWrappers/Llama3_1ChatWrapper.ts +++ b/src/chatWrappers/Llama3_1ChatWrapper.ts @@ -341,15 +341,19 @@ export class Llama3_1ChatWrapper extends ChatWrapper { } /** @internal */ - 
public static override _getOptionConfigurationsToTestIfCanSupersedeJinjaTemplate() { + public static override _getOptionConfigurationsToTestIfCanSupersedeJinjaTemplate(): ChatWrapperJinjaMatchConfiguration { return [ - {}, - [{todayDate: null}, {}], - [{cuttingKnowledgeDate: null}, {}], - [{noToolInstructions: true}, {}], - [{todayDate: null, cuttingKnowledgeDate: null}, {}], - [{todayDate: null, cuttingKnowledgeDate: null, noToolInstructions: true}, {}], - [{todayDate: new Date("2024-07-26T00:00:00"), cuttingKnowledgeDate: null, noToolInstructions: true}, {}], + [{}, undefined, {functionCallMessageTemplate: "noJinja"}], + [{todayDate: null}, {}, {functionCallMessageTemplate: "noJinja"}], + [{cuttingKnowledgeDate: null}, {}, {functionCallMessageTemplate: "noJinja"}], + [{noToolInstructions: true}, {}, {functionCallMessageTemplate: "noJinja"}], + [{todayDate: null, cuttingKnowledgeDate: null}, {}, {functionCallMessageTemplate: "noJinja"}], + [{todayDate: null, cuttingKnowledgeDate: null, noToolInstructions: true}, {}, {functionCallMessageTemplate: "noJinja"}], + [ + {todayDate: new Date("2024-07-26T00:00:00"), cuttingKnowledgeDate: null, noToolInstructions: true}, + {}, + {functionCallMessageTemplate: "noJinja"} + ], [ { @@ -358,7 +362,10 @@ export class Llama3_1ChatWrapper extends ChatWrapper { noToolInstructions: true }, {cuttingKnowledgeDate: new Date("2023-12-01T00:00:00Z")}, - {"date_string": formatDate(new Date("2024-07-26T00:00:00"), undefined)} + { + additionalRenderParameters: {"date_string": formatDate(new Date("2024-07-26T00:00:00"), undefined)}, + functionCallMessageTemplate: "noJinja" + } ], [ @@ -369,9 +376,12 @@ export class Llama3_1ChatWrapper extends ChatWrapper { _specialTokensTextForPreamble: true }, {cuttingKnowledgeDate: new Date("2023-12-01T00:00:00Z")}, - {"date_string": formatDate(new Date("2024-07-26T00:00:00"), undefined)} + { + additionalRenderParameters: {"date_string": formatDate(new Date("2024-07-26T00:00:00"), undefined)}, + functionCallMessageTemplate: "noJinja" + } ] - ] satisfies ChatWrapperJinjaMatchConfiguration; + ]; } } diff --git a/src/chatWrappers/Llama3_2LightweightChatWrapper.ts b/src/chatWrappers/Llama3_2LightweightChatWrapper.ts index 518a679c..a9f6bb89 100644 --- a/src/chatWrappers/Llama3_2LightweightChatWrapper.ts +++ b/src/chatWrappers/Llama3_2LightweightChatWrapper.ts @@ -24,7 +24,8 @@ export class Llama3_2LightweightChatWrapper extends ChatWrapper { optionalPrefixSpace: true, prefix: '{"name": "', paramsPrefix: '", "parameters": ', - suffix: LlamaText("}", new SpecialToken("EOT")) + suffix: LlamaText("}", new SpecialToken("EOT")), + emptyCallParamsPlaceholder: {} }, result: { prefix: LlamaText(new SpecialToken("EOT"), new SpecialTokensText("<|start_header_id|>ipython<|end_header_id|>\n\n")), @@ -312,15 +313,19 @@ export class Llama3_2LightweightChatWrapper extends ChatWrapper { } /** @internal */ - public static override _getOptionConfigurationsToTestIfCanSupersedeJinjaTemplate() { + public static override _getOptionConfigurationsToTestIfCanSupersedeJinjaTemplate(): ChatWrapperJinjaMatchConfiguration { return [ - {}, - [{todayDate: null}, {}], - [{cuttingKnowledgeDate: null}, {}], - [{noToolInstructions: true}, {}], - [{todayDate: null, cuttingKnowledgeDate: null}, {}], - [{todayDate: null, cuttingKnowledgeDate: null, noToolInstructions: true}, {}], - [{todayDate: new Date("2024-07-26T00:00:00"), cuttingKnowledgeDate: null, noToolInstructions: true}, {}], + [{}, undefined, {functionCallMessageTemplate: "noJinja"}], + [{todayDate: null}, {}, 
{functionCallMessageTemplate: "noJinja"}], + [{cuttingKnowledgeDate: null}, {}, {functionCallMessageTemplate: "noJinja"}], + [{noToolInstructions: true}, {}, {functionCallMessageTemplate: "noJinja"}], + [{todayDate: null, cuttingKnowledgeDate: null}, {}, {functionCallMessageTemplate: "noJinja"}], + [{todayDate: null, cuttingKnowledgeDate: null, noToolInstructions: true}, {}, {functionCallMessageTemplate: "noJinja"}], + [ + {todayDate: new Date("2024-07-26T00:00:00"), cuttingKnowledgeDate: null, noToolInstructions: true}, + {}, + {functionCallMessageTemplate: "noJinja"} + ], [ { @@ -329,7 +334,10 @@ export class Llama3_2LightweightChatWrapper extends ChatWrapper { noToolInstructions: true }, {cuttingKnowledgeDate: new Date("2023-12-01T00:00:00Z")}, - {"date_string": formatDate(new Date("2024-07-26T00:00:00"), undefined)} + { + additionalRenderParameters: {"date_string": formatDate(new Date("2024-07-26T00:00:00"), undefined)}, + functionCallMessageTemplate: "noJinja" + } ], [ @@ -340,9 +348,12 @@ export class Llama3_2LightweightChatWrapper extends ChatWrapper { _specialTokensTextForPreamble: true }, {cuttingKnowledgeDate: new Date("2023-12-01T00:00:00Z")}, - {"date_string": formatDate(new Date("2024-07-26T00:00:00"), undefined)} + { + additionalRenderParameters: {"date_string": formatDate(new Date("2024-07-26T00:00:00"), undefined)}, + functionCallMessageTemplate: "noJinja" + } ] - ] satisfies ChatWrapperJinjaMatchConfiguration; + ]; } } diff --git a/src/chatWrappers/MistralChatWrapper.ts b/src/chatWrappers/MistralChatWrapper.ts index 3b3bc593..72321c80 100644 --- a/src/chatWrappers/MistralChatWrapper.ts +++ b/src/chatWrappers/MistralChatWrapper.ts @@ -14,47 +14,57 @@ import {chunkChatItems} from "./utils/chunkChatItems.js"; export class MistralChatWrapper extends ChatWrapper { public readonly wrapperName: string = "Mistral"; - public override readonly settings: ChatWrapperSettings = { - supportsSystemMessages: true, - functions: { - call: { - optionalPrefixSpace: true, - prefix: '{"name": "', - paramsPrefix: '", "arguments": ', - suffix: "}" - }, - result: { - prefix: '{"name": "{{functionName}}", "content": ', - suffix: "}" - }, - parallelism: { - call: { - sectionPrefix: LlamaText(new SpecialTokensText("[TOOL_CALLS]"), "["), - betweenCalls: ", ", - sectionSuffix: LlamaText("]", new SpecialToken("EOS")) - }, - result: { - sectionPrefix: LlamaText(new SpecialTokensText("[TOOL_RESULTS]"), "["), - betweenResults: ", ", - sectionSuffix: LlamaText("]", new SpecialTokensText("[/TOOL_RESULTS]")) - } - } - } - }; + public override readonly settings: ChatWrapperSettings; /** @internal */ private readonly _addSpaceBeforeEos: boolean; + /** @internal */ private readonly _stringifyFunctionCallResult: boolean; - public constructor({ - addSpaceBeforeEos = false - }: { + public constructor(options: { /** * Default to `true` */ - addSpaceBeforeEos?: boolean + addSpaceBeforeEos?: boolean, + + /** @internal */ + _noFunctionNameInResult?: boolean, + + /** @internal */ + _stringifyFunctionCallResult?: boolean } = {}) { super(); + const { + addSpaceBeforeEos = false, + _noFunctionNameInResult = false, + _stringifyFunctionCallResult = false + } = options; this._addSpaceBeforeEos = addSpaceBeforeEos; + this._stringifyFunctionCallResult = _stringifyFunctionCallResult; + this.settings = { + supportsSystemMessages: true, + functions: { + call: { + optionalPrefixSpace: true, + prefix: '{"name": "', + paramsPrefix: '", "arguments": ', + suffix: "}", + emptyCallParamsPlaceholder: {} + }, + result: { + prefix: 
_noFunctionNameInResult + ? LlamaText(new SpecialTokensText("[TOOL_RESULTS]"), '{"content": ') + : LlamaText(new SpecialTokensText("[TOOL_RESULTS]"), '{"name": "{{functionName}}", "content": '), + suffix: LlamaText("}", new SpecialTokensText("[/TOOL_RESULTS]")) + }, + parallelism: { + call: { + sectionPrefix: LlamaText(new SpecialTokensText("[TOOL_CALLS]"), "["), + betweenCalls: ", ", + sectionSuffix: LlamaText("]", new SpecialToken("EOS")) + } + } + } + }; } public override addAvailableFunctionsSystemMessageToHistory(history: readonly ChatHistoryItem[]) { @@ -148,6 +158,13 @@ export class MistralChatWrapper extends ChatWrapper { }]; } + public override generateFunctionCallResult(functionName: string, functionParams: any, result: any) { + if (this._stringifyFunctionCallResult && result !== undefined) + return super.generateFunctionCallResult(functionName, functionParams, jsonDumps(result)); + + return super.generateFunctionCallResult(functionName, functionParams, result); + } + /** @internal */ private _generateAvailableToolsText({ availableFunctions, @@ -215,10 +232,14 @@ export class MistralChatWrapper extends ChatWrapper { } /** @internal */ - public static override _getOptionConfigurationsToTestIfCanSupersedeJinjaTemplate() { + public static override _getOptionConfigurationsToTestIfCanSupersedeJinjaTemplate(): ChatWrapperJinjaMatchConfiguration { return [ - {addSpaceBeforeEos: false}, - {addSpaceBeforeEos: true} - ] satisfies ChatWrapperJinjaMatchConfiguration; + [{addSpaceBeforeEos: false, _noFunctionNameInResult: true, _stringifyFunctionCallResult: true}, {addSpaceBeforeEos: false}], + [{addSpaceBeforeEos: true, _noFunctionNameInResult: true, _stringifyFunctionCallResult: true}, {addSpaceBeforeEos: true}], + [{addSpaceBeforeEos: false, _noFunctionNameInResult: true}, {addSpaceBeforeEos: false}], + [{addSpaceBeforeEos: true, _noFunctionNameInResult: true}, {addSpaceBeforeEos: true}], + [{addSpaceBeforeEos: false}, {addSpaceBeforeEos: false}], + [{addSpaceBeforeEos: true}, {addSpaceBeforeEos: true}] + ]; } } diff --git a/src/chatWrappers/QwenChatWrapper.ts b/src/chatWrappers/QwenChatWrapper.ts new file mode 100644 index 00000000..6ac44a97 --- /dev/null +++ b/src/chatWrappers/QwenChatWrapper.ts @@ -0,0 +1,216 @@ +import {ChatWrapper, ChatWrapperJinjaMatchConfiguration} from "../ChatWrapper.js"; +import { + ChatModelFunctions, ChatWrapperCheckModelCompatibilityParams, ChatWrapperGenerateContextStateOptions, ChatWrapperGeneratedContextState, + ChatWrapperSettings, isChatModelResponseSegment +} from "../types.js"; +import {LlamaText, SpecialToken, SpecialTokensText} from "../utils/LlamaText.js"; +import {GgufArchitectureType} from "../gguf/types/GgufMetadataTypes.js"; +import {ChatModelFunctionsDocumentationGenerator} from "./utils/ChatModelFunctionsDocumentationGenerator.js"; + +// source: https://huggingface.co/Qwen/Qwen2.5-14B-Instruct-1M/blob/main/tokenizer_config.json#L197 +export class QwenChatWrapper extends ChatWrapper { + public readonly wrapperName: string = "Qwen"; + + public readonly keepOnlyLastThought: boolean; + + public override readonly settings: ChatWrapperSettings = { + supportsSystemMessages: true, + functions: { + call: { + optionalPrefixSpace: true, + prefix: LlamaText("\n", new SpecialTokensText(""), '\n{"name": "'), + paramsPrefix: '", "arguments": ', + suffix: LlamaText("}\n", new SpecialTokensText("")), + emptyCallParamsPlaceholder: {} + }, + result: { + prefix: LlamaText(new SpecialTokensText("\n\n")), + suffix: LlamaText(new SpecialTokensText("\n")) + }, + 
parallelism: { + call: { + sectionPrefix: "", + sectionSuffix: LlamaText(new SpecialTokensText("<|im_end|>\n")) + }, + result: { + sectionPrefix: LlamaText(new SpecialTokensText("<|im_start|>user")), + sectionSuffix: LlamaText(new SpecialTokensText("<|im_end|>\n<|im_start|>assistant\n")) + } + } + }, + segments: { + reiterateStackAfterFunctionCalls: true, + thought: { + prefix: LlamaText(new SpecialTokensText("")), + suffix: LlamaText(new SpecialTokensText("")) + } + } + }; + + public constructor(options: { + /** + * Whether to keep only the chain of thought from the last model response. + * + * Setting this to `false` will keep all the chain of thoughts from the model responses in the context state. + * + * Defaults to `true`. + */ + keepOnlyLastThought?: boolean + } = {}) { + super(); + + const { + keepOnlyLastThought = true + } = options; + + this.keepOnlyLastThought = keepOnlyLastThought; + } + + public override generateContextState({ + chatHistory, availableFunctions, documentFunctionParams + }: ChatWrapperGenerateContextStateOptions): ChatWrapperGeneratedContextState { + const historyWithFunctions = this.addAvailableFunctionsSystemMessageToHistory(chatHistory, availableFunctions, { + documentParams: documentFunctionParams + }); + + const resultItems: Array<{ + system: LlamaText, + user: LlamaText, + model: LlamaText + }> = []; + + let systemTexts: LlamaText[] = []; + let userTexts: LlamaText[] = []; + let modelTexts: LlamaText[] = []; + let currentAggregateFocus: "system" | null = null; + + function flush() { + if (systemTexts.length > 0 || userTexts.length > 0 || modelTexts.length > 0) + resultItems.push({ + system: LlamaText.joinValues("\n\n", systemTexts), + user: LlamaText.joinValues("\n\n", userTexts), + model: LlamaText.joinValues("\n\n", modelTexts) + }); + + systemTexts = []; + userTexts = []; + modelTexts = []; + } + + for (let i = 0; i < historyWithFunctions.length; i++) { + const item = historyWithFunctions[i]!; + const isLastItem = i === historyWithFunctions.length - 1; + + if (item.type === "system") { + if (currentAggregateFocus !== "system") + flush(); + + currentAggregateFocus = "system"; + systemTexts.push(LlamaText.fromJSON(item.text)); + } else if (item.type === "user") { + flush(); + + currentAggregateFocus = null; + userTexts.push(LlamaText(item.text)); + } else if (item.type === "model") { + flush(); + + currentAggregateFocus = null; + modelTexts.push( + this.generateModelResponseText( + (this.keepOnlyLastThought && !isLastItem) + ? item.response.filter((response) => ( + !isChatModelResponseSegment(response) || response.segmentType !== "thought" + )) + : item.response + ) + ); + } else + void (item satisfies never); + } + + flush(); + + const contextText = LlamaText( + resultItems.map(({system, user, model}, index) => { + const isLastItem = index === resultItems.length - 1; + + return LlamaText([ + (system.values.length === 0) + ? LlamaText([]) + : LlamaText([ + new SpecialTokensText("<|im_start|>system\n"), + system, + new SpecialTokensText("<|im_end|>\n") + ]), + + (user.values.length === 0) + ? LlamaText([]) + : LlamaText([ + new SpecialTokensText("<|im_start|>user\n"), + user, + new SpecialTokensText("<|im_end|>\n") + ]), + + (model.values.length === 0 && !isLastItem) + ? LlamaText([]) + : LlamaText([ + new SpecialTokensText("<|im_start|>assistant\n"), + model, + + isLastItem + ? 
LlamaText([]) + : new SpecialTokensText("<|im_end|>\n") + ]) + ]); + }) + ); + + return { + contextText, + stopGenerationTriggers: [ + LlamaText(new SpecialToken("EOS")), + LlamaText(new SpecialTokensText("<|im_end|>")), + LlamaText("<|im_end|>") + ] + }; + } + + public override generateAvailableFunctionsSystemText(availableFunctions: ChatModelFunctions, {documentParams = true}: { + documentParams?: boolean + }) { + const functionsDocumentationGenerator = new ChatModelFunctionsDocumentationGenerator(availableFunctions); + + if (!functionsDocumentationGenerator.hasAnyFunctions) + return LlamaText([]); + + return LlamaText.joinValues("\n", [ + "# Tools", + "", + "You may call one or more functions to assist with the user query.", + "", + LlamaText("You are provided with function signatures within ", new SpecialTokensText(""), " XML tags:"), + LlamaText(new SpecialTokensText("")), + functionsDocumentationGenerator.getQwenFunctionSignatures({documentParams}), + LlamaText(new SpecialTokensText("")), + "", + LlamaText("For each function call, return a json object with function name and arguments within ", new SpecialTokensText(""), " XML tags:"), + LlamaText(new SpecialTokensText("")), + '{"name": , "arguments": }', + LlamaText(new SpecialTokensText("")) + ]); + } + + /** @internal */ + public static override _checkModelCompatibility(options: ChatWrapperCheckModelCompatibilityParams): boolean { + const architecture = options.fileInfo?.metadata.general.architecture; + return architecture == null || architecture === GgufArchitectureType.qwen2; + } + + /** @internal */ + public static override _getOptionConfigurationsToTestIfCanSupersedeJinjaTemplate(): ChatWrapperJinjaMatchConfiguration { + return [ + [undefined, {}, {_requireFunctionCallSettingsExtraction: true}] + ]; + } +} diff --git a/src/chatWrappers/generic/JinjaTemplateChatWrapper.ts b/src/chatWrappers/generic/JinjaTemplateChatWrapper.ts index 3f656a18..642173cc 100644 --- a/src/chatWrappers/generic/JinjaTemplateChatWrapper.ts +++ b/src/chatWrappers/generic/JinjaTemplateChatWrapper.ts @@ -1,14 +1,26 @@ import {Template} from "@huggingface/jinja"; import {splitText} from "lifecycle-utils"; import { - ChatHistoryItem, ChatUserMessage, ChatWrapperGenerateContextStateOptions, ChatWrapperGeneratedContextState, ChatWrapperSettings + ChatHistoryItem, ChatModelFunctions, ChatUserMessage, ChatWrapperGenerateContextStateOptions, ChatWrapperGeneratedContextState, + ChatWrapperSettings, Tokenizer } from "../../types.js"; import {SpecialToken, LlamaText, SpecialTokensText} from "../../utils/LlamaText.js"; import {ChatWrapper} from "../../ChatWrapper.js"; +import { + fromChatHistoryToIntermediateOpenAiMessages, fromIntermediateToCompleteOpenAiMessages, IntermediateOpenAiMessage, + OpenAiChatAssistantMessage, OpenAiChatMessage +} from "../../utils/OpenAIFormat.js"; +import {removeUndefinedFields} from "../../utils/removeNullFields.js"; +import {jsonDumps} from "../utils/jsonDumps.js"; +import {tryMatrix} from "../../utils/optionsMatrix.js"; import {ChatHistoryFunctionCallMessageTemplate, parseFunctionCallMessageTemplate} from "./utils/chatHistoryFunctionCallMessageTemplate.js"; import { templateSegmentOptionsToChatWrapperSettings, TemplateChatWrapperSegmentsOptions } from "./utils/templateSegmentOptionsToChatWrapperSettings.js"; +import {UniqueIdGenerator} from "./utils/UniqueIdGenerator.js"; +import {extractFunctionCallSettingsFromJinjaTemplate} from "./utils/extractFunctionCallSettingsFromJinjaTemplate.js"; +import {squashChatHistoryItems} from 
"./utils/squashChatHistoryItems.js"; +import {extractSegmentSettingsFromTokenizerAndChatTemplate} from "./utils/extractSegmentSettingsFromTokenizerAndChatTemplate.js"; export type JinjaTemplateChatWrapperOptions = { template: string, @@ -44,7 +56,19 @@ export type JinjaTemplateChatWrapperOptions = { * Defaults to `"auto"`. */ convertUnsupportedSystemMessagesToUserMessages?: "auto" | boolean | JinjaTemplateChatWrapperOptionsConvertMessageFormat, - functionCallMessageTemplate?: ChatHistoryFunctionCallMessageTemplate, + + /** + * Template format for how functions can be called by the model and how their results are fed to the model after function calls. + * + * - **`"auto"`**: Extract the function call message template from the Jinja template. + * Fallback to the default template if not found. + * - **`"noJinja"`**: Use the default template. + * - **Custom template**: Use the specified {@link ChatHistoryFunctionCallMessageTemplate template}. + * See {@link ChatHistoryFunctionCallMessageTemplate `ChatHistoryFunctionCallMessageTemplate`} for more details. + * + * Defaults to `"auto"`. + */ + functionCallMessageTemplate?: "auto" | "noJinja" | ChatHistoryFunctionCallMessageTemplate, /** * Whether to join adjacent messages of the same type. @@ -69,7 +93,17 @@ export type JinjaTemplateChatWrapperOptions = { /** * Format of the segments generated by the model (like thought segments) */ - segments?: TemplateChatWrapperSegmentsOptions + segments?: TemplateChatWrapperSegmentsOptions, + + /** + * Pass a model's tokenizer to attempt to detect common tokens used for chat formatting from it. + * + * Currently only used for detecting support for `` tags for thought segments. + */ + tokenizer?: Tokenizer, + + /** @internal */ + _requireFunctionCallSettingsExtraction?: boolean }; export type JinjaTemplateChatWrapperOptionsConvertMessageFormat = { @@ -124,24 +158,33 @@ export class JinjaTemplateChatWrapper extends ChatWrapper { public readonly additionalRenderParameters?: Record; /** @internal */ private readonly _jinjaTemplate: Template; + /** @internal */ private readonly _usingJinjaFunctionCallTemplate: boolean = false; + /** @internal */ private readonly _stringifyFunctionParams: boolean = false; + /** @internal */ private readonly _stringifyFunctionResult: boolean = false; + /** @internal */ private readonly _combineJinjaModelMessageAndToolCalls: boolean = true; + /** @internal */ private readonly _endJinjaMessagesWithUserMessage: boolean = false; /** * @param options */ - public constructor({ - template, - modelRoleName = "assistant", - userRoleName = "user", - systemRoleName = "system", - convertUnsupportedSystemMessagesToUserMessages = defaultConvertUnsupportedSystemMessagesToUserMessagesFormat, - functionCallMessageTemplate, - joinAdjacentMessagesOfTheSameType = true, - trimLeadingWhitespaceInResponses = true, - additionalRenderParameters, - segments - }: JinjaTemplateChatWrapperOptions) { + public constructor(options: JinjaTemplateChatWrapperOptions) { super(); + const { + template, + modelRoleName = "assistant", + userRoleName = "user", + systemRoleName = "system", + convertUnsupportedSystemMessagesToUserMessages = defaultConvertUnsupportedSystemMessagesToUserMessagesFormat, + functionCallMessageTemplate = "auto", + joinAdjacentMessagesOfTheSameType = true, + trimLeadingWhitespaceInResponses = true, + additionalRenderParameters, + segments, + tokenizer, + _requireFunctionCallSettingsExtraction = false + } = options; + if (template == null) throw new Error("template cannot be null"); @@ -155,69 
+198,298 @@ export class JinjaTemplateChatWrapper extends ChatWrapper { this.trimLeadingWhitespaceInResponses = trimLeadingWhitespaceInResponses; this.additionalRenderParameters = additionalRenderParameters; + if (this.convertUnsupportedSystemMessagesToUserMessages != null && !this.convertUnsupportedSystemMessagesToUserMessages.format.includes("{{message}}")) + throw new Error('convertUnsupportedSystemMessagesToUserMessages format must include "{{message}}"'); + + this._jinjaTemplate = new Template(this.template); + this.settings = { ...ChatWrapper.defaultSettings, - functions: parseFunctionCallMessageTemplate(functionCallMessageTemplate) ?? ChatWrapper.defaultSettings.functions, segments: templateSegmentOptionsToChatWrapperSettings(segments) }; - if (this.convertUnsupportedSystemMessagesToUserMessages != null && !this.convertUnsupportedSystemMessagesToUserMessages.format.includes("{{message}}")) - throw new Error('convertUnsupportedSystemMessagesToUserMessages format must include "{{message}}"'); + const {supportsSystemMessages, needsToEndJinjaMessagesWithUserMessage} = this._runSanityTest(); + this.settings = { + ...this.settings, + supportsSystemMessages, + segments: { + ...this.settings.segments, + ...extractSegmentSettingsFromTokenizerAndChatTemplate(this.template, tokenizer) + } + }; - this._jinjaTemplate = new Template(this.template); + if (needsToEndJinjaMessagesWithUserMessage) + this._endJinjaMessagesWithUserMessage = true; + + let functionCallSettings = parseFunctionCallMessageTemplate( + (functionCallMessageTemplate === "auto" || functionCallMessageTemplate === "noJinja") + ? undefined + : functionCallMessageTemplate + ); + if (functionCallSettings == null && functionCallMessageTemplate !== "noJinja") { + try { + const idsGenerator = new UniqueIdGenerator( + this.template + this.modelRoleName + this.userRoleName + this.systemRoleName + + (this.convertUnsupportedSystemMessagesToUserMessages?.format ?? "") + ); + const extractedSettings = extractFunctionCallSettingsFromJinjaTemplate({ + idsGenerator, + renderTemplate: ({ + chatHistory, functions, additionalParams, stringifyFunctionParams, stringifyFunctionResults, + combineModelMessageAndToolCalls, squashModelTextResponses = true + }) => { + const render = ( + convertSystemMessagesToUserMessagesFormat: + JinjaTemplateChatWrapperOptionsConvertMessageFormat["format"] | undefined, + wipeFunctionCallIds: boolean | "align" + ) => { + const {messages: intermediateMessages, tools} = fromChatHistoryToIntermediateOpenAiMessages({ + chatHistory: this._transformChatHistory(chatHistory, { + convertSystemMessagesToUserMessagesFormat, + joinAdjacentMessagesOfTheSameType: !squashModelTextResponses + ? 
false + : undefined + }).transformedHistory, + chatWrapperSettings: this.settings, + useRawValues: false, + functions, + stringifyFunctionParams, + stringifyFunctionResults, + combineModelMessageAndToolCalls, + squashModelTextResponses + }); + + const messages = fromIntermediateToCompleteOpenAiMessages(intermediateMessages) + .map((item) => { + if (!wipeFunctionCallIds) + return item; + + if (item.role === "assistant" && item["tool_calls"] != null && item["tool_calls"].length > 0) { + for (const toolCall of item["tool_calls"]) { + if (wipeFunctionCallIds === "align") + toolCall.id = "fc_1_0001"; + else + delete (toolCall as {id?: string}).id; + } + } else if (item.role === "tool") { + if (wipeFunctionCallIds === "align") + item["tool_call_id"] = "fc_1_0001"; + else + delete (item as {"tool_call_id"?: string})["tool_call_id"]; + } + + return item; + }); + + const lastJinjaItem = messages.at(-1); + let eraseRenderedJinjaFromId: string | undefined; + if (this._endJinjaMessagesWithUserMessage && lastJinjaItem?.role === this.modelRoleName && + typeof lastJinjaItem.content === "string" && + lastJinjaItem.content.length > 0 && + ( + (lastJinjaItem as OpenAiChatAssistantMessage)["tool_calls"] == null || + (lastJinjaItem as OpenAiChatAssistantMessage)["tool_calls"]?.length === 0 + ) + ) { + eraseRenderedJinjaFromId = lastJinjaItem.content; + messages.push({ + role: this.userRoleName, + content: idsGenerator.generateId() + } as OpenAiChatMessage); + } + + let res = this._jinjaTemplate.render({ + ...( + this.additionalRenderParameters == null + ? {} + : structuredClone(this.additionalRenderParameters) + ), + ...additionalParams, + messages, + ...removeUndefinedFields({tools}) + }); + + if (eraseRenderedJinjaFromId != null) { + const eraseIndex = res.lastIndexOf(eraseRenderedJinjaFromId); + if (eraseIndex >= 0) + res = res.slice(0, eraseIndex + eraseRenderedJinjaFromId.length); + } + + // attempt to remove the ID pattern from the output + if (wipeFunctionCallIds === "align") + res = res + .replaceAll(/,\s*"(tool_call_id|call_id|id)":\s*"fc_1_0001"/g, "") + .replaceAll(/"(tool_call_id|call_id|id)":\s*"fc_1_0001"\s*,/g, ""); + + return res; + }; + + return tryMatrix({ + convertSystemMessagesToUserMessagesFormat: + getConvertUnsupportedSystemMessagesToUserMessagesTryOptions( + this.convertUnsupportedSystemMessagesToUserMessages + ), + wipeFunctionCallIds: [true, "align", false] + }, ({convertSystemMessagesToUserMessagesFormat, wipeFunctionCallIds}) => { + return render(convertSystemMessagesToUserMessagesFormat, wipeFunctionCallIds); + }); + } + }); + functionCallSettings = extractedSettings.settings; + + if (functionCallSettings != null) { + this._usingJinjaFunctionCallTemplate = true; + this._stringifyFunctionParams = extractedSettings.stringifyParams; + this._stringifyFunctionResult = extractedSettings.stringifyResult; + } + } catch (err) { + // do nothing + } + + if (functionCallSettings == null && _requireFunctionCallSettingsExtraction) + throw new Error("failed to extract function call settings from the Jinja template"); + } - const {supportsSystemMessages} = this._runSanityTest(); this.settings = { ...this.settings, - supportsSystemMessages + functions: functionCallSettings ?? ChatWrapper.defaultSettings.functions }; } + /** + * Whether the function call syntax settings were extracted from the given Jinja template. + * + * The function call syntax settings can be accessed using the `.settings.functions` property. 
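+ * + * A minimal usage sketch (assuming a Jinja `chatTemplate` string is available): + * ```ts + * const wrapper = new JinjaTemplateChatWrapper({template: chatTemplate}); + * if (wrapper.usingJinjaFunctionCallTemplate) + *     console.log(wrapper.settings.functions); // function call syntax extracted from the Jinja template + * ```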
+ */ + public get usingJinjaFunctionCallTemplate() { + return this._usingJinjaFunctionCallTemplate; + } + public override generateContextState({ chatHistory, availableFunctions, documentFunctionParams }: ChatWrapperGenerateContextStateOptions): ChatWrapperGeneratedContextState & { transformedSystemMessagesToUserMessages: boolean } { - const historyWithFunctions = this.addAvailableFunctionsSystemMessageToHistory(chatHistory, availableFunctions, { - documentParams: documentFunctionParams + const { + contextText, stopGenerationTriggers, ignoreStartText, functionCall, transformedSystemMessagesToUserMessages + } = this._generateContextState({ + chatHistory, availableFunctions, documentFunctionParams, + endJinjaMessagesWithUserMessage: this._endJinjaMessagesWithUserMessage }); - if (this.convertUnsupportedSystemMessagesToUserMessages == null) { - return this._generateContextText(historyWithFunctions, { - convertSystemMessagesToUserMessagesFormat: undefined - }); - } else if (this.convertUnsupportedSystemMessagesToUserMessages.use === "always") { - return this._generateContextText(historyWithFunctions, { - convertSystemMessagesToUserMessagesFormat: this.convertUnsupportedSystemMessagesToUserMessages.format - }); - } + return {contextText, stopGenerationTriggers, ignoreStartText, functionCall, transformedSystemMessagesToUserMessages}; + } - try { - return this._generateContextText(historyWithFunctions, { - convertSystemMessagesToUserMessagesFormat: undefined - }); - } catch (error) { - return this._generateContextText(historyWithFunctions, { - convertSystemMessagesToUserMessagesFormat: this.convertUnsupportedSystemMessagesToUserMessages.format + public override addAvailableFunctionsSystemMessageToHistory( + history: readonly ChatHistoryItem[], + availableFunctions?: ChatModelFunctions, + options: {documentParams?: boolean} = {} + ) { + if (this._usingJinjaFunctionCallTemplate) + return history; + + return super.addAvailableFunctionsSystemMessageToHistory(history, availableFunctions, options); + } + + public override generateFunctionCall(name: string, params: any): LlamaText { + if (!this._stringifyFunctionParams) + return super.generateFunctionCall(name, params); + + const emptyCallParamsPlaceholder = this.settings.functions.call.emptyCallParamsPlaceholder; + return LlamaText([ + this.settings.functions.call.prefix, + name, + this.settings.functions.call.paramsPrefix, + ( + params === undefined + ? (emptyCallParamsPlaceholder === undefined || emptyCallParamsPlaceholder === "") + ? "" + : JSON.stringify(jsonDumps(emptyCallParamsPlaceholder)) + : JSON.stringify(jsonDumps(params)) + ), + this.settings.functions.call.suffix + ]); + } + + public override generateFunctionCallResult(functionName: string, functionParams: any, result: any): LlamaText { + const resolveParameters = (text: string | LlamaText) => { + return LlamaText(text) + .mapValues((value) => { + if (typeof value !== "string") + return value; + + const funcParamsText = functionParams === undefined + ? "" + : jsonDumps(functionParams); + + return value + .replaceAll("{{functionName}}", functionName) + .replaceAll( + "{{functionParams}}", + (this._stringifyFunctionParams && funcParamsText !== "") + ? JSON.stringify(funcParamsText) + : funcParamsText + ); + }); + }; + + const resultText = result === undefined + ? "void" + : jsonDumps(result); + + return LlamaText([ + resolveParameters(this.settings.functions.result.prefix), + ( + (this._stringifyFunctionResult && result !== undefined) + ? 
JSON.stringify(resultText) + : resultText + ), + resolveParameters(this.settings.functions.result.suffix) + ]); + } + + /** @internal */ + private _generateContextState({ + chatHistory, availableFunctions, documentFunctionParams, endJinjaMessagesWithUserMessage + }: ChatWrapperGenerateContextStateOptions & { + endJinjaMessagesWithUserMessage?: boolean + }) { + return tryMatrix({ + convertSystemMessagesToUserMessagesFormat: + getConvertUnsupportedSystemMessagesToUserMessagesTryOptions(this.convertUnsupportedSystemMessagesToUserMessages), + endJinjaMessagesWithUserMessage: endJinjaMessagesWithUserMessage == null + ? [false, true] + : [endJinjaMessagesWithUserMessage], + useMessagesWithEmbeddedTools: this._usingJinjaFunctionCallTemplate + ? [undefined, true] + : [undefined] + }, ({ + useMessagesWithEmbeddedTools, endJinjaMessagesWithUserMessage, convertSystemMessagesToUserMessagesFormat + }) => { + return this._generateContextText(chatHistory, { + convertSystemMessagesToUserMessagesFormat, availableFunctions, documentFunctionParams, + endJinjaMessagesWithUserMessage, + useMessagesWithEmbeddedTools }); - } + }); } /** @internal */ - private _generateContextText(history: readonly ChatHistoryItem[], { - convertSystemMessagesToUserMessagesFormat + private _transformChatHistory(history: readonly ChatHistoryItem[], { + convertSystemMessagesToUserMessagesFormat, availableFunctions, documentFunctionParams = true, + joinAdjacentMessagesOfTheSameType = this.joinAdjacentMessagesOfTheSameType }: { - convertSystemMessagesToUserMessagesFormat?: string - }): { - contextText: LlamaText, - stopGenerationTriggers: LlamaText[], - ignoreStartText?: LlamaText[], - transformedSystemMessagesToUserMessages: boolean - } { + convertSystemMessagesToUserMessagesFormat?: string, availableFunctions?: ChatModelFunctions, documentFunctionParams?: boolean, + joinAdjacentMessagesOfTheSameType?: boolean + }) { + const historyWithFunctions = this.addAvailableFunctionsSystemMessageToHistory(history, availableFunctions, { + documentParams: documentFunctionParams + }); + let transformedSystemMessagesToUserMessages = false; const transformedHistory = convertSystemMessagesToUserMessagesFormat == null - ? history - : history.map((item) => { + ? historyWithFunctions + : historyWithFunctions.map((item) => { if (item.type === "system") { transformedSystemMessagesToUserMessages = true; return { @@ -232,76 +504,117 @@ export class JinjaTemplateChatWrapper extends ChatWrapper { return item; }); - const resultItems: Array<{ - role: "system" | "user" | "model", - content: LlamaText - }> = []; - - const currentTexts: LlamaText[] = []; - let currentAggregateFocus: "system" | "user" | "model" | null = null; - - function flush() { - if (currentTexts.length > 0 && currentAggregateFocus != null) - resultItems.push({role: currentAggregateFocus, content: LlamaText.joinValues("\n\n", currentTexts)}); + return { + transformedHistory: joinAdjacentMessagesOfTheSameType + ? 
squashChatHistoryItems(transformedHistory) + : transformedHistory, + transformedSystemMessagesToUserMessages + }; + } - currentTexts.length = 0; - } + /** @internal */ + private _generateContextText(history: readonly ChatHistoryItem[], { + convertSystemMessagesToUserMessagesFormat, availableFunctions, documentFunctionParams = true, + endJinjaMessagesWithUserMessage, useMessagesWithEmbeddedTools = false + }: { + convertSystemMessagesToUserMessagesFormat?: string, availableFunctions?: ChatModelFunctions, documentFunctionParams?: boolean, + endJinjaMessagesWithUserMessage: boolean, useMessagesWithEmbeddedTools?: boolean + }): ChatWrapperGeneratedContextState & { + transformedSystemMessagesToUserMessages: boolean, + endJinjaMessagesWithUserMessage: boolean + } { + const { + transformedSystemMessagesToUserMessages, + transformedHistory + } = this._transformChatHistory(history, {convertSystemMessagesToUserMessagesFormat, availableFunctions, documentFunctionParams}); + + const generateMessagesWithEmbeddedTools = (chatHistory: readonly ChatHistoryItem[]) => ({ + messages: chatHistory.map((item): IntermediateOpenAiMessage => { + if (item.type === "system") + return { + role: "system", + content: LlamaText.fromJSON(item.text) + }; + else if (item.type === "user") + return { + role: "user", + content: LlamaText(item.text) + }; + else if (item.type === "model") + return { + role: "assistant", + content: this.generateModelResponseText(item.response) + }; - for (const item of transformedHistory) { - if (item.type === "system") { - if (!this.joinAdjacentMessagesOfTheSameType || currentAggregateFocus !== "system") - flush(); - - currentAggregateFocus = "system"; - currentTexts.push(LlamaText.fromJSON(item.text)); - } else if (item.type === "user") { - if (!this.joinAdjacentMessagesOfTheSameType || currentAggregateFocus !== "user") - flush(); - - currentAggregateFocus = "user"; - currentTexts.push(LlamaText(item.text)); - } else if (item.type === "model") { - if (!this.joinAdjacentMessagesOfTheSameType || currentAggregateFocus !== "model") - flush(); - - currentAggregateFocus = "model"; - currentTexts.push(this.generateModelResponseText(item.response)); - } else void (item satisfies never); - } + return {role: "user", content: LlamaText("")}; + }), + tools: undefined + }); + const generateMessagesWithTools = (chatHistory: readonly ChatHistoryItem[]) => ( + fromChatHistoryToIntermediateOpenAiMessages({ + chatHistory, + chatWrapperSettings: this.settings, + useRawValues: false, + functions: (availableFunctions != null && !documentFunctionParams) + ? Object.fromEntries( + Object.entries(availableFunctions) + .map(([funcName, {description, ...func}]) => [funcName, func]) + ) + : availableFunctions, + stringifyFunctionParams: this._stringifyFunctionParams, + stringifyFunctionResults: this._stringifyFunctionResult, + combineModelMessageAndToolCalls: this._combineJinjaModelMessageAndToolCalls + }) + ); - const lastItemIsModelMessage = currentAggregateFocus === "model"; - flush(); + const lastItemIsModelMessage = transformedHistory.at(-1)?.type === "model"; + const {messages: intermediateMessages, tools} = this._usingJinjaFunctionCallTemplate + ? useMessagesWithEmbeddedTools + ? 
{ + messages: generateMessagesWithEmbeddedTools(transformedHistory).messages, + tools: generateMessagesWithTools(transformedHistory).tools + } + : generateMessagesWithTools(transformedHistory) + : generateMessagesWithEmbeddedTools(transformedHistory); - const idsGenerator = new UniqueTemplateId( + const idsGenerator = new UniqueIdGenerator( this.template + this.modelRoleName + this.userRoleName + this.systemRoleName + - (convertSystemMessagesToUserMessagesFormat ?? "") + resultItems.map(({content}) => content.toString()).join("\n\n") + (convertSystemMessagesToUserMessagesFormat ?? "") + + intermediateMessages.map(({content}) => (content?.toString() ?? "")).join("\n\n") ); - const jinjaItems: Array<{ - role: string, - content: string - }> = []; + const jinjaItems: OpenAiChatMessage[] = []; const jinjaRoleMap = { system: this.systemRoleName, user: this.userRoleName, - model: this.modelRoleName + assistant: this.modelRoleName, + tool: "tool" } as const; const idToContent = new Map(); const modelMessageIds = new Set(); const messageIds = new Set(); - for (const resultItem of resultItems) { - const id = idsGenerator.generateId(); + for (const intermediateMessage of intermediateMessages) { + if (intermediateMessage.content == null) { + jinjaItems.push({ + ...intermediateMessage, + role: jinjaRoleMap[intermediateMessage.role] ?? intermediateMessage.role + } as OpenAiChatMessage); + continue; + } + + const id = idsGenerator.generateId(intermediateMessage.role === "tool"); messageIds.add(id); - idToContent.set(id, resultItem.content); + idToContent.set(id, LlamaText(intermediateMessage.content)); jinjaItems.push({ - role: jinjaRoleMap[resultItem.role], + ...intermediateMessage, + role: jinjaRoleMap[intermediateMessage.role] ?? intermediateMessage.role, content: id - }); + } as OpenAiChatMessage); - if (resultItem.role === "model") + if (intermediateMessage.role === "assistant" || intermediateMessage.role === "tool") modelMessageIds.add(id); } @@ -313,47 +626,49 @@ export class JinjaTemplateChatWrapper extends ChatWrapper { idToContent.set(eosTokenId, new SpecialToken("EOS")); idToContent.set(eotTokenId, new SpecialToken("EOT")); - function tryOptions any)[]>(options: T): ReturnType { - for (let i = 0; i < options.length; i++) { - if (i === options.length - 1) - return options[i]!(); - - try { - return options[i]!(); - } catch (err) { - // do nothing - } - } - - throw new Error("All options failed"); + const lastJinjaItem = jinjaItems.at(-1); + let eraseRenderedJinjaFromId: string | undefined; + if (endJinjaMessagesWithUserMessage && lastJinjaItem?.role === this.modelRoleName && + typeof lastJinjaItem.content === "string" && + lastJinjaItem.content.length > 0 && + ( + (lastJinjaItem as OpenAiChatAssistantMessage)["tool_calls"] == null || + (lastJinjaItem as OpenAiChatAssistantMessage)["tool_calls"]?.length === 0 + ) + ) { + eraseRenderedJinjaFromId = lastJinjaItem.content; + jinjaItems.push({ + role: this.userRoleName, + content: idsGenerator.generateId() + } as OpenAiChatMessage); } const renderJinjaText = () => { - return tryOptions([ - () => this._jinjaTemplate.render({ - ...( - this.additionalRenderParameters == null - ? {} - : structuredClone(this.additionalRenderParameters) - ), - messages: jinjaItems, - "bos_token": bosTokenId, - "eos_token": eosTokenId, - "eot_token": eotTokenId - }), - () => this._jinjaTemplate.render({ + let res = tryMatrix({ + options: [{}, {"add_generation_prompt": true}] + }, ({options}) => ( + this._jinjaTemplate.render({ ...( this.additionalRenderParameters == null ? 
{} : structuredClone(this.additionalRenderParameters) ), messages: jinjaItems, + ...removeUndefinedFields({tools}), "bos_token": bosTokenId, "eos_token": eosTokenId, "eot_token": eotTokenId, - "add_generation_prompt": true + ...options }) - ]); + )); + + if (eraseRenderedJinjaFromId != null) { + const eraseIndex = res.lastIndexOf(eraseRenderedJinjaFromId); + if (eraseIndex >= 0) + res = res.slice(0, eraseIndex + eraseRenderedJinjaFromId.length); + } + + return res; }; const validateThatAllMessageIdsAreUsed = (parts: ReturnType<typeof splitText<string>>) => { @@ -465,7 +780,8 @@ export class JinjaTemplateChatWrapper extends ChatWrapper { ] ) ], - transformedSystemMessagesToUserMessages + transformedSystemMessagesToUserMessages, + endJinjaMessagesWithUserMessage }; } @@ -473,51 +789,43 @@ export class JinjaTemplateChatWrapper extends ChatWrapper { * Validate that this Jinja template can be rendered * @internal */ - private _runSanityTest() { + private _runSanityTest(needsToEndJinjaMessagesWithUserMessage: boolean = false): { + supportsSystemMessages: boolean, + needsToEndJinjaMessagesWithUserMessage: boolean + } { try { let supportsSystemMessages = true; for (const chatHistory of chatHistoriesForSanityTest) { - const {transformedSystemMessagesToUserMessages} = this.generateContextState({chatHistory}); + const { + transformedSystemMessagesToUserMessages, + endJinjaMessagesWithUserMessage: endedJinjaMessagesWithUserMessage + } = this._generateContextState({ + chatHistory, + endJinjaMessagesWithUserMessage: needsToEndJinjaMessagesWithUserMessage + ? true + : undefined + }); if (transformedSystemMessagesToUserMessages) supportsSystemMessages = false; + + if (!needsToEndJinjaMessagesWithUserMessage && endedJinjaMessagesWithUserMessage) { + if (chatHistory !== chatHistoriesForSanityTest[0]) + // validate that this doesn't break the template + return this._runSanityTest(true); + else + needsToEndJinjaMessagesWithUserMessage = true; + } } - return {supportsSystemMessages}; + return {supportsSystemMessages, needsToEndJinjaMessagesWithUserMessage}; } catch (err) { throw new Error("The provided Jinja template failed the sanity test: " + String(err) + ". 
Inspect the Jinja template to find out what went wrong"); } } } -class UniqueTemplateId { - public readonly antiText: string; - private readonly _ids = new Set(); - - public constructor(antiText: string) { - this.antiText = antiText; - } - - public generateId(): string { - let id: string; - - do { - id = "W" + (Math.random() - .toString(36) - .slice(2)) + "W"; - } while (this._ids.has(id) || this.antiText.includes(id)); - - this._ids.add(id); - - return id; - } - - public removeId(id: string) { - this._ids.delete(id); - } -} - function resolveConvertUnsupportedSystemMessagesToUserMessagesOption( convertUnsupportedSystemMessagesToUserMessages?: JinjaTemplateChatWrapperOptions["convertUnsupportedSystemMessagesToUserMessages"] ): JinjaTemplateChatWrapperOptionsConvertMessageFormat | undefined { @@ -545,6 +853,17 @@ function resolveConvertUnsupportedSystemMessagesToUserMessagesOption( return {...defaultConvertUnsupportedSystemMessagesToUserMessagesFormat, use: "ifNeeded"}; } +function getConvertUnsupportedSystemMessagesToUserMessagesTryOptions( + convertUnsupportedSystemMessagesToUserMessages?: JinjaTemplateChatWrapperOptionsConvertMessageFormat +) { + if (convertUnsupportedSystemMessagesToUserMessages == null) + return [undefined]; + else if (convertUnsupportedSystemMessagesToUserMessages.use === "always") + return [convertUnsupportedSystemMessagesToUserMessages.format]; + + return [undefined, convertUnsupportedSystemMessagesToUserMessages.format]; +} + const chatHistoriesForSanityTest: ChatHistoryItem[][] = [ [{ type: "system", diff --git a/src/chatWrappers/generic/utils/UniqueIdGenerator.ts b/src/chatWrappers/generic/utils/UniqueIdGenerator.ts new file mode 100644 index 00000000..af39f8db --- /dev/null +++ b/src/chatWrappers/generic/utils/UniqueIdGenerator.ts @@ -0,0 +1,39 @@ +export class UniqueIdGenerator { + public readonly antiText: string; + private readonly _ids = new Set(); + + public constructor(antiText: string) { + this.antiText = antiText; + } + + public generateId(numbersOnly: boolean = false): string { + let id: string; + + do { + if (numbersOnly) { + do { + id = ( + Math.random() + .toString(10) + .slice(2) + .slice(0, String(Number.MAX_SAFE_INTEGER).length - 1) + ); + } while (id.startsWith("0")); + } else + id = "W" + ( + Math.random() + .toString(36) + .slice(2) + ) + "W"; + } while (this._ids.has(id) || this.antiText.includes(id)); + + this._ids.add(id); + + return id; + } + + public removeId(id: string) { + this._ids.delete(id); + } +} + diff --git a/src/chatWrappers/generic/utils/chatHistoryFunctionCallMessageTemplate.ts b/src/chatWrappers/generic/utils/chatHistoryFunctionCallMessageTemplate.ts index 960545a0..bd7f5eee 100644 --- a/src/chatWrappers/generic/utils/chatHistoryFunctionCallMessageTemplate.ts +++ b/src/chatWrappers/generic/utils/chatHistoryFunctionCallMessageTemplate.ts @@ -61,10 +61,11 @@ export function parseFunctionCallMessageTemplate( } /** - * Template format for how functions can be called by the model and how their results are fed to the model after the function call. - * Consists of an array with two elements: - * 1. The function call template. - * 2. The function call result template. + * Template format for how functions can be called by the model and how their results are fed to the model after function calls. + * + * Consists of an object with two properties: + * 1. **`call`**: The function call template. + * 2. **`result`**: The function call result template. 
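+ * + * Each template is a plain-text string that embeds placeholders such as `{{functionName}}` and `{{functionParams}}` around the generated values (see the example that follows).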
* * For example: * ```ts diff --git a/src/chatWrappers/generic/utils/extractFunctionCallSettingsFromJinjaTemplate.ts b/src/chatWrappers/generic/utils/extractFunctionCallSettingsFromJinjaTemplate.ts new file mode 100644 index 00000000..95b69f0f --- /dev/null +++ b/src/chatWrappers/generic/utils/extractFunctionCallSettingsFromJinjaTemplate.ts @@ -0,0 +1,569 @@ +import {splitText} from "lifecycle-utils"; +import {ChatHistoryItem, ChatModelFunctions, ChatWrapperSettings} from "../../../types.js"; +import {LlamaText, SpecialToken, SpecialTokensText} from "../../../utils/LlamaText.js"; +import {UniqueIdGenerator} from "./UniqueIdGenerator.js"; +import {getFirstValidResult} from "./getFirstValidResult.js"; + +export function extractFunctionCallSettingsFromJinjaTemplate({ + idsGenerator, + renderTemplate +}: { + idsGenerator: UniqueIdGenerator, + renderTemplate({}: { + chatHistory: ChatHistoryItem[], functions: ChatModelFunctions, additionalParams: Record, + stringifyFunctionParams: boolean, stringifyFunctionResults: boolean, combineModelMessageAndToolCalls: boolean, + squashModelTextResponses?: boolean + }): string +}): { + settings: ChatWrapperSettings["functions"] | null, + stringifyParams: boolean, + stringifyResult: boolean, + combineModelMessageAndToolCalls: boolean +} { + const idToStaticContent = new Map(); + + const bosTokenId = idsGenerator.generateId(); + const eosTokenId = idsGenerator.generateId(); + const eotTokenId = idsGenerator.generateId(); + + idToStaticContent.set(bosTokenId, new SpecialToken("BOS")); + idToStaticContent.set(eosTokenId, new SpecialToken("EOS")); + idToStaticContent.set(eotTokenId, new SpecialToken("EOT")); + + const contentIds = new Set(); + const addContentId = (id: string) => { + contentIds.add(id); + + return id; + }; + + const systemMessage = addContentId(idsGenerator.generateId()); + const userMessage1 = addContentId(idsGenerator.generateId()); + const modelMessage1 = addContentId(idsGenerator.generateId()); + const func1name = addContentId(idsGenerator.generateId()); + const func1description = addContentId(idsGenerator.generateId()); + const func1params = addContentId(idsGenerator.generateId(true)); + const func1result = addContentId(idsGenerator.generateId(true)); + const func2name = addContentId(idsGenerator.generateId()); + const func2description = addContentId(idsGenerator.generateId()); + const func2params = addContentId(idsGenerator.generateId(true)); + const func2result = addContentId(idsGenerator.generateId(true)); + const modelMessage2 = addContentId(idsGenerator.generateId()); + const func1StringifyParam = addContentId(idsGenerator.generateId()); + const func1StringifyResult = addContentId(idsGenerator.generateId()); + + const functions1: ChatModelFunctions = { + [func1name]: { + description: func1description, + params: { + type: "number" + } + } + } as ChatModelFunctions; + const functions2: ChatModelFunctions = { + ...functions1, + [func2name]: { + description: func2description, + params: { + type: "number" + } + } + } as ChatModelFunctions; + + const baseChatHistory: ChatHistoryItem[] = [{ + type: "system", + text: systemMessage + }, { + type: "user", + text: userMessage1 + }]; + const chatHistory1Call: ChatHistoryItem[] = [...baseChatHistory, { + type: "model", + response: [ + modelMessage1, + { + type: "functionCall", + name: func1name, + + // convert to number since this will go through JSON.stringify, + // and we want to avoid escaping characters in the rendered output + params: Number(func1params), + result: Number(func1result), + 
startsNewChunk: true + }, + modelMessage2 + ] + }]; + const chatHistory2Calls: ChatHistoryItem[] = [...baseChatHistory, { + type: "model", + response: [ + modelMessage1, + { + type: "functionCall", + name: func1name, + + // convert to number since this will go through JSON.stringify, + // and we want to avoid escaping characters in the rendered output + params: Number(func1params), + result: Number(func1result), + startsNewChunk: true + }, + { + type: "functionCall", + name: func2name, + params: Number(func2params), + result: Number(func2result), + startsNewChunk: false + }, + modelMessage2 + ] + }]; + const chatHistory2CallsNewChunk: ChatHistoryItem[] = [...baseChatHistory, { + type: "model", + response: [ + modelMessage1, + { + type: "functionCall", + name: func1name, + + // convert to number since this will go through JSON.stringify, + // and we want to avoid escaping characters in the rendered output + params: Number(func1params), + result: Number(func1result), + startsNewChunk: true + }, + { + type: "functionCall", + name: func2name, + params: Number(func2params), + result: Number(func2result), + startsNewChunk: true + }, + modelMessage2 + ] + }]; + + const additionalParams = { + "bos_token": bosTokenId, + "eos_token": eosTokenId, + "eot_token": eotTokenId + }; + + let combineModelMessageAndToolCalls = true; + let stringifyParams = true; + let stringifyResult = true; + try { + const paramsObjectTest = renderTemplate({ + chatHistory: [...baseChatHistory, { + type: "model", + response: [ + modelMessage1, + { + type: "functionCall", + name: func1name, + params: {[func1StringifyParam]: "test"}, + result: func1StringifyResult, + startsNewChunk: true + }, + modelMessage2 + ] + }], + functions: functions1, + additionalParams, + stringifyFunctionParams: false, + stringifyFunctionResults: false, + combineModelMessageAndToolCalls + }); + stringifyParams = ( + !paramsObjectTest.includes(`"${func1StringifyParam}":`) && + !paramsObjectTest.includes(`'${func1StringifyParam}':`) + ); + } catch (err) { + // do nothing + } + + try { + const resultObjectTest = renderTemplate({ + chatHistory: [...baseChatHistory, { + type: "model", + response: [ + modelMessage1, + { + type: "functionCall", + name: func1name, + params: func1StringifyParam, + result: {[func1StringifyResult]: "test"}, + startsNewChunk: true + }, + modelMessage2 + ] + }], + functions: functions1, + additionalParams, + stringifyFunctionParams: false, + stringifyFunctionResults: false, + combineModelMessageAndToolCalls + }); + stringifyResult = ( + !resultObjectTest.includes(`"${func1StringifyResult}":`) && + !resultObjectTest.includes(`'${func1StringifyResult}':`) + ); + } catch (err) { + // do nothing + } + + combineModelMessageAndToolCalls = renderTemplate({ + chatHistory: chatHistory1Call, + functions: functions1, + additionalParams, + stringifyFunctionParams: true, + stringifyFunctionResults: true, + combineModelMessageAndToolCalls + }).includes(modelMessage1); + + let textBetween2TextualModelResponses: LlamaText = LlamaText(); + if (!combineModelMessageAndToolCalls) { + try { + const betweenModelTextualResponsesTest = renderTemplate({ + chatHistory: [...baseChatHistory, { + type: "model", + response: [modelMessage1] + }, { + type: "model", + response: [modelMessage2] + }], + functions: {}, + additionalParams, + stringifyFunctionParams: false, + stringifyFunctionResults: false, + combineModelMessageAndToolCalls, + squashModelTextResponses: false + }); + const textDiff = getTextBetweenIds( + betweenModelTextualResponsesTest, 
modelMessage1, modelMessage2 + ).text ?? ""; + textBetween2TextualModelResponses = reviveSeparatorText(textDiff, idToStaticContent, contentIds); + } catch (err) { + // do nothing + } + } + + let usedNewChunkFor2Calls = false; + const rendered1Call = renderTemplate({ + chatHistory: chatHistory1Call, + functions: functions1, + additionalParams, + stringifyFunctionParams: stringifyParams, + stringifyFunctionResults: stringifyResult, + combineModelMessageAndToolCalls + }); + const rendered2Calls = getFirstValidResult([ + () => renderTemplate({ + chatHistory: chatHistory2Calls, + functions: functions2, + additionalParams, + stringifyFunctionParams: stringifyParams, + stringifyFunctionResults: stringifyResult, + combineModelMessageAndToolCalls + }), + () => { + usedNewChunkFor2Calls = true; + return renderTemplate({ + chatHistory: chatHistory2CallsNewChunk, + functions: functions2, + additionalParams, + stringifyFunctionParams: stringifyParams, + stringifyFunctionResults: stringifyResult, + combineModelMessageAndToolCalls + }); + } + ]); + + const modelMessage1ToFunc1Name = getTextBetweenIds(rendered2Calls, modelMessage1, func1name); + const func1NameToFunc1Params = getTextBetweenIds( + rendered2Calls, func1name, func1params, modelMessage1ToFunc1Name.endIndex + ); + + const func1ResultIndex = rendered2Calls.indexOf(func1result, func1NameToFunc1Params.endIndex); + const func2NameIndex = rendered2Calls.indexOf(func2name, modelMessage1ToFunc1Name.endIndex); + + if (modelMessage1ToFunc1Name.text == null || + func1NameToFunc1Params.text == null || + func1ResultIndex < 0 || + func2NameIndex < 0 + ) + return {settings: null, stringifyParams, stringifyResult, combineModelMessageAndToolCalls}; + + const supportsParallelCalls = func1ResultIndex > func2NameIndex; + if (!supportsParallelCalls || usedNewChunkFor2Calls) { + const prefix = getTextBetweenIds(rendered1Call, modelMessage1, func1name); + const paramsPrefix = getTextBetweenIds( + rendered1Call, func1name, func1params, prefix.endIndex + ); + + const resultPrefix = getTextBetweenIds(rendered1Call, func1params, func1result, paramsPrefix.endIndex); + const resultSuffix = getTextBetweenIds(rendered1Call, func1result, modelMessage2, resultPrefix.endIndex); + + if (prefix.text == null || prefix.text === "" || paramsPrefix.text == null || resultPrefix.text == null || resultSuffix.text == null) + return {settings: null, stringifyParams, stringifyResult, combineModelMessageAndToolCalls}; + + return { + stringifyParams, + stringifyResult, + combineModelMessageAndToolCalls, + settings: { + call: { + optionalPrefixSpace: true, + prefix: removeCommonRevivedPrefix( + reviveSeparatorText(prefix.text, idToStaticContent, contentIds), + !combineModelMessageAndToolCalls + ? 
textBetween2TextualModelResponses + : LlamaText() + ), + paramsPrefix: reviveSeparatorText(paramsPrefix.text, idToStaticContent, contentIds), + suffix: "", + emptyCallParamsPlaceholder: {} + }, + result: { + prefix: reviveSeparatorText( + resultPrefix.text, + new Map([ + ...idToStaticContent.entries(), + [func1name, LlamaText("{{functionName}}")], + [func1params, LlamaText("{{functionParams}}")] + ]), + contentIds + ), + suffix: reviveSeparatorText( + resultSuffix.text, + new Map([ + ...idToStaticContent.entries(), + [func1name, LlamaText("{{functionName}}")], + [func1params, LlamaText("{{functionParams}}")] + ]), + contentIds + ) + } + } + }; + } + + const func1ParamsToFunc2Name = getTextBetweenIds( + rendered2Calls, func1params, func2name, func1NameToFunc1Params.endIndex + ); + const func2ParamsToFunc1Result = getTextBetweenIds( + rendered2Calls, func2params, func1result, func1ParamsToFunc2Name.endIndex + ); + const func1ResultToFunc2Result = getTextBetweenIds( + rendered2Calls, func1result, func2result, func2ParamsToFunc1Result.endIndex + ); + const func2ResultToModelMessage2 = getTextBetweenIds( + rendered2Calls, func2result, modelMessage2, func1ResultToFunc2Result.endIndex + ); + + + if (func1ParamsToFunc2Name.text == null || func2ParamsToFunc1Result.text == null || func1ResultToFunc2Result.text == null || + func2ResultToModelMessage2.text == null + ) + return {settings: null, stringifyParams, stringifyResult, combineModelMessageAndToolCalls}; + + const callPrefixLength = findCommonEndLength(modelMessage1ToFunc1Name.text, func1ParamsToFunc2Name.text); + const callPrefixText = func1ParamsToFunc2Name.text.slice(-callPrefixLength); + const parallelismCallPrefix = modelMessage1ToFunc1Name.text.slice(0, -callPrefixLength); + + const callSuffixLength = findCommandStartLength(func1ParamsToFunc2Name.text, func2ParamsToFunc1Result.text); + const callSuffixText = func1ParamsToFunc2Name.text.slice(0, callSuffixLength); + + const parallelismBetweenCallsText = func1ParamsToFunc2Name.text.slice(callSuffixLength, -callPrefixLength); + const callParamsPrefixText = func1NameToFunc1Params.text; + + const resultPrefixLength = findCommonEndLength(func2ParamsToFunc1Result.text, func1ResultToFunc2Result.text); + const resultPrefixText = func2ParamsToFunc1Result.text.slice(-resultPrefixLength); + + const resultSuffixLength = findCommandStartLength(func1ResultToFunc2Result.text, func2ResultToModelMessage2.text); + const resultSuffixText = func1ResultToFunc2Result.text.slice(0, resultSuffixLength); + const parallelismResultBetweenResultsText = func1ResultToFunc2Result.text.slice(resultSuffixLength, -resultPrefixLength); + const parallelismResultSuffixText = func2ResultToModelMessage2.text.slice(resultSuffixLength); + + const resolveParallelismBetweenSectionsParts = (betweenSectionsText: string) => { + const {index: endTokenIndex, text: endTokenId} = findFirstTextMatch(betweenSectionsText, [eosTokenId, eosTokenId]); + + if (endTokenIndex >= 0 && endTokenId != null) + return { + parallelismCallSuffixText: betweenSectionsText.slice(0, endTokenIndex + endTokenId.length), + parallelismResultPrefix: betweenSectionsText.slice(endTokenIndex + endTokenId.length) + }; + + const bosIndex = betweenSectionsText.indexOf(bosTokenId); + if (bosIndex >= 0) + return { + parallelismCallSuffixText: betweenSectionsText.slice(0, bosIndex), + parallelismResultPrefix: betweenSectionsText.slice(bosIndex) + }; + + return { + parallelismCallSuffixText: betweenSectionsText, + parallelismResultPrefix: "" + }; + }; + const { + 
parallelismCallSuffixText, + parallelismResultPrefix + } = resolveParallelismBetweenSectionsParts(func2ParamsToFunc1Result.text.slice(callSuffixLength, -resultPrefixLength)); + + return { + stringifyParams, + stringifyResult, + combineModelMessageAndToolCalls, + settings: { + call: { + optionalPrefixSpace: true, + prefix: reviveSeparatorText(callPrefixText, idToStaticContent, contentIds), + paramsPrefix: reviveSeparatorText(callParamsPrefixText, idToStaticContent, contentIds), + suffix: reviveSeparatorText(callSuffixText, idToStaticContent, contentIds), + emptyCallParamsPlaceholder: {} + }, + result: { + prefix: reviveSeparatorText( + resultPrefixText, + new Map([ + ...idToStaticContent.entries(), + [func1name, LlamaText("{{functionName}}")], + [func1params, LlamaText("{{functionParams}}")] + ]), + contentIds + ), + suffix: reviveSeparatorText( + resultSuffixText, + new Map([ + ...idToStaticContent.entries(), + [func1name, LlamaText("{{functionName}}")], + [func1params, LlamaText("{{functionParams}}")] + ]), + contentIds + ) + }, + parallelism: { + call: { + sectionPrefix: removeCommonRevivedPrefix( + reviveSeparatorText(parallelismCallPrefix, idToStaticContent, contentIds), + !combineModelMessageAndToolCalls + ? textBetween2TextualModelResponses + : LlamaText() + ), + betweenCalls: reviveSeparatorText(parallelismBetweenCallsText, idToStaticContent, contentIds), + sectionSuffix: reviveSeparatorText(parallelismCallSuffixText, idToStaticContent, contentIds) + }, + result: { + sectionPrefix: reviveSeparatorText(parallelismResultPrefix, idToStaticContent, contentIds), + betweenResults: reviveSeparatorText(parallelismResultBetweenResultsText, idToStaticContent, contentIds), + sectionSuffix: reviveSeparatorText(parallelismResultSuffixText, idToStaticContent, contentIds) + } + } + } + }; +} + +function getTextBetweenIds( + text: string, startId: string, endId: string, startIndex: number = 0 +): {text: string | undefined, endIndex: number} { + const foundStartIndex = text.indexOf(startId, startIndex); + if (foundStartIndex < 0) + return {text: undefined, endIndex: -1}; + + const foundEndIndex = text.indexOf(endId, foundStartIndex + startId.length); + if (foundEndIndex < 0) + return {text: undefined, endIndex: -1}; + + return { + text: text.slice(foundStartIndex + startId.length, foundEndIndex), + endIndex: foundEndIndex + }; +} + +function reviveSeparatorText(text: string, idMap: Map, contentIds: Set): LlamaText { + return LlamaText( + splitText(text, [...new Set([...idMap.keys(), ...contentIds])]) + .map((item) => { + if (typeof item === "string") + return new SpecialTokensText(item); + + const mappedItem = idMap.get(item.separator); + if (mappedItem != null) + return mappedItem; + + if (contentIds.has(item.separator)) + throw new Error("Content ID found in separator text"); + + return new SpecialTokensText(item.separator); + }) + ); +} + +function removeCommonRevivedPrefix(target: LlamaText, matchStart: LlamaText) { + for ( + let commonStartLength = 0; + commonStartLength < target.values.length && commonStartLength < matchStart.values.length; + commonStartLength++ + ) { + const targetValue = target.values[commonStartLength]; + const matchStartValue = matchStart.values[commonStartLength]; + + if (typeof targetValue === "string" && typeof matchStartValue === "string") { + if (targetValue === matchStartValue) + continue; + } else if (targetValue instanceof SpecialTokensText && matchStartValue instanceof SpecialTokensText) { + const commonLength = findCommandStartLength(targetValue.value, 
matchStartValue.value); + if (commonLength === targetValue.value.length && commonLength === matchStartValue.value.length) + continue; + + return LlamaText([ + new SpecialTokensText(targetValue.value.slice(commonLength)), + ...target.values.slice(commonStartLength + 1) + ]); + } else if (targetValue instanceof SpecialToken && matchStartValue instanceof SpecialToken) { + if (targetValue.value === matchStartValue.value) + continue; + } + + return LlamaText(target.values.slice(commonStartLength)); + } + + return LlamaText(target.values.slice(matchStart.values.length)); +} + +function findCommandStartLength(text1: string, text2: string) { + let commonStartLength = 0; + while (commonStartLength < text1.length && commonStartLength < text2.length) { + if (text1[commonStartLength] !== text2[commonStartLength]) + break; + + commonStartLength++; + } + + return commonStartLength; +} + +function findCommonEndLength(text1: string, text2: string) { + let commonEndLength = 0; + while (commonEndLength < text1.length && commonEndLength < text2.length) { + if (text1[text1.length - commonEndLength - 1] !== text2[text2.length - commonEndLength - 1]) + break; + + commonEndLength++; + } + + return commonEndLength; +} + +function findFirstTextMatch<const T extends string>( + text: string, matchTexts: T[], startIndex: number = 0 +): {index: number, text: T} | {index: -1, text: undefined} { + for (const matchText of matchTexts) { + const index = text.indexOf(matchText, startIndex); + if (index >= 0) + return {index, text: matchText}; + } + + return {index: -1, text: undefined}; +} diff --git a/src/chatWrappers/generic/utils/extractSegmentSettingsFromTokenizerAndChatTemplate.ts b/src/chatWrappers/generic/utils/extractSegmentSettingsFromTokenizerAndChatTemplate.ts new file mode 100644 index 00000000..57fc4ceb --- /dev/null +++ b/src/chatWrappers/generic/utils/extractSegmentSettingsFromTokenizerAndChatTemplate.ts @@ -0,0 +1,47 @@ +import {ChatWrapperSettings, Tokenizer} from "../../../types.js"; +import {LlamaText, SpecialTokensText} from "../../../utils/LlamaText.js"; +import {removeUndefinedFields} from "../../../utils/removeNullFields.js"; + +export function extractSegmentSettingsFromTokenizerAndChatTemplate( + chatTemplate: string | undefined, tokenizer?: Tokenizer +): ChatWrapperSettings["segments"] { + function tryMatchPrefixSuffixPair(tryMatchGroups: [prefix: string, suffix: string][]) { + if (chatTemplate != null) { + for (const [prefix, suffix] of tryMatchGroups) { + if (chatTemplate.includes(prefix) && chatTemplate.includes(suffix)) + return { + prefix: LlamaText(new SpecialTokensText(prefix)), + suffix: LlamaText(new SpecialTokensText(suffix)) + }; + } + } + + if (tokenizer != null) { + for (const [prefix, suffix] of tryMatchGroups) { + const thinkTokens = tokenizer(prefix, true, "trimLeadingSpace"); + const thinkEndTokens = tokenizer(suffix, true, "trimLeadingSpace"); + + const [thinkToken] = thinkTokens; + const [thinkEndToken] = thinkEndTokens; + + if (thinkTokens.length === 1 && thinkEndTokens.length === 1 && + thinkToken != null && thinkEndToken != null + ) { + return { + prefix: LlamaText(new SpecialTokensText(prefix)), + suffix: LlamaText(new SpecialTokensText(suffix)) + }; + } + } + } + + return undefined; + } + + return removeUndefinedFields({ + thought: tryMatchPrefixSuffixPair([ + ["<think>", "</think>"], // DeepSeek, QwQ + ["<thought>", "</thought>"] // EXAONE Deep + ]) + }); +} diff --git a/src/chatWrappers/generic/utils/getFirstValidResult.ts b/src/chatWrappers/generic/utils/getFirstValidResult.ts new file mode 100644 index 00000000..e030ac2d --- 
/dev/null +++ b/src/chatWrappers/generic/utils/getFirstValidResult.ts @@ -0,0 +1,19 @@ +/** + * Call the functions in the array one by one and return the result of the first one that doesn't throw an error. + * + * If all functions throw an error, throw the error of the last function. + */ +export function getFirstValidResult<const T extends (() => any)[]>(options: T): ReturnType<T[number]> { + for (let i = 0; i < options.length; i++) { + if (i === options.length - 1) + return options[i]!(); + + try { + return options[i]!(); + } catch (err) { + // do nothing + } + } + + throw new Error("All options failed"); +} diff --git a/src/chatWrappers/generic/utils/squashChatHistoryItems.ts b/src/chatWrappers/generic/utils/squashChatHistoryItems.ts new file mode 100644 index 00000000..4582455f --- /dev/null +++ b/src/chatWrappers/generic/utils/squashChatHistoryItems.ts @@ -0,0 +1,40 @@ +import {ChatHistoryItem} from "../../../types.js"; +import {LlamaText} from "../../../utils/LlamaText.js"; + +export function squashChatHistoryItems(history: readonly ChatHistoryItem[]) { + const res: ChatHistoryItem[] = []; + for (const item of history) { + const lastItem = res.at(-1); + if (lastItem == null) { + res.push(structuredClone(item)); + continue; + } + + if (lastItem.type === "system" && item.type === "system") + lastItem.text = LlamaText.joinValues("\n\n", [ + LlamaText.fromJSON(lastItem.text), + LlamaText.fromJSON(item.text) + ]).toJSON(); + else if (lastItem.type === "user" && item.type === "user") + lastItem.text += "\n\n" + item.text; + else if (lastItem.type === "model" && item.type === "model") { + const responsesToAdd = ["\n\n", ...item.response]; + + while (typeof responsesToAdd[0] === "string" && typeof lastItem.response.at(-1) === "string") { + const lastResponses = lastItem.response.pop()!; + if (typeof lastResponses !== "string") { + lastItem.response.push(lastResponses); + break; + } + + lastItem.response.push(lastResponses + responsesToAdd.shift()!); + } + + while (responsesToAdd.length > 0) + lastItem.response.push(responsesToAdd.shift()!); + } else + res.push(structuredClone(item)); + } + + return res; +} diff --git a/src/chatWrappers/utils/ChatModelFunctionsDocumentationGenerator.ts b/src/chatWrappers/utils/ChatModelFunctionsDocumentationGenerator.ts index 32b5642b..c92bec3c 100644 --- a/src/chatWrappers/utils/ChatModelFunctionsDocumentationGenerator.ts +++ b/src/chatWrappers/utils/ChatModelFunctionsDocumentationGenerator.ts @@ -184,4 +184,32 @@ export class ChatModelFunctionsDocumentationGenerator { return functionsLines; } /* eslint-enable @stylistic/max-len */ + + public getQwenFunctionSignatures({documentParams = true}: {documentParams?: boolean} = {}) { + return this._convertToJinjaTools({documentParams}) + .map((tool) => jsonDumps(tool)) + .join("\n"); + } + + /** @internal */ + private _convertToJinjaTools({documentParams = true}: {documentParams?: boolean} = {}) { + const chatModelFunctions = this.chatModelFunctions; + + if (!this.hasAnyFunctions || chatModelFunctions == null) + return []; + + return [...Object.entries(chatModelFunctions)] + .map(([functionName, functionDefinition]) => { + return { + type: "function", + function: { + name: functionName, + description: functionDefinition.description, + parameters: documentParams + ? 
functionDefinition.params + : undefined + } + }; + }); + } } diff --git a/src/chatWrappers/utils/isJinjaTemplateEquivalentToSpecializedChatWrapper.ts b/src/chatWrappers/utils/isJinjaTemplateEquivalentToSpecializedChatWrapper.ts index 77d7bfdf..b9259597 100644 --- a/src/chatWrappers/utils/isJinjaTemplateEquivalentToSpecializedChatWrapper.ts +++ b/src/chatWrappers/utils/isJinjaTemplateEquivalentToSpecializedChatWrapper.ts @@ -1,15 +1,27 @@ +import {splitText} from "lifecycle-utils"; import {ChatWrapper} from "../../ChatWrapper.js"; -import {ChatHistoryItem, ChatModelResponse, ChatUserMessage, Tokenizer} from "../../types.js"; +import {ChatHistoryItem, ChatModelResponse, ChatUserMessage, ChatWrapperSettings, Tokenizer} from "../../types.js"; import {JinjaTemplateChatWrapper, JinjaTemplateChatWrapperOptions} from "../generic/JinjaTemplateChatWrapper.js"; -import {SpecialToken, LlamaText} from "../../utils/LlamaText.js"; +import {SpecialToken, LlamaText, SpecialTokensText} from "../../utils/LlamaText.js"; import {compareTokens} from "../../utils/compareTokens.js"; import {StopGenerationDetector} from "../../utils/StopGenerationDetector.js"; +import {Writable} from "../../utils/utilTypes.js"; +import {jsonDumps} from "./jsonDumps.js"; export function isJinjaTemplateEquivalentToSpecializedChatWrapper( jinjaTemplateWrapperOptions: JinjaTemplateChatWrapperOptions, specializedChatWrapper: ChatWrapper, tokenizer?: Tokenizer ): boolean { + const getCheckChatHistories = (jinjaChatWrapper: JinjaTemplateChatWrapper) => [ + ...testChatHistories, + ...( + (jinjaChatWrapper.usingJinjaFunctionCallTemplate || jinjaTemplateWrapperOptions.functionCallMessageTemplate === "auto") + ? testChatHistoriesWithFunctionCalls + : [] + ) + ]; + const canTestMultipleConvertSystemMessagesToUserMessages = jinjaTemplateWrapperOptions.convertUnsupportedSystemMessagesToUserMessages == null || jinjaTemplateWrapperOptions.convertUnsupportedSystemMessagesToUserMessages === "auto"; @@ -22,8 +34,9 @@ export function isJinjaTemplateEquivalentToSpecializedChatWrapper( : jinjaTemplateWrapperOptions.convertUnsupportedSystemMessagesToUserMessages, trimLeadingWhitespaceInResponses: false }); + const checkChatHistories = getCheckChatHistories(jinjaChatWrapper); - if (checkEquivalence(jinjaChatWrapper, specializedChatWrapper, testChatHistories, tokenizer)) + if (checkEquivalence(jinjaChatWrapper, specializedChatWrapper, checkChatHistories, tokenizer)) return true; } catch (err) { // Do nothing @@ -38,8 +51,9 @@ export function isJinjaTemplateEquivalentToSpecializedChatWrapper( : jinjaTemplateWrapperOptions.convertUnsupportedSystemMessagesToUserMessages, trimLeadingWhitespaceInResponses: true }); + const checkChatHistories = getCheckChatHistories(jinjaChatWrapperWithLeadingWhitespaceTrimming); - if (checkEquivalence(jinjaChatWrapperWithLeadingWhitespaceTrimming, specializedChatWrapper, testChatHistories, tokenizer)) + if (checkEquivalence(jinjaChatWrapperWithLeadingWhitespaceTrimming, specializedChatWrapper, checkChatHistories, tokenizer)) return true; } catch (err) { // Do nothing @@ -49,40 +63,6 @@ export function isJinjaTemplateEquivalentToSpecializedChatWrapper( return false; const convertSystemMessagesToUserMessagesTemplate = "### System message\n\n{{message}}\n\n----"; - const transformedTestChatHistories = testChatHistories - .map((history) => ( - history - .slice() - .map((item, index, array) => { - if (item.type === "system") { - if (index === 0 && array.length > 1 && array[1]!.type === "user") { - array[1] = { - type: "user", - 
text: LlamaText([ - LlamaText.joinValues( - LlamaText.fromJSON(item.text), - convertSystemMessagesToUserMessagesTemplate.split("{{message}}") - ), - "\n\n", - array[1]!.text - ]).toString() - } satisfies ChatHistoryItem; - return null; - } - - return { - type: "user", - text: LlamaText.joinValues( - LlamaText.fromJSON(item.text), - convertSystemMessagesToUserMessagesTemplate.split("{{message}}") - ).toString() - } satisfies ChatHistoryItem; - } - - return item; - }) - .filter((item): item is ChatUserMessage | ChatModelResponse => item != null) - )); try { const jinjaChatWrapper = new JinjaTemplateChatWrapper({ @@ -93,8 +73,13 @@ export function isJinjaTemplateEquivalentToSpecializedChatWrapper( }, trimLeadingWhitespaceInResponses: false }); + const checkChatHistories = getCheckChatHistories(jinjaChatWrapper); + const transformedCheckChatHistories = convertTestChatHistoriesSystemMessagesToUserMessages( + checkChatHistories, + convertSystemMessagesToUserMessagesTemplate + ); - if (checkEquivalence(jinjaChatWrapper, specializedChatWrapper, transformedTestChatHistories, tokenizer)) + if (checkEquivalence(jinjaChatWrapper, specializedChatWrapper, transformedCheckChatHistories, tokenizer)) return true; } catch (err) { // Do nothing @@ -110,9 +95,14 @@ export function isJinjaTemplateEquivalentToSpecializedChatWrapper( }, trimLeadingWhitespaceInResponses: true }); + const checkChatHistories = getCheckChatHistories(jinjaChatWrapperWithLeadingWhitespaceTrimming); + const transformedCheckChatHistories = convertTestChatHistoriesSystemMessagesToUserMessages( + checkChatHistories, + convertSystemMessagesToUserMessagesTemplate + ); if (checkEquivalence( - jinjaChatWrapperWithLeadingWhitespaceTrimming, specializedChatWrapper, transformedTestChatHistories, tokenizer + jinjaChatWrapperWithLeadingWhitespaceTrimming, specializedChatWrapper, transformedCheckChatHistories, tokenizer )) return true; } catch (err) { @@ -130,7 +120,21 @@ function checkEquivalence( ): boolean { for (const testChatHistory of testChatHistories) { const jinjaRes = jinjaChatWrapper.generateContextState({chatHistory: testChatHistory}); - const specializedWrapperRes = specializedChatWrapper.generateContextState({chatHistory: testChatHistory}); + jinjaRes.contextText = convertFunctionNameAndParamsToRegularText(jinjaRes.contextText, testChatHistory); + + const convertedSettings = convertChatWrapperSettingsToUseSpecialTokensText(specializedChatWrapper.settings); + const originalSpecializedSettings = specializedChatWrapper.settings; + + if (convertedSettings != null) + (specializedChatWrapper as Writable).settings = convertedSettings; + + let specializedWrapperRes: ReturnType; + try { + specializedWrapperRes = specializedChatWrapper.generateContextState({chatHistory: testChatHistory}); + } finally { + if (convertedSettings != null) + (specializedChatWrapper as Writable).settings = originalSpecializedSettings; + } if (!compareContextTexts(jinjaRes.contextText, specializedWrapperRes.contextText, tokenizer)) return false; @@ -227,6 +231,145 @@ function compareContextTexts(text1: LlamaText, text2: LlamaText, tokenizer?: Tok )); } +function convertTestChatHistoriesSystemMessagesToUserMessages(chatHistories: ChatHistoryItem[][], template: string) { + return chatHistories + .map((history) => ( + history + .slice() + .map((item, index, array) => { + if (item.type === "system") { + if (index === 0 && array.length > 1 && array[1]!.type === "user") { + array[1] = { + type: "user", + text: LlamaText([ + LlamaText.joinValues( + 
LlamaText.fromJSON(item.text), + template.split("{{message}}") + ), + "\n\n", + array[1]!.text + ]).toString() + } satisfies ChatHistoryItem; + return null; + } + + return { + type: "user", + text: LlamaText.joinValues( + LlamaText.fromJSON(item.text), + template.split("{{message}}") + ).toString() + } satisfies ChatHistoryItem; + } + + return item; + }) + .filter((item): item is ChatUserMessage | ChatModelResponse => item != null) + )); +} + +function convertChatWrapperSettingsToUseSpecialTokensText(settings: ChatWrapperSettings): ChatWrapperSettings | null { + if (settings?.functions == null) + return null; + + function convertToSpecialTokensText(value: string | LlamaText, keepTexts?: string[]): string | LlamaText; + function convertToSpecialTokensText(value: string | LlamaText | undefined, keepTexts?: string[]): string | LlamaText | undefined; + function convertToSpecialTokensText(value: string | LlamaText | undefined, keepTexts?: string[]): string | LlamaText | undefined { + if (value == null) + return value; + + return LlamaText( + LlamaText(value).values + .map((item) => { + if (typeof item !== "string") + return item; + + if (keepTexts == null || keepTexts.length === 0) + return new SpecialTokensText(item); + + return splitText(item, keepTexts).map((textPart) => { + if (typeof textPart === "string") + return new SpecialTokensText(textPart); + + return textPart.separator; + }); + }) + ); + } + + return { + ...settings, + functions: { + ...settings.functions, + call: { + ...settings.functions.call, + prefix: convertToSpecialTokensText(settings.functions.call.prefix), + suffix: convertToSpecialTokensText(settings.functions.call.suffix), + paramsPrefix: convertToSpecialTokensText(settings.functions.call.paramsPrefix) + }, + result: { + ...settings.functions.result, + prefix: convertToSpecialTokensText(settings.functions.result.prefix, ["{{functionName}}", "{{functionParams}}"]), + suffix: convertToSpecialTokensText(settings.functions.result.suffix, ["{{functionName}}", "{{functionParams}}"]) + }, + parallelism: settings.functions.parallelism == null + ? settings.functions.parallelism + : { + ...settings.functions.parallelism, + call: { + ...settings.functions.parallelism.call, + sectionPrefix: convertToSpecialTokensText(settings.functions.parallelism.call.sectionPrefix), + betweenCalls: convertToSpecialTokensText(settings.functions.parallelism.call.betweenCalls), + sectionSuffix: convertToSpecialTokensText(settings.functions.parallelism.call.sectionSuffix) + }, + result: settings.functions.parallelism.result == null + ? 
settings.functions.parallelism.result + : { + ...settings.functions.parallelism.result, + sectionPrefix: convertToSpecialTokensText(settings.functions.parallelism.result.sectionPrefix), + betweenResults: convertToSpecialTokensText(settings.functions.parallelism.result.betweenResults), + sectionSuffix: convertToSpecialTokensText(settings.functions.parallelism.result.sectionSuffix) + } + } + } + }; +} + +function convertFunctionNameAndParamsToRegularText(contextText: LlamaText, chatHistory: ChatHistoryItem[]): LlamaText { + const ensureRegularTextItems = new Set(); + + for (const item of chatHistory) { + if (item.type !== "model") + continue; + + for (const response of item.response) { + if (typeof response === "string" || response.type !== "functionCall") + continue; + + ensureRegularTextItems.add(response.name); + if (response.params !== undefined && response.params !== "") + ensureRegularTextItems.add(jsonDumps(response.params)); + } + } + + const ensureRegularTextItemsArray = [...ensureRegularTextItems]; + + return LlamaText( + contextText.values.map((item) => { + if (!(item instanceof SpecialTokensText)) + return item; + + return splitText(item.value, ensureRegularTextItemsArray) + .map((textPart) => { + if (typeof textPart === "string") + return new SpecialTokensText(textPart); + + return textPart.separator; + }); + }) + ); +} + const testChatHistories: ChatHistoryItem[][] = [ [{ type: "system", @@ -284,6 +427,65 @@ const testChatHistories: ChatHistoryItem[][] = [ response: ["Result2 1234567890!@#$%^&*()_+-=[]{}|\\:;\"',./<>?`~"] }] ]; +const testChatHistoriesWithFunctionCalls: ChatHistoryItem[][] = [ + [{ + type: "system", + text: "System message ~!@#$%^&*()\n*" + }, { + type: "user", + text: "Message 1234567890!@#$%^&*()_+-=[]{}|\\:;\"',./<>?`~" + }, { + type: "model", + response: ["Result 1234567890!@#$%^&*()_+-=[]{}|\\:;\"',./<>?`~"] + }, { + type: "user", + text: "Message2 1234567890!@#$%^&*()_+-=[]{}|\\:;\"',./<>?`~" + }, { + type: "model", + response: [ + "Result2 1234567890!@#$%^&*()_+-=[]{}|\\:;\"',./<>?`~", + { + type: "functionCall", + name: "func1name", + params: {param1: "value1"}, + result: "func1result" + }, + "Result3 1234567890!@#$%^&*()_+-=[]{}|\\:;\"',./<>?`~" + ] + }], + + [{ + type: "system", + text: "System message ~!@#$%^&*()\n*" + }, { + type: "user", + text: "Message 1234567890!@#$%^&*()_+-=[]{}|\\:;\"',./<>?`~" + }, { + type: "model", + response: ["Result 1234567890!@#$%^&*()_+-=[]{}|\\:;\"',./<>?`~"] + }, { + type: "user", + text: "Message2 1234567890!@#$%^&*()_+-=[]{}|\\:;\"',./<>?`~" + }, { + type: "model", + response: [ + "Result2 1234567890!@#$%^&*()_+-=[]{}|\\:;\"',./<>?`~", + { + type: "functionCall", + name: "func1name", + params: {param1: "value1"}, + result: "func1result" + }, + { + type: "functionCall", + name: "func2name", + params: {param1: "value2"}, + result: "func2result" + }, + "Result3 1234567890!@#$%^&*()_+-=[]{}|\\:;\"',./<>?`~" + ] + }] +]; function removeLeadingBos(llamaText: LlamaText) { if (llamaText.values.length === 0) diff --git a/src/chatWrappers/utils/resolveChatWrapper.ts b/src/chatWrappers/utils/resolveChatWrapper.ts index 0506b68e..9feb89fc 100644 --- a/src/chatWrappers/utils/resolveChatWrapper.ts +++ b/src/chatWrappers/utils/resolveChatWrapper.ts @@ -17,14 +17,15 @@ import {MistralChatWrapper} from "../MistralChatWrapper.js"; import {Tokenizer} from "../../types.js"; import {includesText} from "../../utils/includesText.js"; import {LlamaModel} from "../../evaluator/LlamaModel/LlamaModel.js"; +import {QwenChatWrapper} from 
"../QwenChatWrapper.js"; import {isJinjaTemplateEquivalentToSpecializedChatWrapper} from "./isJinjaTemplateEquivalentToSpecializedChatWrapper.js"; import {getModelLinageNames} from "./getModelLinageNames.js"; import type {GgufFileInfo} from "../../gguf/types/GgufFileInfoTypes.js"; export const specializedChatWrapperTypeNames = Object.freeze([ - "general", "deepSeek", "llama3.2-lightweight", "llama3.1", "llama3", "llama2Chat", "mistral", "alpacaChat", "functionary", "chatML", - "falconChat", "gemma" + "general", "deepSeek", "qwen", "llama3.2-lightweight", "llama3.1", "llama3", "llama2Chat", "mistral", "alpacaChat", "functionary", + "chatML", "falconChat", "gemma" ] as const); export type SpecializedChatWrapperTypeName = (typeof specializedChatWrapperTypeNames)[number]; @@ -43,6 +44,7 @@ export type ResolvableChatWrapperTypeName = (typeof resolvableChatWrapperTypeNam export const chatWrappers = Object.freeze({ "general": GeneralChatWrapper, "deepSeek": DeepSeekChatWrapper, + "qwen": QwenChatWrapper, "llama3.1": Llama3_1ChatWrapper, "llama3.2-lightweight": Llama3_2LightweightChatWrapper, "llama3": Llama3ChatWrapper, @@ -110,7 +112,9 @@ export type ResolveChatWrapperWithModelOptions = { type?: "auto" | SpecializedChatWrapperTypeName | TemplateChatWrapperTypeName, customWrapperSettings?: { - [wrapper in keyof typeof chatWrappers]?: ConstructorParameters<(typeof chatWrappers)[wrapper]>[0] + [wrapper in keyof typeof chatWrappers]?: typeof JinjaTemplateChatWrapper extends (typeof chatWrappers)[wrapper] + ? Partial[0]> + : ConstructorParameters<(typeof chatWrappers)[wrapper]>[0] }, /** @@ -182,6 +186,7 @@ export function resolveChatWrapper( if (options instanceof LlamaModel) return resolveChatWrapper({ ...(modelOptions ?? {}), + customWrapperSettings: modelOptions?.customWrapperSettings as ResolveChatWrapperOptions["customWrapperSettings"], bosString: options.tokens.bosString, filename: options.filename, fileInfo: options.fileInfo, @@ -236,6 +241,7 @@ export function resolveChatWrapper( } else { try { return new JinjaTemplateChatWrapper({ + tokenizer, ...(customWrapperSettings?.jinjaTemplate ?? {}), template: jinjaTemplate }); @@ -261,6 +267,7 @@ export function resolveChatWrapper( if (modelJinjaTemplate != null && modelJinjaTemplate.trim() !== "") { const jinjaTemplateChatWrapperOptions: JinjaTemplateChatWrapperOptions = { + tokenizer, ...(customWrapperSettings?.jinjaTemplate ?? {}), template: modelJinjaTemplate }; @@ -292,7 +299,7 @@ export function resolveChatWrapper( const applyConfig = testConfigurationOrPair instanceof Array ? (testConfigurationOrPair[1]! ?? {}) : testConfigurationOrPair; - const additionalJinjaParameters = testConfigurationOrPair instanceof Array + const additionalJinjaOptions = testConfigurationOrPair instanceof Array ? testConfigurationOrPair[2]! : undefined; @@ -307,12 +314,13 @@ export function resolveChatWrapper( const chatWrapper = new (Wrapper as any)(testChatWrapperSettings); const jinjaTemplateChatWrapperOptionsWithAdditionalParameters: JinjaTemplateChatWrapperOptions = { + ...(additionalJinjaOptions ?? {}), ...jinjaTemplateChatWrapperOptions, - additionalRenderParameters: additionalJinjaParameters == null + additionalRenderParameters: additionalJinjaOptions?.additionalRenderParameters == null ? jinjaTemplateChatWrapperOptions.additionalRenderParameters : { ...(jinjaTemplateChatWrapperOptions.additionalRenderParameters ?? 
{}), - ...additionalJinjaParameters + ...additionalJinjaOptions.additionalRenderParameters } }; diff --git a/src/cli/recommendedModels.ts b/src/cli/recommendedModels.ts index eb722bb7..c1a4329c 100644 --- a/src/cli/recommendedModels.ts +++ b/src/cli/recommendedModels.ts @@ -75,6 +75,20 @@ export const recommendedModels: ModelRecommendation[] = [{ "hf:mradermacher/DeepSeek-R1-Distill-Llama-70B-GGUF:Q5_K_S", "hf:mradermacher/DeepSeek-R1-Distill-Llama-70B-GGUF:Q4_K_M" ] +}, { + name: "QwQ 32B", + abilities: ["chat", "complete", "functionCalling", "reasoning"], + description: "QwQ model was created by Alibaba and is using chain of though (CoT) to reason across a wide variety of topics.\n" + + "It's optimized for an assistant-like chat use cases, with native support for function calling.\n" + + "This model is censored, but its responses quality on many topics is extremely high.\n" + + "Its performance is comparable to DeepSeek R1 671B.", + + fileOptions: [ + "hf:Qwen/QwQ-32B-GGUF:Q8_0", + "hf:Qwen/QwQ-32B-GGUF:Q6_K", + "hf:Qwen/QwQ-32B-GGUF:Q5_K_M", + "hf:Qwen/QwQ-32B-GGUF:Q4_K_M" + ] }, { name: "Llama 3.1 8B", abilities: ["chat", "complete", "functionCalling"], diff --git a/src/evaluator/LlamaChat/LlamaChat.ts b/src/evaluator/LlamaChat/LlamaChat.ts index 031bed80..d2843d2c 100644 --- a/src/evaluator/LlamaChat/LlamaChat.ts +++ b/src/evaluator/LlamaChat/LlamaChat.ts @@ -23,6 +23,7 @@ import {resolveLastTokens} from "../../utils/resolveLastTokens.js"; import {LlamaSampler} from "../LlamaContext/LlamaSampler.js"; import {LlamaModel} from "../LlamaModel/LlamaModel.js"; import {getChatWrapperSegmentDefinition} from "../../utils/getChatWrapperSegmentDefinition.js"; +import {jsonDumps} from "../../chatWrappers/utils/jsonDumps.js"; import { eraseFirstResponseAndKeepFirstSystemChatContextShiftStrategy } from "./utils/contextShiftStrategies/eraseFirstResponseAndKeepFirstSystemChatContextShiftStrategy.js"; @@ -2209,8 +2210,15 @@ class GenerateResponseState({ handler }: { description?: string, - params?: Readonly & ProhibitUnknownProperties, + params?: Readonly, handler: (params: GbnfJsonSchemaToType) => Promise | any }): ChatSessionModelFunction { return { @@ -24,18 +24,3 @@ export function defineChatSessionFunction({ handler }; } - -/** @hidden */ -type ProhibitUnknownProperties = BaseType extends object - ? Input extends object - ? ( - Input & - {[K in Exclude]: never} & - { - [K in keyof BaseType]: K extends keyof Input - ? ProhibitUnknownProperties - : BaseType[K] - } - ) - : never - : Input; diff --git a/src/evaluator/LlamaGrammar.ts b/src/evaluator/LlamaGrammar.ts index 0db56f9a..06ddd926 100644 --- a/src/evaluator/LlamaGrammar.ts +++ b/src/evaluator/LlamaGrammar.ts @@ -94,7 +94,11 @@ export class LlamaGrammar { const grammar = await fs.readFile(grammarFile, "utf8"); return new LlamaGrammar(llama, { grammar, - stopGenerationTriggers: [LlamaText(["\n".repeat(10)])], // this is a workaround for the model not stopping to generate text, + stopGenerationTriggers: [LlamaText(["\n".repeat( + (type === "json" || type === "json_arr") + ? 
4 + : 10 + )])], // this is a workaround for the model not stopping to generate text, trimWhitespaceSuffix: true }); } diff --git a/src/evaluator/LlamaModel/LlamaModel.ts b/src/evaluator/LlamaModel/LlamaModel.ts index e7acc8d4..0be0bddc 100644 --- a/src/evaluator/LlamaModel/LlamaModel.ts +++ b/src/evaluator/LlamaModel/LlamaModel.ts @@ -274,6 +274,10 @@ export class LlamaModel { return this._disposedState.disposed; } + public get llama() { + return this._llama; + } + public get tokens() { return this._tokens; } diff --git a/src/evaluator/LlamaRankingContext.ts b/src/evaluator/LlamaRankingContext.ts index 63cdec9f..8a32251e 100644 --- a/src/evaluator/LlamaRankingContext.ts +++ b/src/evaluator/LlamaRankingContext.ts @@ -197,7 +197,7 @@ export class LlamaRankingContext { break; // only generate one token to get embeddings } - const embedding = this._llamaContext._ctx.getEmbedding(input.length); + const embedding = this._llamaContext._ctx.getEmbedding(input.length, 1); if (embedding.length === 0) return 0; diff --git a/src/gguf/types/GgufMetadataTypes.ts b/src/gguf/types/GgufMetadataTypes.ts index b39275a0..40388ca5 100644 --- a/src/gguf/types/GgufMetadataTypes.ts +++ b/src/gguf/types/GgufMetadataTypes.ts @@ -227,7 +227,8 @@ export type GgufMetadataTokenizer = { readonly pre?: "default" | "llama3" | "llama-v3" | "llama-bpe" | "deepseek-llm" | "deepseek-coder" | "falcon" | "falcon3" | "mpt" | "starcoder" | "gpt-2" | "phi-2" | "jina-es" | "jina-de" | "jina-v1-en" | "jina-v2-es" | "jina-v2-de" | "jina-v2-code" | "refact" | "command-r" | "qwen2" | "stablelm2" | "olmo" | "dbrx" | "smaug-bpe" | "poro-chat" | "chatglm-bpe" | "viking" | - "jais" | "tekken" | "smollm" | "codeshell" | "bloom" | "gpt3-finnish" | "exaone" | "chameleon" | "minerva-7b" | string, + "jais" | "tekken" | "smollm" | "codeshell" | "bloom" | "gpt3-finnish" | "exaone" | "chameleon" | "minerva-7b" | "megrez" | + "gpt-4o" | string, readonly tokens: readonly string[], readonly token_type: GgufMetadataTokenizerTokenType[], readonly token_type_count?: number, diff --git a/src/index.ts b/src/index.ts index 9fcf3415..8bc97681 100644 --- a/src/index.ts +++ b/src/index.ts @@ -47,6 +47,7 @@ import {InsufficientMemoryError} from "./utils/InsufficientMemoryError.js"; import {ChatWrapper} from "./ChatWrapper.js"; import {EmptyChatWrapper} from "./chatWrappers/EmptyChatWrapper.js"; import {DeepSeekChatWrapper} from "./chatWrappers/DeepSeekChatWrapper.js"; +import {QwenChatWrapper} from "./chatWrappers/QwenChatWrapper.js"; import {Llama3_2LightweightChatWrapper} from "./chatWrappers/Llama3_2LightweightChatWrapper.js"; import {Llama3_1ChatWrapper} from "./chatWrappers/Llama3_1ChatWrapper.js"; import {Llama3ChatWrapper} from "./chatWrappers/Llama3ChatWrapper.js"; @@ -203,6 +204,7 @@ export { type ChatWrapperGenerateInitialHistoryOptions, EmptyChatWrapper, DeepSeekChatWrapper, + QwenChatWrapper, Llama3_2LightweightChatWrapper, Llama3_1ChatWrapper, Llama3ChatWrapper, diff --git a/src/types.ts b/src/types.ts index c357f563..1b7fed75 100644 --- a/src/types.ts +++ b/src/types.ts @@ -26,7 +26,16 @@ export type ChatWrapperSettings = { readonly optionalPrefixSpace: boolean, readonly prefix: string | LlamaText, readonly paramsPrefix: string | LlamaText, - readonly suffix: string | LlamaText + readonly suffix: string | LlamaText, + + /** + * The value to use when the function has no arguments. + * + * Will be stringified using `jsonDumps`. + * + * Defaults to `""`. 
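+             *
+             * For example (an illustrative note, not part of this change): a chat wrapper whose
+             * template expects `{}` for argument-less function calls could set this to `{}`,
+             * which `jsonDumps` would render as `"{}"` instead of an empty string.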
+ */ + readonly emptyCallParamsPlaceholder?: object | string | number | boolean | null }, readonly result: { diff --git a/src/utils/OpenAIFormat.ts b/src/utils/OpenAIFormat.ts new file mode 100644 index 00000000..a01687e2 --- /dev/null +++ b/src/utils/OpenAIFormat.ts @@ -0,0 +1,746 @@ +import {splitText} from "lifecycle-utils"; +import {allSegmentTypes, ChatHistoryItem, ChatModelFunctions, ChatModelSegmentType, ChatWrapperSettings} from "../types.js"; +import {ChatWrapper} from "../ChatWrapper.js"; +import {jsonDumps} from "../chatWrappers/utils/jsonDumps.js"; +import {LlamaChatResponseFunctionCall} from "../evaluator/LlamaChat/LlamaChat.js"; +import {TokenBias} from "../evaluator/TokenBias.js"; +import {LlamaGrammar} from "../evaluator/LlamaGrammar.js"; +import {Llama} from "../bindings/Llama.js"; +import {LlamaModel} from "../evaluator/LlamaModel/LlamaModel.js"; +import {GbnfJsonSchema} from "./gbnfJson/types.js"; +import {getChatWrapperSegmentDefinition} from "./getChatWrapperSegmentDefinition.js"; +import {LlamaText} from "./LlamaText.js"; +import {removeUndefinedFields} from "./removeNullFields.js"; + +// Note: this is a work in progress and is not yet complete. +// Will be exported through the main index.js file once this is complete and fully tested + +export class OpenAIFormat { + public readonly chatWrapper: ChatWrapper; + + public constructor({ + chatWrapper + }: { + chatWrapper: ChatWrapper + }) { + this.chatWrapper = chatWrapper; + } + + /** + * Convert `node-llama-cpp`'s chat history to OpenAI format. + * + * Note that this conversion is lossy, as OpenAI's format is more limited than `node-llama-cpp`'s. + */ + public toOpenAiChat({ + chatHistory, + functionCalls, + functions, + useRawValues = true + }: { + chatHistory: ChatHistoryItem[], + functionCalls?: LlamaChatResponseFunctionCall[], + functions?: Functions, + useRawValues?: boolean + }): OpenAiChatCreationOptions { + const res = fromChatHistoryToIntermediateOpenAiMessages({ + chatHistory, + chatWrapperSettings: this.chatWrapper.settings, + functionCalls, + functions, + useRawValues + }); + + return { + ...res, + messages: fromIntermediateToCompleteOpenAiMessages(res.messages) + }; + } + + public async fromOpenAiChat( + options: OpenAiChatCreationOptions, {llama, model}: {llama?: Llama, model?: LlamaModel} = {} + ): Promise<{ + chatHistory: ChatHistoryItem[], + functionCalls?: LlamaChatResponseFunctionCall[], + functions?: Functions, + + tokenBias?: TokenBias, + maxTokens?: number, + maxParallelFunctionCalls?: number, + grammar?: LlamaGrammar, + seed?: number, + customStopTriggers?: string[], + temperature?: number, + minP?: number, + topK?: number, + topP?: number + }> { + const {messages, tools} = options; + + if ( + (options["response_format"]?.type === "json_schema" || options["response_format"]?.type === "json_object") && + tools != null && options["tool_choice"] !== "none" + ) + throw new Error("Using both JSON response format and tools is not supported yet"); + + const {chatHistory, functionCalls: pendingFunctionCalls} = fromOpenAiMessagesToChatHistory({ + messages, + chatWrapper: this.chatWrapper + }); + + const functions: Functions = {} as Functions; + for (const tool of tools ?? 
[]) { + functions[tool.function.name as keyof Functions] = { + description: tool.function.description, + params: tool.function.parameters + } as Functions[keyof Functions]; + } + + let tokenBias: TokenBias | undefined; + if (options["logit_bias"] != null && model != null) { + tokenBias = TokenBias.for(model); + for (const [token, bias] of Object.entries(options["logit_bias"]!)) + tokenBias.set(token, {logit: bias}); + } + + let grammar: LlamaGrammar | undefined; + if (options["response_format"]?.type === "json_schema" && llama != null) { + const schema = options["response_format"]?.json_schema?.schema; + if (schema != null) + grammar = await llama.createGrammarForJsonSchema(schema); + else + grammar = await llama.getGrammarFor("json"); + } else if (options["response_format"]?.type === "json_object" && llama != null) + grammar = await llama.getGrammarFor("json"); + + return { + chatHistory, + functionCalls: pendingFunctionCalls, + functions: Object.keys(functions).length === 0 + ? undefined + : functions, + + tokenBias, + maxTokens: options["max_completion_tokens"] ?? options["max_tokens"] ?? undefined, + maxParallelFunctionCalls: options["parallel_tool_calls"] === false ? 1 : undefined, + grammar, + seed: options.seed ?? undefined, + customStopTriggers: typeof options.stop === "string" + ? [options.stop] + : options.stop instanceof Array + ? options.stop.filter((item) => typeof item === "string") + : undefined, + temperature: options.temperature ?? undefined, + minP: options["min_p"] ?? undefined, + topK: options["top_k"] ?? undefined, + topP: options["top_p"] ?? undefined + }; + } +} + +export function fromIntermediateToCompleteOpenAiMessages(messages: IntermediateOpenAiMessage[]) { + return messages.map((message) => { + if (message.content != null && LlamaText.isLlamaText(message.content)) + return { + ...message, + content: message.content.toString() + }; + + return message as OpenAiChatMessage; + }); +} + +export function fromChatHistoryToIntermediateOpenAiMessages({ + chatHistory, + chatWrapperSettings, + functionCalls, + functions, + useRawValues = true, + combineModelMessageAndToolCalls = true, + stringifyFunctionParams = true, + stringifyFunctionResults = true, + squashModelTextResponses = true +}: { + chatHistory: readonly ChatHistoryItem[], + chatWrapperSettings: ChatWrapperSettings, + functionCalls?: LlamaChatResponseFunctionCall[], + functions?: Functions, + useRawValues?: boolean, + combineModelMessageAndToolCalls?: boolean, + stringifyFunctionParams?: boolean, + stringifyFunctionResults?: boolean, + squashModelTextResponses?: boolean +}): IntermediateOpenAiConversionFromChatHistory { + const messages: IntermediateOpenAiMessage[] = []; + + for (let i = 0; i < chatHistory.length; i++) { + const item = chatHistory[i]; + if (item == null) + continue; + + if (item.type === "system") + messages.push({ + role: "system", + content: LlamaText.fromJSON(item.text) + }); + else if (item.type === "user") + messages.push({ + role: "user", + content: item.text + }); + else if (item.type === "model") { + let lastModelTextMessage: (IntermediateOpenAiMessage & {role: "assistant"}) | null = null; + const segmentStack: ChatModelSegmentType[] = []; + let canUseLastAssistantMessage = squashModelTextResponses; + + const addResponseText = (text: LlamaText | string) => { + const lastResItem = canUseLastAssistantMessage + ? 
messages.at(-1) + : undefined; + if (lastResItem?.role === "assistant" && (lastResItem.tool_calls == null || lastResItem.tool_calls.length === 0)) { + if (lastResItem.content == null) + lastResItem.content = text; + else + lastResItem.content = LlamaText([lastResItem.content, text]); + } else { + lastModelTextMessage = { + role: "assistant", + content: text + }; + messages.push(lastModelTextMessage); + canUseLastAssistantMessage = true; + } + }; + + for (let j = 0; j < item.response.length; j++) { + const response = item.response[j]; + if (response == null) + continue; + + if (typeof response === "string") + addResponseText(response); + else if (response.type === "segment") { + const segmentDefinition = getChatWrapperSegmentDefinition(chatWrapperSettings, response.segmentType); + + if (response.raw != null && useRawValues) + addResponseText(LlamaText.fromJSON(response.raw)); + else + addResponseText( + LlamaText([ + (segmentStack.length > 0 && segmentStack.at(-1) === response.segmentType) + ? "" + : segmentDefinition?.prefix ?? "", + response.text, + response.ended + ? (segmentDefinition?.suffix ?? "") + : "" + ]) + ); + + if (!response.ended && segmentStack.at(-1) !== response.segmentType) + segmentStack.push(response.segmentType); + else if (response.ended && segmentStack.at(-1) === response.segmentType) { + segmentStack.pop(); + + if (segmentStack.length === 0 && segmentDefinition?.suffix == null && + chatWrapperSettings.segments?.closeAllSegments != null + ) + addResponseText(LlamaText(chatWrapperSettings.segments.closeAllSegments)); + } + } else if (response.type === "functionCall") { + const toolCallId = generateToolCallId(i, j); + + if (lastModelTextMessage == null || + (!combineModelMessageAndToolCalls && lastModelTextMessage.content != null && lastModelTextMessage.content !== "") || + ( + response.startsNewChunk && + lastModelTextMessage.tool_calls != null && lastModelTextMessage.tool_calls.length > 0 + ) + ) { + lastModelTextMessage = { + role: "assistant" + }; + messages.push(lastModelTextMessage); + } + + lastModelTextMessage["tool_calls"] ||= []; + lastModelTextMessage["tool_calls"].push({ + id: toolCallId, + type: "function", + function: { + name: response.name, + arguments: stringifyFunctionParams + ? response.params === undefined + ? "" + : jsonDumps(response.params) + : response.params + } + }); + messages.push({ + role: "tool", + "tool_call_id": toolCallId, + content: stringifyFunctionResults + ? response.result === undefined + ? "" + : jsonDumps(response.result) + : response.result + }); + } + } + + addResponseText(""); + } else + void (item satisfies never); + } + + if (functionCalls != null && functionCalls.length > 0) { + let modelMessage = messages.at(-1); + const messageIndex = chatHistory.length - 1; + const functionCallStartIndex = modelMessage?.role === "assistant" + ? (modelMessage.tool_calls?.length ?? 0) + : 0; + + if (modelMessage?.role !== "assistant" || + (!combineModelMessageAndToolCalls && modelMessage.content != null && modelMessage.content !== "") + ) { + modelMessage = { + role: "assistant" + }; + messages.push(modelMessage); + } + + modelMessage["tool_calls"] ||= []; + + for (let i = 0; i < functionCalls.length; i++) { + const functionCall = functionCalls[i]; + if (functionCall == null) + continue; + + const toolCallId = generateToolCallId(messageIndex, functionCallStartIndex + i); + modelMessage["tool_calls"].push({ + id: toolCallId, + type: "function", + function: { + name: functionCall.functionName, + arguments: stringifyFunctionParams + ? 
functionCall.params === undefined + ? "" + : jsonDumps(functionCall.params) + : functionCall.params + } + }); + } + } + + const tools: OpenAiChatTool[] = []; + for (const [funcName, func] of Object.entries(functions ?? {})) + tools.push({ + type: "function", + function: { + name: funcName, + ...removeUndefinedFields({ + description: func.description, + parameters: func.params as GbnfJsonSchema + }) + } + }); + + return removeUndefinedFields({ + messages, + tools: tools.length > 0 + ? tools + : undefined + }); +} + +function fromOpenAiMessagesToChatHistory({ + messages, chatWrapper +}: { + messages: OpenAiChatMessage[], chatWrapper: ChatWrapper +}) { + const chatHistory: ChatHistoryItem[] = []; + const pendingFunctionCalls: LlamaChatResponseFunctionCall[] = []; + + const findToolCallResult = (startIndex: number, toolCallId: string | undefined, toolCallIndex: number) => { + let foundToolIndex: number = 0; + + for (let i = startIndex; i < messages.length; i++) { + const message = messages[i]; + if (message == null) + continue; + + if (message.role === "user" || message.role === "assistant") + break; + + if (message.role !== "tool") + continue; + + if (toolCallId == null) { + if (toolCallIndex === foundToolIndex) + return message; + else if (foundToolIndex > foundToolIndex) + return undefined; + } else if (message?.tool_call_id === toolCallId) + return message; + + foundToolIndex++; + } + + return undefined; + }; + + let lastUserOrAssistantMessageIndex = messages.length - 1; + for (let i = messages.length - 1; i >= 0; i--) { + const message = messages[i]; + if (message == null) + continue; + + if (message.role === "user" || message.role === "assistant") { + lastUserOrAssistantMessageIndex = i; + break; + } + } + + for (let i = 0; i < messages.length; i++) { + const message = messages[i]; + if (message == null) + continue; + + if (message.role === "system") { + if (message.content != null) + chatHistory.push({ + type: "system", + text: LlamaText(resolveOpenAiText(message.content)).toJSON() + }); + } else if (message.role === "user") + chatHistory.push({ + type: "user", + text: resolveOpenAiText(message.content) ?? "" + }); + else if (message.role === "assistant") { + const isLastAssistantMessage = i === lastUserOrAssistantMessageIndex; + + let chatItem = chatHistory.at(-1); + if (chatItem?.type !== "model") { + chatItem = { + type: "model", + response: [] + }; + chatHistory.push(chatItem); + } + + const text = resolveOpenAiText(message.content); + if (text != null && text !== "") { + const segmentDefinitions = new Map(); + + for (const segmentType of allSegmentTypes) { + const segmentDefinition = getChatWrapperSegmentDefinition(chatWrapper.settings, segmentType); + if (segmentDefinition != null) + segmentDefinitions.set(segmentType, { + prefix: LlamaText(segmentDefinition.prefix).toString(), + suffix: segmentDefinition.suffix != null + ? LlamaText(segmentDefinition.suffix).toString() + : undefined + }); + } + + const modelResponseSegments = segmentModelResponseText(text, { + segmentDefinitions, + closeAllSegments: chatWrapper.settings.segments?.closeAllSegments != null + ? 
LlamaText(chatWrapper.settings.segments.closeAllSegments).toString() + : undefined + }); + + for (const segment of modelResponseSegments) { + if (segment.type == null) { + if (typeof chatItem.response.at(-1) === "string") + chatItem.response[chatItem.response.length - 1] += segment.text; + else + chatItem.response.push(segment.text); + } else + chatItem.response.push({ + type: "segment", + segmentType: segment.type, + text: segment.text, + ended: segment.ended + }); + } + } + + let toolCallIndex = 0; + for (const toolCall of message.tool_calls ?? []) { + const functionName = toolCall.function.name; + const callParams = parseToolSerializedValue(toolCall.function.arguments); + + const toolCallResult = findToolCallResult(i + 1, toolCall.id, toolCallIndex); + if (toolCallResult == null) { + pendingFunctionCalls.push({ + functionName, + params: callParams, + raw: chatWrapper.generateFunctionCall(functionName, callParams).toJSON() + }); + } + + if (toolCallResult != null || !isLastAssistantMessage) + chatItem.response.push({ + type: "functionCall", + name: functionName, + params: callParams, + result: parseToolSerializedValue(toolCallResult?.content), + startsNewChunk: toolCallIndex === 0 + ? true + : undefined + }); + + toolCallIndex++; + } + } + } + + return { + chatHistory, + functionCalls: pendingFunctionCalls + }; +} + +export type IntermediateOpenAiConversionFromChatHistory = { + messages: IntermediateOpenAiMessage[], + tools?: OpenAiChatTool[] +}; + +export type OpenAiChatCreationOptions = { + messages: OpenAiChatMessage[], + tools?: OpenAiChatTool[], + "tool_choice"?: "none" | "auto", + + "logit_bias"?: Record | null, + "max_completion_tokens"?: number | null, + + /** Overridden by `"max_completion_tokens"` */ + "max_tokens"?: number | null, + + "parallel_tool_calls"?: boolean, + + /** + * Only used when a Llama instance is provided. + * A llama instance is provided through a context sequence. 
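+     *
+     * A `json_schema` format is converted into a grammar with `llama.createGrammarForJsonSchema(schema)`,
+     * while `json_object` falls back to `llama.getGrammarFor("json")`.
+     * @example
+     * A minimal sketch of how this option could be used (assuming existing `chatWrapper`, `llama` and `model` instances):
+     * ```typescript
+     * const openAiFormat = new OpenAIFormat({chatWrapper});
+     * const {chatHistory, grammar} = await openAiFormat.fromOpenAiChat({
+     *     messages: [{role: "user", content: "List 3 fruits as JSON"}],
+     *     "response_format": {type: "json_object"}
+     * }, {llama, model});
+     * ```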
+ */ + "response_format"?: { + type: "text" + } | { + type: "json_schema", + "json_schema": { + name: string, + description?: string, + schema?: GbnfJsonSchema, + strict?: boolean | null + } + } | { + type: "json_object" + }, + + seed?: number | null, + stop?: string | null | string[], + temperature?: number | null, + "min_p"?: number | null, + "top_p"?: number | null, + "top_k"?: number | null +}; + +type OpenAiChatTool = { + type: "function", + function: { + name: string, + description?: string, + parameters?: GbnfJsonSchema, + strict?: boolean | null + } +}; + +export type IntermediateOpenAiMessage = ( + Omit & {content: LlamaText | string} | + Omit & {content: LlamaText | string} | + Omit & {content: LlamaText | string} | + Omit & { + content?: LlamaText | string, + "tool_calls"?: Array<{ + id: string, + + type: "function", + function: { + name: string, + arguments: string | any + } + }> + } +); + +export type OpenAiChatMessage = OpenAiChatSystemMessage | OpenAiChatUserMessage | OpenAiChatAssistantMessage | OpenAiChatToolMessage; + +export type OpenAiChatSystemMessage = { + role: "system", + content: string | {type: "text", text: string}[] +}; +export type OpenAiChatUserMessage = { + role: "user", + content: string | {type: "text", text: string}[] +}; + +export type OpenAiChatAssistantMessage = { + role: "assistant", + content?: string | {type: "text", text: string}[] | null, + "tool_calls"?: Array<{ + id: string, + + type: "function", + function: { + name: string, + arguments: string + } + }> +}; +export type OpenAiChatToolMessage = { + role: "tool", + content: string | {type: "text", text: string}[], + "tool_call_id": string +}; + +function generateToolCallId(messageIndex: number, callIndex: number) { + const length = 9; + const start = "fc_" + String(messageIndex) + "_"; + + return start + String(callIndex).padStart(length - start.length, "0"); +} + +export function resolveOpenAiText(text: string | {type: "text", text: string}[]): string; +export function resolveOpenAiText(text: string | {type: "text", text: string}[] | null | undefined): string | null; +export function resolveOpenAiText(text: string | {type: "text", text: string}[] | null | undefined): string | null { + if (typeof text === "string") + return text; + + if (text instanceof Array) + return text.map((item) => item?.text ?? 
"").join(""); + + return null; +} + +function parseToolSerializedValue(value: string | {type: "text", text: string}[] | null | undefined) { + const text = resolveOpenAiText(value); + if (text == null || text === "") + return undefined; + + try { + return JSON.parse(text); + } catch (err) { + return text; + } +} + +function segmentModelResponseText(text: string, { + segmentDefinitions, closeAllSegments +}: { + segmentDefinitions: Map, + closeAllSegments?: string +}) { + const separatorActions = new Map(); + + for (const [segmentType, {prefix, suffix}] of segmentDefinitions) { + separatorActions.set(prefix, {type: "prefix", segmentType}); + + if (suffix != null) + separatorActions.set(suffix, {type: "suffix", segmentType}); + } + + if (closeAllSegments != null) + separatorActions.set(closeAllSegments, {type: "closeAll"}); + + const textParts = splitText(text, [...separatorActions.keys()]); + + const segments: Array<{ + type: S | undefined, + text: string, + ended: boolean + }> = []; + const stack: S[] = []; + const stackSet = new Set(); + + const pushTextToLastSegment = (text: string) => { + const lastSegment = segments.at(-1); + if (lastSegment != null && !lastSegment.ended) + lastSegment.text += text; + else + segments.push({ + type: undefined, + text, + ended: false + }); + }; + + for (const item of textParts) { + if (typeof item === "string" || !separatorActions.has(item.separator)) + pushTextToLastSegment( + typeof item === "string" + ? item + : item.separator + ); + else { + const action = separatorActions.get(item.separator)!; + + if (action.type === "closeAll") { + while (stack.length > 0) { + const segmentType = stack.pop()!; + stackSet.delete(segmentType); + + const lastSegment = segments.at(-1); + if (lastSegment != null && lastSegment.type != undefined && lastSegment.type === segmentType) + lastSegment.ended = true; + else + segments.push({type: segmentType, text: "", ended: true}); + } + } else if (action.type === "prefix") { + if (!stackSet.has(action.segmentType)) { + stack.push(action.segmentType); + stackSet.add(action.segmentType); + + segments.push({type: action.segmentType, text: "", ended: false}); + } else + pushTextToLastSegment(item.separator); + } else if (action.type === "suffix") { + const currentType = stack.at(-1); + if (currentType != null && currentType === action.segmentType) { + const lastSegment = segments.at(-1); + if (lastSegment != null && lastSegment.type != null && lastSegment.type === action.segmentType) { + lastSegment.ended = true; + + stack.pop(); + stackSet.delete(action.segmentType); + } else + segments.push({type: action.segmentType, text: "", ended: true}); + } else { + const segmentTypeIndex = stack.lastIndexOf(action.segmentType); + if (segmentTypeIndex < 0) + pushTextToLastSegment(item.separator); + else { + for (let i = stack.length - 1; i >= segmentTypeIndex; i--) { + const segmentType = stack.pop()!; + stackSet.delete(segmentType); + + segments.push({type: segmentType, text: "", ended: true}); + } + } + } + } + } + } + + return segments; +} diff --git a/src/utils/optionsMatrix.ts b/src/utils/optionsMatrix.ts new file mode 100644 index 00000000..281e38ed --- /dev/null +++ b/src/utils/optionsMatrix.ts @@ -0,0 +1,108 @@ +/** + * Iterate of all possible combinations of the given options. 
+ * @example + * ```typescript + * for (const {a, b} of optionsMatrix({a: [1, 2], b: ["x", "y"]})) + * console.log(a, b); + * ``` + * + * Will output: + * ```txt + * 1 x + * 1 y + * 2 x + * 2 y + * ``` + */ +export function* optionsMatrix>(options: {[K in keyof T]: T[K][]}): Generator<{[K in keyof T]: T[K]}> { + const keys: Array = Object.keys(options); + const indexes = keys.map(() => 0); + + while (true) { + const result: any = {} as {[K in keyof T]: T[K]}; + for (let i = 0; i < keys.length; i++) { + const key = keys[i]!; + const keyOptions = options[key]!; + result[key] = keyOptions[indexes[i]!]; + } + + yield result; + + let moved: boolean = false; + for (let i = keys.length - 1; i >= 0; i--) { + const key = keys[i]!; + const keyOptions = options[key]!; + + if (indexes[i]! >= keyOptions.length - 1) { + if (i === 0) + return; + + indexes[i] = 0; + } else if (indexes[i]! < keyOptions.length - 1) { + indexes[i]!++; + moved = true; + break; + } + } + + if (!moved) + return; + } +} + +/** + * Iterate of all possible combinations of the given options and call the callback with each combination. + * + * The result of the first combination that doesn't throw an error will be returned as the result of this function. + * + * If all combinations throw an error, the error of the last combination will be thrown. + * @example + * ```typescript + * const result = tryMatrix({ + * a: [1, 2], + * b: ["x", "y"] + * }, ({a, b}) => { + * console.log(a, b); + * + * if (a === 2 && b === "y") + * return `success ${a} ${b}`; + * + * throw new Error("fail"); + * }); + * + * console.log(result); + * ``` + * + * Will output: + * ```txt + * 1 x + * 1 y + * 2 x + * 2 y + * success 2 y + * ``` + */ +export function tryMatrix, R>( + options: {[K in keyof T]: T[K][]}, + callback: (options: {[K in keyof T]: T[K]}) => R +): R { + let nextOption: {[K in keyof T]: T[K]} | undefined = undefined; + + for (const option of optionsMatrix(options)) { + if (nextOption == null) { + nextOption = option; + continue; + } + + try { + return callback(nextOption); + } catch (err) { + nextOption = option; + } + } + + if (nextOption != null) + return callback(nextOption); + + throw new Error("All options failed"); +} diff --git a/templates/electron-typescript-react/eslint.config.js b/templates/electron-typescript-react/eslint.config.js index 5c5f9b5f..4c307152 100644 --- a/templates/electron-typescript-react/eslint.config.js +++ b/templates/electron-typescript-react/eslint.config.js @@ -124,7 +124,8 @@ export default tseslint.config({ ] }], "@stylistic/no-trailing-spaces": ["off"], - "@stylistic/no-multi-spaces": ["warn"] + "@stylistic/no-multi-spaces": ["warn"], + "@stylistic/generator-star-spacing": ["off"] } }, { files: ["**/**.{ts,tsx}"], diff --git a/templates/electron-typescript-react/package.json b/templates/electron-typescript-react/package.json index 06cfe6da..0c8d5317 100644 --- a/templates/electron-typescript-react/package.json +++ b/templates/electron-typescript-react/package.json @@ -22,8 +22,8 @@ "clean": "rm -rf ./node_modules ./dist ./dist-electron ./release ./models" }, "dependencies": { - "@fontsource-variable/inter": "^5.1.1", - "birpc": "^2.0.19", + "@fontsource-variable/inter": "^5.2.5", + "birpc": "^2.2.0", "classnames": "^2.5.1", "highlight.js": "^11.11.1", "lifecycle-utils": "^2.0.0", @@ -32,30 +32,30 @@ "pretty-ms": "^9.2.0", "react": "^19.0.0", "react-dom": "^19.0.0", - "semver": "^7.7.0" + "semver": "^7.7.1" }, "devDependencies": { - "@stylistic/eslint-plugin": "^3.0.1", + "@stylistic/eslint-plugin": "^4.2.0", 
"@types/markdown-it": "^14.1.2", - "@types/react": "^19.0.8", - "@types/react-dom": "^19.0.3", + "@types/react": "^19.0.12", + "@types/react-dom": "^19.0.4", "@types/semver": "^7.5.8", "@vitejs/plugin-react": "^4.3.4", - "electron": "^34.0.2", + "electron": "^35.0.3", "electron-builder": "^25.1.8", - "eslint": "^9.19.0", - "eslint-import-resolver-typescript": "^3.7.0", + "eslint": "^9.23.0", + "eslint-import-resolver-typescript": "^4.2.2", "eslint-plugin-import": "^2.31.0", - "eslint-plugin-jsdoc": "^50.6.3", - "eslint-plugin-react-hooks": "^5.1.0", - "eslint-plugin-react-refresh": "^0.4.18", + "eslint-plugin-jsdoc": "^50.6.8", + "eslint-plugin-react-hooks": "^5.2.0", + "eslint-plugin-react-refresh": "^0.4.19", "rimraf": "^6.0.1", - "typescript": "^5.7.3", - "typescript-eslint": "^8.22.0", - "vite": "^6.0.11", + "typescript": "^5.8.2", + "typescript-eslint": "^8.27.0", + "vite": "^6.2.2", "vite-plugin-electron": "^0.29.0", "vite-plugin-electron-renderer": "^0.14.6", - "zx": "^8.3.1" + "zx": "^8.4.1" }, "overrides": { "electron-builder": { diff --git a/templates/node-typescript/eslint.config.js b/templates/node-typescript/eslint.config.js index 55bb4fa9..1dfcc9fc 100644 --- a/templates/node-typescript/eslint.config.js +++ b/templates/node-typescript/eslint.config.js @@ -127,7 +127,8 @@ export default tseslint.config({ ] }], "@stylistic/no-trailing-spaces": ["off"], - "@stylistic/no-multi-spaces": ["warn"] + "@stylistic/no-multi-spaces": ["warn"], + "@stylistic/generator-star-spacing": ["off"] } }, { files: ["**/**.{,c,m}ts"], diff --git a/templates/node-typescript/package.json b/templates/node-typescript/package.json index 19fc3f9e..9f77a046 100644 --- a/templates/node-typescript/package.json +++ b/templates/node-typescript/package.json @@ -19,7 +19,7 @@ } }, "engines": { - "node": ">=18.0.0" + "node": ">=20.0.0" }, "scripts": { "_postinstall": "npm run models:pull", @@ -38,17 +38,17 @@ "node-llama-cpp": "file:../.." 
}, "devDependencies": { - "@stylistic/eslint-plugin": "^3.0.1", - "@types/node": "^22.12.0", - "eslint": "^9.19.0", - "eslint-import-resolver-typescript": "^3.7.0", + "@stylistic/eslint-plugin": "^4.2.0", + "@types/node": "^22.13.11", + "eslint": "^9.23.0", + "eslint-import-resolver-typescript": "^4.2.2", "eslint-plugin-import": "^2.31.0", - "eslint-plugin-jsdoc": "^50.6.3", - "eslint-plugin-n": "^17.15.1", + "eslint-plugin-jsdoc": "^50.6.8", + "eslint-plugin-n": "^17.16.2", "rimraf": "^6.0.1", "tslib": "^2.8.1", - "typescript": "^5.7.3", - "typescript-eslint": "^8.22.0", - "vite-node": "^3.0.4" + "typescript": "^5.8.2", + "typescript-eslint": "^8.27.0", + "vite-node": "^3.0.9" } } diff --git a/templates/node-typescript/src/index.ts b/templates/node-typescript/src/index.ts index de8fd724..9a295ac8 100644 --- a/templates/node-typescript/src/index.ts +++ b/templates/node-typescript/src/index.ts @@ -19,7 +19,9 @@ console.log(chalk.yellow("Loading model...")); const model = await llama.loadModel({modelPath}); console.log(chalk.yellow("Creating context...")); -const context = await model.createContext(); +const context = await model.createContext({ + contextSize: {max: 8096} // omit this for a longer context size, but increased memory usage +}); const session = new LlamaChatSession({ contextSequence: context.getSequence() diff --git a/test/modelDependent/functionary/sanity.test.ts b/test/modelDependent/functionary/sanity.test.ts index 17d369c3..2b31f9a8 100644 --- a/test/modelDependent/functionary/sanity.test.ts +++ b/test/modelDependent/functionary/sanity.test.ts @@ -13,7 +13,9 @@ describe("functionary", () => { modelPath, checkTensors: true }); - const context = await model.createContext(); + const context = await model.createContext({ + contextSize: 2048 + }); const chatSession = new LlamaChatSession({ contextSequence: context.getSequence() }); diff --git a/test/standalone/chatWrappers/ChatMLChatWrapper.test.ts b/test/standalone/chatWrappers/ChatMLChatWrapper.test.ts index 76df46f6..245720d5 100644 --- a/test/standalone/chatWrappers/ChatMLChatWrapper.test.ts +++ b/test/standalone/chatWrappers/ChatMLChatWrapper.test.ts @@ -35,81 +35,51 @@ describe("ChatMLChatWrapper", () => { const chatWrapper = new ChatMLChatWrapper(); const {contextText} = chatWrapper.generateContextState({chatHistory: conversationHistory}); - expect(contextText.values).toMatchInlineSnapshot(` - [ - { - "type": "specialToken", - "value": "BOS", - }, - { - "type": "specialTokensText", - "value": "<|im_start|>system - ", - }, + expect(contextText).toMatchInlineSnapshot(` + LlamaText([ + new SpecialToken("BOS"), + new SpecialTokensText("<|im_start|>system + "), "You are a helpful, respectful and honest assistant. Always answer as helpfully as possible. If a question does not make any sense, or is not factually coherent, explain why instead of answering something incorrectly. 
If you don't know the answer to a question, don't share false information.", - { - "type": "specialTokensText", - "value": "<|im_end|> + new SpecialTokensText("<|im_end|> <|im_start|>user - ", - }, + "), "Hi there!", - { - "type": "specialTokensText", - "value": "<|im_end|> + new SpecialTokensText("<|im_end|> <|im_start|>assistant - ", - }, + "), "Hello!", - ] + ]) `); const chatWrapper2 = new ChatMLChatWrapper(); const {contextText: contextText2} = chatWrapper2.generateContextState({chatHistory: conversationHistory2}); - expect(contextText2.values).toMatchInlineSnapshot(` - [ - { - "type": "specialToken", - "value": "BOS", - }, - { - "type": "specialTokensText", - "value": "<|im_start|>system - ", - }, + expect(contextText2).toMatchInlineSnapshot(` + LlamaText([ + new SpecialToken("BOS"), + new SpecialTokensText("<|im_start|>system + "), "You are a helpful, respectful and honest assistant. Always answer as helpfully as possible. If a question does not make any sense, or is not factually coherent, explain why instead of answering something incorrectly. If you don't know the answer to a question, don't share false information.", - { - "type": "specialTokensText", - "value": "<|im_end|> + new SpecialTokensText("<|im_end|> <|im_start|>user - ", - }, + "), "Hi there!", - { - "type": "specialTokensText", - "value": "<|im_end|> + new SpecialTokensText("<|im_end|> <|im_start|>assistant - ", - }, + "), "Hello!", - { - "type": "specialTokensText", - "value": "<|im_end|> + new SpecialTokensText("<|im_end|> <|im_start|>user - ", - }, + "), "How are you?", - { - "type": "specialTokensText", - "value": "<|im_end|> + new SpecialTokensText("<|im_end|> <|im_start|>assistant - ", - }, + "), "I'm good, how are you?", - ] + ]) `); const chatWrapper3 = new ChatMLChatWrapper(); @@ -124,70 +94,43 @@ describe("ChatMLChatWrapper", () => { ] }); - expect(contextText3.values).toMatchInlineSnapshot(` - [ - { - "type": "specialToken", - "value": "BOS", - }, - { - "type": "specialTokensText", - "value": "<|im_start|>system - ", - }, + expect(contextText3).toMatchInlineSnapshot(` + LlamaText([ + new SpecialToken("BOS"), + new SpecialTokensText("<|im_start|>system + "), "You are a helpful, respectful and honest assistant. Always answer as helpfully as possible. If a question does not make any sense, or is not factually coherent, explain why instead of answering something incorrectly. If you don't know the answer to a question, don't share false information.", - { - "type": "specialTokensText", - "value": "<|im_end|> + new SpecialTokensText("<|im_end|> <|im_start|>user - ", - }, + "), "Hi there!", - { - "type": "specialTokensText", - "value": "<|im_end|> + new SpecialTokensText("<|im_end|> <|im_start|>assistant - ", - }, + "), "Hello!", - ] + ]) `); - expect(contextText3WithOpenModelResponse.values).toMatchInlineSnapshot(` - [ - { - "type": "specialToken", - "value": "BOS", - }, - { - "type": "specialTokensText", - "value": "<|im_start|>system - ", - }, + expect(contextText3WithOpenModelResponse).toMatchInlineSnapshot(` + LlamaText([ + new SpecialToken("BOS"), + new SpecialTokensText("<|im_start|>system + "), "You are a helpful, respectful and honest assistant. Always answer as helpfully as possible. If a question does not make any sense, or is not factually coherent, explain why instead of answering something incorrectly. 
If you don't know the answer to a question, don't share false information.", - { - "type": "specialTokensText", - "value": "<|im_end|> + new SpecialTokensText("<|im_end|> <|im_start|>user - ", - }, + "), "Hi there!", - { - "type": "specialTokensText", - "value": "<|im_end|> + new SpecialTokensText("<|im_end|> <|im_start|>assistant - ", - }, + "), "Hello!", - { - "type": "specialTokensText", - "value": "<|im_end|> + new SpecialTokensText("<|im_end|> <|im_start|>assistant - ", - }, - ] + "), + ]) `); }); }); diff --git a/test/standalone/chatWrappers/DeepSeekChatWrapper.test.ts b/test/standalone/chatWrappers/DeepSeekChatWrapper.test.ts new file mode 100644 index 00000000..413dcd74 --- /dev/null +++ b/test/standalone/chatWrappers/DeepSeekChatWrapper.test.ts @@ -0,0 +1,293 @@ +import {describe, expect, test} from "vitest"; +import {ChatHistoryItem, ChatModelFunctions, DeepSeekChatWrapper} from "../../../src/index.js"; +import {defaultChatSystemPrompt} from "../../../src/config.js"; + + +describe("DeepSeekChatWrapper", () => { + const conversationHistory: ChatHistoryItem[] = [{ + type: "system", + text: defaultChatSystemPrompt + }, { + type: "user", + text: "Hi there!" + }, { + type: "model", + response: ["Hello!"] + }]; + const conversationHistory2: ChatHistoryItem[] = [ + ...(new DeepSeekChatWrapper()).generateInitialChatHistory({systemPrompt: defaultChatSystemPrompt}), { + type: "user", + text: "Hi there!" + }, { + type: "model", + response: ["Hello!"] + }, { + type: "user", + text: "What is the time?" + }, { + type: "model", + response: [{ + type: "functionCall", + name: "getTime", + description: "Retrieve the current time", + params: { + hours: "24", + seconds: true + }, + result: "22:00:00" + }, "I'm good, how are you?"] + } + ]; + const conversationHistory2Functions: ChatModelFunctions = { + getTime: { + description: "Retrieve the current time", + params: { + type: "object", + properties: { + hours: { + enum: ["24", "12"] + }, + seconds: { + type: "boolean" + } + } + } + } + }; + const conversationHistory3: ChatHistoryItem[] = [ + ...(new DeepSeekChatWrapper()).generateInitialChatHistory({systemPrompt: defaultChatSystemPrompt}), { + type: "user", + text: "Hi there!" + }, { + type: "model", + response: ["Hello!"] + }, { + type: "user", + text: "What is the time?" + }, { + type: "model", + response: ["I'll fet some information for you", { + type: "functionCall", + name: "getTime", + description: "Retrieve the current time", + params: { + hours: "24", + seconds: true + }, + result: "22:00:00", + startsNewChunk: true + }, { + type: "functionCall", + name: "getDate", + description: "Retrieve the current date", + params: { + timezone: 0 + }, + result: "2025-03-20T00:00:00Z", + startsNewChunk: false + }, "I'm good, how are you?"] + } + ]; + const conversationHistory3Functions: ChatModelFunctions = { + getTime: { + description: "Retrieve the current time", + params: { + type: "object", + properties: { + hours: { + enum: ["24", "12"] + }, + seconds: { + type: "boolean" + } + } + } + }, + getDate: { + description: "Retrieve the current date", + params: { + type: "object", + properties: { + timezone: { + type: "integer" + } + } + } + } + }; + + test("should generate valid context text", () => { + const chatWrapper = new DeepSeekChatWrapper(); + const {contextText} = chatWrapper.generateContextState({chatHistory: conversationHistory}); + + expect(contextText).toMatchInlineSnapshot(` + LlamaText([ + new SpecialToken("BOS"), + "You are a helpful, respectful and honest assistant. 
Always answer as helpfully as possible. + If a question does not make any sense, or is not factually coherent, explain why instead of answering something incorrectly. If you don't know the answer to a question, don't share false information.", + new SpecialTokensText("<|User|>"), + "Hi there!", + new SpecialTokensText("<|Assistant|>"), + "Hello!", + ]) + `); + + const chatWrapper2 = new DeepSeekChatWrapper(); + const {contextText: contextText2} = chatWrapper2.generateContextState({ + chatHistory: conversationHistory2, + availableFunctions: conversationHistory2Functions + }); + + expect(contextText2).toMatchInlineSnapshot(` + LlamaText([ + new SpecialToken("BOS"), + "You are a helpful, respectful and honest assistant. Always answer as helpfully as possible. + If a question does not make any sense, or is not factually coherent, explain why instead of answering something incorrectly. If you don't know the answer to a question, don't share false information. + + The assistant calls the provided functions as needed to retrieve information instead of relying on existing knowledge. + To fulfill a request, the assistant calls relevant functions in advance when needed before responding to the request, and does not tell the user prior to calling a function. + If the result of function calls from previous turns might be stale, the assistant will call the functions again if needed. + Provided functions: + {"name": "getTime", "description": "Retrieve the current time", "parameters": {"type": "object", "properties": {"hours": {"enum": ["24", "12"]}, "seconds": {"type": "boolean"}}}} + + Calling any of the provided functions can be done like this: + ", + new SpecialTokensText(""), + "{"someKey": "someValue"}", + new SpecialTokensText(""), + " + + Note that the verbatim ", + new SpecialTokensText(""), + "Hi there!", + new SpecialTokensText("<|Assistant|>"), + "Hello!", + new SpecialTokensText("<|end▁of▁sentence|><|User|>"), + "What is the time?", + new SpecialTokensText("<|Assistant|>"), + "{"hours": "24", "seconds": true}", + new SpecialTokensText("<|tool▁output▁begin|>"), + ""22:00:00"", + new SpecialTokensText("<|tool▁output▁end|> + "), + "I'm good, how are you?", + ]) + `); + + const chatWrapper3 = new DeepSeekChatWrapper(); + const {contextText: contextText3} = chatWrapper3.generateContextState({chatHistory: conversationHistory}); + const {contextText: contextText3WithOpenModelResponse} = chatWrapper3.generateContextState({ + chatHistory: [ + ...conversationHistory, + { + type: "model", + response: [] + } + ] + }); + + expect(contextText3).toMatchInlineSnapshot(` + LlamaText([ + new SpecialToken("BOS"), + "You are a helpful, respectful and honest assistant. Always answer as helpfully as possible. + If a question does not make any sense, or is not factually coherent, explain why instead of answering something incorrectly. If you don't know the answer to a question, don't share false information.", + new SpecialTokensText("<|User|>"), + "Hi there!", + new SpecialTokensText("<|Assistant|>"), + "Hello!", + ]) + `); + + expect(contextText3WithOpenModelResponse).toMatchInlineSnapshot(` + LlamaText([ + new SpecialToken("BOS"), + "You are a helpful, respectful and honest assistant. Always answer as helpfully as possible. + If a question does not make any sense, or is not factually coherent, explain why instead of answering something incorrectly. 
If you don't know the answer to a question, don't share false information.", + new SpecialTokensText("<|User|>"), + "Hi there!", + new SpecialTokensText("<|Assistant|>"), + "Hello!", + new SpecialTokensText("<|end▁of▁sentence|><|Assistant|>"), + ]) + `); + }); + + test("should generate valid context text for 2 sequential function calls", () => { + const chatWrapper = new DeepSeekChatWrapper(); + const {contextText} = chatWrapper.generateContextState({ + chatHistory: conversationHistory3, + availableFunctions: conversationHistory3Functions + }); + + expect(contextText).toMatchInlineSnapshot(` + LlamaText([ + new SpecialToken("BOS"), + "You are a helpful, respectful and honest assistant. Always answer as helpfully as possible. + If a question does not make any sense, or is not factually coherent, explain why instead of answering something incorrectly. If you don't know the answer to a question, don't share false information. + + The assistant calls the provided functions as needed to retrieve information instead of relying on existing knowledge. + To fulfill a request, the assistant calls relevant functions in advance when needed before responding to the request, and does not tell the user prior to calling a function. + If the result of function calls from previous turns might be stale, the assistant will call the functions again if needed. + Provided functions: + {"name": "getTime", "description": "Retrieve the current time", "parameters": {"type": "object", "properties": {"hours": {"enum": ["24", "12"]}, "seconds": {"type": "boolean"}}}} + + {"name": "getDate", "description": "Retrieve the current date", "parameters": {"type": "object", "properties": {"timezone": {"type": "integer"}}}} + + Calling any of the provided functions can be done like this: + ", + new SpecialTokensText(""), + "{"someKey": "someValue"}", + new SpecialTokensText(""), + " + + Note that the verbatim ", + new SpecialTokensText(""), + "Hi there!", + new SpecialTokensText("<|Assistant|>"), + "Hello!", + new SpecialTokensText("<|end▁of▁sentence|><|User|>"), + "What is the time?", + new SpecialTokensText("<|Assistant|>"), + "I'll fet some information for you", + new SpecialTokensText(""), + "{"hours": "24", "seconds": true}", + new SpecialTokensText("<|tool▁output▁begin|>"), + ""22:00:00"", + new SpecialTokensText("<|tool▁output▁end|> + "), + "{"timezone": 0}", + new SpecialTokensText("<|tool▁output▁begin|>"), + ""2025-03-20T00:00:00Z"", + new SpecialTokensText("<|tool▁output▁end|> + "), + "I'm good, how are you?", + ]) + `); + }); +}); diff --git a/test/standalone/chatWrappers/FalconChatWrapper.test.ts b/test/standalone/chatWrappers/FalconChatWrapper.test.ts index 9fd831f3..8b752c5e 100644 --- a/test/standalone/chatWrappers/FalconChatWrapper.test.ts +++ b/test/standalone/chatWrappers/FalconChatWrapper.test.ts @@ -35,30 +35,24 @@ describe("FalconChatWrapper", () => { const chatWrapper = new FalconChatWrapper(); const {contextText} = chatWrapper.generateContextState({chatHistory: conversationHistory}); - expect(contextText.values).toMatchInlineSnapshot(` - [ - { - "type": "specialToken", - "value": "BOS", - }, + expect(contextText).toMatchInlineSnapshot(` + LlamaText([ + new SpecialToken("BOS"), "You are a helpful, respectful and honest assistant. Always answer as helpfully as possible. If a question does not make any sense, or is not factually coherent, explain why instead of answering something incorrectly. If you don't know the answer to a question, don't share false information. User: Hi there! 
Assistant: Hello!", - ] + ]) `); const chatWrapper2 = new FalconChatWrapper(); const {contextText: contextText2} = chatWrapper2.generateContextState({chatHistory: conversationHistory2}); - expect(contextText2.values).toMatchInlineSnapshot(` - [ - { - "type": "specialToken", - "value": "BOS", - }, + expect(contextText2).toMatchInlineSnapshot(` + LlamaText([ + new SpecialToken("BOS"), "You are a helpful, respectful and honest assistant. Always answer as helpfully as possible. If a question does not make any sense, or is not factually coherent, explain why instead of answering something incorrectly. If you don't know the answer to a question, don't share false information. @@ -69,7 +63,7 @@ describe("FalconChatWrapper", () => { User: How are you? Assistant: I'm good, how are you?", - ] + ]) `); const chatWrapper3 = new FalconChatWrapper(); @@ -84,27 +78,21 @@ describe("FalconChatWrapper", () => { ] }); - expect(contextText3.values).toMatchInlineSnapshot(` - [ - { - "type": "specialToken", - "value": "BOS", - }, + expect(contextText3).toMatchInlineSnapshot(` + LlamaText([ + new SpecialToken("BOS"), "You are a helpful, respectful and honest assistant. Always answer as helpfully as possible. If a question does not make any sense, or is not factually coherent, explain why instead of answering something incorrectly. If you don't know the answer to a question, don't share false information. User: Hi there! Assistant: Hello!", - ] + ]) `); - expect(contextText3WithOpenModelResponse.values).toMatchInlineSnapshot(` - [ - { - "type": "specialToken", - "value": "BOS", - }, + expect(contextText3WithOpenModelResponse).toMatchInlineSnapshot(` + LlamaText([ + new SpecialToken("BOS"), "You are a helpful, respectful and honest assistant. Always answer as helpfully as possible. If a question does not make any sense, or is not factually coherent, explain why instead of answering something incorrectly. If you don't know the answer to a question, don't share false information. @@ -113,7 +101,7 @@ describe("FalconChatWrapper", () => { Assistant: Hello! Assistant: ", - ] + ]) `); }); }); diff --git a/test/standalone/chatWrappers/FunctionaryChatWrapper.test.ts b/test/standalone/chatWrappers/FunctionaryChatWrapper.test.ts index 1980f487..0cf624e1 100644 --- a/test/standalone/chatWrappers/FunctionaryChatWrapper.test.ts +++ b/test/standalone/chatWrappers/FunctionaryChatWrapper.test.ts @@ -139,65 +139,35 @@ describe("FunctionaryChatWrapper", () => { const chatWrapper = new FunctionaryChatWrapper({variation: "v2.llama3"}); const {contextText} = chatWrapper.generateContextState({chatHistory: conversationHistory}); - expect(contextText.values).toMatchInlineSnapshot(` - [ - { - "type": "specialToken", - "value": "BOS", - }, - { - "type": "specialTokensText", - "value": "<|start_header_id|>system<|end_header_id|> + expect(contextText).toMatchInlineSnapshot(` + LlamaText([ + new SpecialToken("BOS"), + new SpecialTokensText("<|start_header_id|>system<|end_header_id|> - ", - }, + "), "You are a helpful, respectful and honest assistant. Always answer as helpfully as possible. If a question does not make any sense, or is not factually coherent, explain why instead of answering something incorrectly. 
If you don't know the answer to a question, don't share false information.", - { - "type": "specialToken", - "value": "EOT", - }, - { - "type": "specialTokensText", - "value": "<|start_header_id|>user<|end_header_id|> + new SpecialToken("EOT"), + new SpecialTokensText("<|start_header_id|>user<|end_header_id|> - ", - }, + "), "Hi there!", - { - "type": "specialToken", - "value": "EOT", - }, - { - "type": "specialTokensText", - "value": "<|start_header_id|>assistant<|end_header_id|> + new SpecialToken("EOT"), + new SpecialTokensText("<|start_header_id|>assistant<|end_header_id|> - ", - }, + "), "Hello!", - { - "type": "specialToken", - "value": "EOT", - }, - { - "type": "specialTokensText", - "value": "<|start_header_id|>user<|end_header_id|> + new SpecialToken("EOT"), + new SpecialTokensText("<|start_header_id|>user<|end_header_id|> - ", - }, + "), "How are you?", - { - "type": "specialToken", - "value": "EOT", - }, - { - "type": "specialTokensText", - "value": "<|start_header_id|>assistant<|end_header_id|> + new SpecialToken("EOT"), + new SpecialTokensText("<|start_header_id|>assistant<|end_header_id|> - ", - }, + "), "I'm good, how are you?", - ] + ]) `); const chatWrapper2 = new FunctionaryChatWrapper({variation: "v2.llama3"}); @@ -206,18 +176,12 @@ describe("FunctionaryChatWrapper", () => { availableFunctions: functions }); - expect(contextText2.values).toMatchInlineSnapshot(` - [ - { - "type": "specialToken", - "value": "BOS", - }, - { - "type": "specialTokensText", - "value": "<|start_header_id|>system<|end_header_id|> + expect(contextText2).toMatchInlineSnapshot(` + LlamaText([ + new SpecialToken("BOS"), + new SpecialTokensText("<|start_header_id|>system<|end_header_id|> - ", - }, + "), "// Supported function definitions that should be called when necessary. namespace functions { @@ -244,134 +208,65 @@ describe("FunctionaryChatWrapper", () => { }) => any; } // namespace functions", - { - "type": "specialToken", - "value": "EOT", - }, - { - "type": "specialTokensText", - "value": "<|start_header_id|>system<|end_header_id|> + new SpecialToken("EOT"), + new SpecialTokensText("<|start_header_id|>system<|end_header_id|> - ", - }, + "), "The assistant calls functions with appropriate input when necessary. The assistant writes <|stop|> when finished answering.", - { - "type": "specialToken", - "value": "EOT", - }, - { - "type": "specialTokensText", - "value": "<|start_header_id|>system<|end_header_id|> + new SpecialToken("EOT"), + new SpecialTokensText("<|start_header_id|>system<|end_header_id|> - ", - }, + "), "You are a helpful, respectful and honest assistant. Always answer as helpfully as possible. If a question does not make any sense, or is not factually coherent, explain why instead of answering something incorrectly. 
If you don't know the answer to a question, don't share false information.", - { - "type": "specialToken", - "value": "EOT", - }, - { - "type": "specialTokensText", - "value": "<|start_header_id|>user<|end_header_id|> + new SpecialToken("EOT"), + new SpecialTokensText("<|start_header_id|>user<|end_header_id|> - ", - }, + "), "Hi there!", - { - "type": "specialToken", - "value": "EOT", - }, - { - "type": "specialTokensText", - "value": "<|start_header_id|>assistant<|end_header_id|> + new SpecialToken("EOT"), + new SpecialTokensText("<|start_header_id|>assistant<|end_header_id|> - ", - }, + "), "Hello!", - { - "type": "specialToken", - "value": "EOT", - }, - { - "type": "specialTokensText", - "value": "<|start_header_id|>user<|end_header_id|> + new SpecialToken("EOT"), + new SpecialTokensText("<|start_header_id|>user<|end_header_id|> - ", - }, + "), "Role a dice twice and tell me the total result", - { - "type": "specialToken", - "value": "EOT", - }, - { - "type": "specialTokensText", - "value": "<|reserved_special_token_249|>", - }, + new SpecialToken("EOT"), + new SpecialTokensText("<|reserved_special_token_249|>"), "getRandomNumber", - { - "type": "specialTokensText", - "value": " - ", - }, + new SpecialTokensText(" + "), "{"min": 1, "max": 6}", - { - "type": "specialTokensText", - "value": "<|reserved_special_token_249|>", - }, + new SpecialTokensText("<|reserved_special_token_249|>"), "getRandomNumber", - { - "type": "specialTokensText", - "value": " - ", - }, + new SpecialTokensText(" + "), "{"min": 1, "max": 6}", - { - "type": "specialToken", - "value": "EOT", - }, - { - "type": "specialTokensText", - "value": "<|start_header_id|>tool<|end_header_id|> + new SpecialToken("EOT"), + new SpecialTokensText("<|start_header_id|>tool<|end_header_id|> - name=", - }, + name="), "getRandomNumber", - { - "type": "specialTokensText", - "value": " - ", - }, + new SpecialTokensText(" + "), "3", - { - "type": "specialToken", - "value": "EOT", - }, - { - "type": "specialTokensText", - "value": "<|start_header_id|>tool<|end_header_id|> + new SpecialToken("EOT"), + new SpecialTokensText("<|start_header_id|>tool<|end_header_id|> - name=", - }, + name="), "getRandomNumber", - { - "type": "specialTokensText", - "value": " - ", - }, + new SpecialTokensText(" + "), "4", - { - "type": "specialToken", - "value": "EOT", - }, - { - "type": "specialTokensText", - "value": "<|start_header_id|>assistant<|end_header_id|> + new SpecialToken("EOT"), + new SpecialTokensText("<|start_header_id|>assistant<|end_header_id|> - ", - }, + "), "The total result of rolling the dice twice is 3 + 4 = 7.", - ] + ]) `); const chatWrapper3 = new FunctionaryChatWrapper({variation: "v2.llama3"}); @@ -386,136 +281,70 @@ describe("FunctionaryChatWrapper", () => { ] }); - expect(contextText3.values).toMatchInlineSnapshot(` - [ - { - "type": "specialToken", - "value": "BOS", - }, - { - "type": "specialTokensText", - "value": "<|start_header_id|>system<|end_header_id|> + expect(contextText3).toMatchInlineSnapshot(` + LlamaText([ + new SpecialToken("BOS"), + new SpecialTokensText("<|start_header_id|>system<|end_header_id|> - ", - }, + "), "You are a helpful, respectful and honest assistant. Always answer as helpfully as possible. If a question does not make any sense, or is not factually coherent, explain why instead of answering something incorrectly. 
If you don't know the answer to a question, don't share false information.", - { - "type": "specialToken", - "value": "EOT", - }, - { - "type": "specialTokensText", - "value": "<|start_header_id|>user<|end_header_id|> + new SpecialToken("EOT"), + new SpecialTokensText("<|start_header_id|>user<|end_header_id|> - ", - }, + "), "Hi there!", - { - "type": "specialToken", - "value": "EOT", - }, - { - "type": "specialTokensText", - "value": "<|start_header_id|>assistant<|end_header_id|> + new SpecialToken("EOT"), + new SpecialTokensText("<|start_header_id|>assistant<|end_header_id|> - ", - }, + "), "Hello!", - { - "type": "specialToken", - "value": "EOT", - }, - { - "type": "specialTokensText", - "value": "<|start_header_id|>user<|end_header_id|> + new SpecialToken("EOT"), + new SpecialTokensText("<|start_header_id|>user<|end_header_id|> - ", - }, + "), "How are you?", - { - "type": "specialToken", - "value": "EOT", - }, - { - "type": "specialTokensText", - "value": "<|start_header_id|>assistant<|end_header_id|> + new SpecialToken("EOT"), + new SpecialTokensText("<|start_header_id|>assistant<|end_header_id|> - ", - }, + "), "I'm good, how are you?", - ] + ]) `); - expect(contextText3WithOpenModelResponse.values).toMatchInlineSnapshot(` - [ - { - "type": "specialToken", - "value": "BOS", - }, - { - "type": "specialTokensText", - "value": "<|start_header_id|>system<|end_header_id|> + expect(contextText3WithOpenModelResponse).toMatchInlineSnapshot(` + LlamaText([ + new SpecialToken("BOS"), + new SpecialTokensText("<|start_header_id|>system<|end_header_id|> - ", - }, + "), "You are a helpful, respectful and honest assistant. Always answer as helpfully as possible. If a question does not make any sense, or is not factually coherent, explain why instead of answering something incorrectly. 
If you don't know the answer to a question, don't share false information.", - { - "type": "specialToken", - "value": "EOT", - }, - { - "type": "specialTokensText", - "value": "<|start_header_id|>user<|end_header_id|> + new SpecialToken("EOT"), + new SpecialTokensText("<|start_header_id|>user<|end_header_id|> - ", - }, + "), "Hi there!", - { - "type": "specialToken", - "value": "EOT", - }, - { - "type": "specialTokensText", - "value": "<|start_header_id|>assistant<|end_header_id|> + new SpecialToken("EOT"), + new SpecialTokensText("<|start_header_id|>assistant<|end_header_id|> - ", - }, + "), "Hello!", - { - "type": "specialToken", - "value": "EOT", - }, - { - "type": "specialTokensText", - "value": "<|start_header_id|>user<|end_header_id|> + new SpecialToken("EOT"), + new SpecialTokensText("<|start_header_id|>user<|end_header_id|> - ", - }, + "), "How are you?", - { - "type": "specialToken", - "value": "EOT", - }, - { - "type": "specialTokensText", - "value": "<|start_header_id|>assistant<|end_header_id|> + new SpecialToken("EOT"), + new SpecialTokensText("<|start_header_id|>assistant<|end_header_id|> - ", - }, + "), "I'm good, how are you?", - { - "type": "specialToken", - "value": "EOT", - }, - { - "type": "specialTokensText", - "value": "<|start_header_id|>assistant<|end_header_id|> + new SpecialToken("EOT"), + new SpecialTokensText("<|start_header_id|>assistant<|end_header_id|> - ", - }, - ] + "), + ]) `); }); }); @@ -525,53 +354,35 @@ describe("FunctionaryChatWrapper", () => { const chatWrapper = new FunctionaryChatWrapper({variation: "v2"}); const {contextText} = chatWrapper.generateContextState({chatHistory: conversationHistory}); - expect(contextText.values).toMatchInlineSnapshot(` - [ - { - "type": "specialToken", - "value": "BOS", - }, - { - "type": "specialTokensText", - "value": "<|from|>system + expect(contextText).toMatchInlineSnapshot(` + LlamaText([ + new SpecialToken("BOS"), + new SpecialTokensText("<|from|>system <|recipient|>all - <|content|>", - }, + <|content|>"), "You are a helpful, respectful and honest assistant. Always answer as helpfully as possible. If a question does not make any sense, or is not factually coherent, explain why instead of answering something incorrectly. 
If you don't know the answer to a question, don't share false information.", - { - "type": "specialTokensText", - "value": " + new SpecialTokensText(" <|from|>user <|recipient|>all - <|content|>", - }, + <|content|>"), "Hi there!", - { - "type": "specialTokensText", - "value": " + new SpecialTokensText(" <|from|>assistant <|recipient|>all - <|content|>", - }, + <|content|>"), "Hello!", - { - "type": "specialTokensText", - "value": "<|stop|> + new SpecialTokensText("<|stop|> <|from|>user <|recipient|>all - <|content|>", - }, + <|content|>"), "How are you?", - { - "type": "specialTokensText", - "value": " + new SpecialTokensText(" <|from|>assistant <|recipient|>all - <|content|>", - }, + <|content|>"), "I'm good, how are you?", - ] + ]) `); const chatWrapper2 = new FunctionaryChatWrapper({variation: "v2"}); @@ -580,18 +391,12 @@ describe("FunctionaryChatWrapper", () => { availableFunctions: functions }); - expect(contextText2.values).toMatchInlineSnapshot(` - [ - { - "type": "specialToken", - "value": "BOS", - }, - { - "type": "specialTokensText", - "value": "<|from|>system + expect(contextText2).toMatchInlineSnapshot(` + LlamaText([ + new SpecialToken("BOS"), + new SpecialTokensText("<|from|>system <|recipient|>all - <|content|>", - }, + <|content|>"), "// Supported function definitions that should be called when necessary. namespace functions { @@ -618,108 +423,66 @@ describe("FunctionaryChatWrapper", () => { }) => any; } // namespace functions", - { - "type": "specialTokensText", - "value": " + new SpecialTokensText(" <|from|>system <|recipient|>all - <|content|>", - }, + <|content|>"), "The assistant calls functions with appropriate input when necessary. The assistant writes <|stop|> when finished answering.", - { - "type": "specialTokensText", - "value": " + new SpecialTokensText(" <|from|>system <|recipient|>all - <|content|>", - }, + <|content|>"), "You are a helpful, respectful and honest assistant. Always answer as helpfully as possible. If a question does not make any sense, or is not factually coherent, explain why instead of answering something incorrectly. 
If you don't know the answer to a question, don't share false information.", - { - "type": "specialTokensText", - "value": " + new SpecialTokensText(" <|from|>user <|recipient|>all - <|content|>", - }, + <|content|>"), "Hi there!", - { - "type": "specialTokensText", - "value": " + new SpecialTokensText(" <|from|>assistant <|recipient|>all - <|content|>", - }, + <|content|>"), "Hello!", - { - "type": "specialTokensText", - "value": "<|stop|> + new SpecialTokensText("<|stop|> <|from|>user <|recipient|>all - <|content|>", - }, + <|content|>"), "Role a dice twice and tell me the total result", - { - "type": "specialTokensText", - "value": " + new SpecialTokensText(" <|from|>assistant - <|recipient|>", - }, + <|recipient|>"), "getRandomNumber", - { - "type": "specialTokensText", - "value": " - <|content|>", - }, + new SpecialTokensText(" + <|content|>"), "{"min": 1, "max": 6}", - { - "type": "specialTokensText", - "value": " + new SpecialTokensText(" <|from|>assistant - <|recipient|>", - }, + <|recipient|>"), "getRandomNumber", - { - "type": "specialTokensText", - "value": " - <|content|>", - }, + new SpecialTokensText(" + <|content|>"), "{"min": 1, "max": 6}", - { - "type": "specialTokensText", - "value": "<|stop|> - <|from|>", - }, + new SpecialTokensText("<|stop|> + <|from|>"), "getRandomNumber", - { - "type": "specialTokensText", - "value": " + new SpecialTokensText(" <|recipient|>all - <|content|>", - }, + <|content|>"), "3", - { - "type": "specialTokensText", - "value": " - <|from|>", - }, + new SpecialTokensText(" + <|from|>"), "getRandomNumber", - { - "type": "specialTokensText", - "value": " + new SpecialTokensText(" <|recipient|>all - <|content|>", - }, + <|content|>"), "4", - { - "type": "specialTokensText", - "value": " + new SpecialTokensText(" <|from|>assistant <|recipient|>all - <|content|>", - }, + <|content|>"), "The total result of rolling the dice twice is 3 + 4 = 7.", - ] + ]) `); const chatWrapper3 = new FunctionaryChatWrapper({variation: "v2"}); @@ -734,109 +497,70 @@ describe("FunctionaryChatWrapper", () => { ] }); - expect(contextText3.values).toMatchInlineSnapshot(` - [ - { - "type": "specialToken", - "value": "BOS", - }, - { - "type": "specialTokensText", - "value": "<|from|>system + expect(contextText3).toMatchInlineSnapshot(` + LlamaText([ + new SpecialToken("BOS"), + new SpecialTokensText("<|from|>system <|recipient|>all - <|content|>", - }, + <|content|>"), "You are a helpful, respectful and honest assistant. Always answer as helpfully as possible. If a question does not make any sense, or is not factually coherent, explain why instead of answering something incorrectly. 
If you don't know the answer to a question, don't share false information.", - { - "type": "specialTokensText", - "value": " + new SpecialTokensText(" <|from|>user <|recipient|>all - <|content|>", - }, + <|content|>"), "Hi there!", - { - "type": "specialTokensText", - "value": " + new SpecialTokensText(" <|from|>assistant <|recipient|>all - <|content|>", - }, + <|content|>"), "Hello!", - { - "type": "specialTokensText", - "value": "<|stop|> + new SpecialTokensText("<|stop|> <|from|>user <|recipient|>all - <|content|>", - }, + <|content|>"), "How are you?", - { - "type": "specialTokensText", - "value": " + new SpecialTokensText(" <|from|>assistant <|recipient|>all - <|content|>", - }, + <|content|>"), "I'm good, how are you?", - ] + ]) `); - expect(contextText3WithOpenModelResponse.values).toMatchInlineSnapshot(` - [ - { - "type": "specialToken", - "value": "BOS", - }, - { - "type": "specialTokensText", - "value": "<|from|>system + expect(contextText3WithOpenModelResponse).toMatchInlineSnapshot(` + LlamaText([ + new SpecialToken("BOS"), + new SpecialTokensText("<|from|>system <|recipient|>all - <|content|>", - }, + <|content|>"), "You are a helpful, respectful and honest assistant. Always answer as helpfully as possible. If a question does not make any sense, or is not factually coherent, explain why instead of answering something incorrectly. If you don't know the answer to a question, don't share false information.", - { - "type": "specialTokensText", - "value": " + new SpecialTokensText(" <|from|>user <|recipient|>all - <|content|>", - }, + <|content|>"), "Hi there!", - { - "type": "specialTokensText", - "value": " + new SpecialTokensText(" <|from|>assistant <|recipient|>all - <|content|>", - }, + <|content|>"), "Hello!", - { - "type": "specialTokensText", - "value": "<|stop|> + new SpecialTokensText("<|stop|> <|from|>user <|recipient|>all - <|content|>", - }, + <|content|>"), "How are you?", - { - "type": "specialTokensText", - "value": " + new SpecialTokensText(" <|from|>assistant <|recipient|>all - <|content|>", - }, + <|content|>"), "I'm good, how are you?", - { - "type": "specialTokensText", - "value": "<|stop|> + new SpecialTokensText("<|stop|> <|from|>assistant <|recipient|>all - <|content|>", - }, - ] + <|content|>"), + ]) `); }); }); diff --git a/test/standalone/chatWrappers/GemmaChatWrapper.test.ts b/test/standalone/chatWrappers/GemmaChatWrapper.test.ts index a8ea8c58..13fad544 100644 --- a/test/standalone/chatWrappers/GemmaChatWrapper.test.ts +++ b/test/standalone/chatWrappers/GemmaChatWrapper.test.ts @@ -35,74 +35,50 @@ describe("GemmaChatWrapper", () => { const chatWrapper = new GemmaChatWrapper(); const {contextText} = chatWrapper.generateContextState({chatHistory: conversationHistory}); - expect(contextText.values).toMatchInlineSnapshot(` - [ - { - "type": "specialToken", - "value": "BOS", - }, - { - "type": "specialTokensText", - "value": "user - ", - }, + expect(contextText).toMatchInlineSnapshot(` + LlamaText([ + new SpecialToken("BOS"), + new SpecialTokensText("user + "), "You are a helpful, respectful and honest assistant. Always answer as helpfully as possible. If a question does not make any sense, or is not factually coherent, explain why instead of answering something incorrectly. If you don't know the answer to a question, don't share false information. 
--- Hi there!", - { - "type": "specialTokensText", - "value": " + new SpecialTokensText(" model - ", - }, + "), "Hello!", - ] + ]) `); const {contextText: contextText2} = chatWrapper.generateContextState({chatHistory: conversationHistory2}); - expect(contextText2.values).toMatchInlineSnapshot(` - [ - { - "type": "specialToken", - "value": "BOS", - }, - { - "type": "specialTokensText", - "value": "user - ", - }, + expect(contextText2).toMatchInlineSnapshot(` + LlamaText([ + new SpecialToken("BOS"), + new SpecialTokensText("user + "), "You are a helpful, respectful and honest assistant. Always answer as helpfully as possible. If a question does not make any sense, or is not factually coherent, explain why instead of answering something incorrectly. If you don't know the answer to a question, don't share false information. --- Hi there!", - { - "type": "specialTokensText", - "value": " + new SpecialTokensText(" model - ", - }, + "), "Hello!", - { - "type": "specialTokensText", - "value": " + new SpecialTokensText(" user - ", - }, + "), "How are you?", - { - "type": "specialTokensText", - "value": " + new SpecialTokensText(" model - ", - }, + "), "I'm good, how are you?", - ] + ]) `); const {contextText: contextText3} = chatWrapper.generateContextState({chatHistory: conversationHistory}); @@ -116,60 +92,42 @@ describe("GemmaChatWrapper", () => { ] }); - expect(contextText3.values).toMatchInlineSnapshot(` - [ - { - "type": "specialToken", - "value": "BOS", - }, - { - "type": "specialTokensText", - "value": "user - ", - }, + expect(contextText3).toMatchInlineSnapshot(` + LlamaText([ + new SpecialToken("BOS"), + new SpecialTokensText("user + "), "You are a helpful, respectful and honest assistant. Always answer as helpfully as possible. If a question does not make any sense, or is not factually coherent, explain why instead of answering something incorrectly. If you don't know the answer to a question, don't share false information. --- Hi there!", - { - "type": "specialTokensText", - "value": " + new SpecialTokensText(" model - ", - }, + "), "Hello!", - ] + ]) `); - expect(contextText3WithOpenModelResponse.values).toMatchInlineSnapshot(` - [ - { - "type": "specialToken", - "value": "BOS", - }, - { - "type": "specialTokensText", - "value": "user - ", - }, + expect(contextText3WithOpenModelResponse).toMatchInlineSnapshot(` + LlamaText([ + new SpecialToken("BOS"), + new SpecialTokensText("user + "), "You are a helpful, respectful and honest assistant. Always answer as helpfully as possible. If a question does not make any sense, or is not factually coherent, explain why instead of answering something incorrectly. If you don't know the answer to a question, don't share false information. --- Hi there!", - { - "type": "specialTokensText", - "value": " + new SpecialTokensText(" model - ", - }, + "), "Hello! 
", - ] + ]) `); }); }); diff --git a/test/standalone/chatWrappers/GeneralChatWrapper.test.ts b/test/standalone/chatWrappers/GeneralChatWrapper.test.ts index cc9583b7..641616a1 100644 --- a/test/standalone/chatWrappers/GeneralChatWrapper.test.ts +++ b/test/standalone/chatWrappers/GeneralChatWrapper.test.ts @@ -35,12 +35,9 @@ describe("GeneralChatWrapper", () => { const chatWrapper = new GeneralChatWrapper(); const {contextText} = chatWrapper.generateContextState({chatHistory: conversationHistory}); - expect(contextText.values).toMatchInlineSnapshot(` - [ - { - "type": "specialToken", - "value": "BOS", - }, + expect(contextText).toMatchInlineSnapshot(` + LlamaText([ + new SpecialToken("BOS"), "You are a helpful, respectful and honest assistant. Always answer as helpfully as possible. If a question does not make any sense, or is not factually coherent, explain why instead of answering something incorrectly. If you don't know the answer to a question, don't share false information. @@ -49,18 +46,15 @@ describe("GeneralChatWrapper", () => { ### Assistant Hello!", - ] + ]) `); const chatWrapper2 = new GeneralChatWrapper(); const {contextText: contextText2} = chatWrapper2.generateContextState({chatHistory: conversationHistory2}); - expect(contextText2.values).toMatchInlineSnapshot(` - [ - { - "type": "specialToken", - "value": "BOS", - }, + expect(contextText2).toMatchInlineSnapshot(` + LlamaText([ + new SpecialToken("BOS"), "You are a helpful, respectful and honest assistant. Always answer as helpfully as possible. If a question does not make any sense, or is not factually coherent, explain why instead of answering something incorrectly. If you don't know the answer to a question, don't share false information. @@ -75,7 +69,7 @@ describe("GeneralChatWrapper", () => { ### Assistant I'm good, how are you?", - ] + ]) `); const chatWrapper3 = new GeneralChatWrapper(); @@ -90,12 +84,9 @@ describe("GeneralChatWrapper", () => { ] }); - expect(contextText3.values).toMatchInlineSnapshot(` - [ - { - "type": "specialToken", - "value": "BOS", - }, + expect(contextText3).toMatchInlineSnapshot(` + LlamaText([ + new SpecialToken("BOS"), "You are a helpful, respectful and honest assistant. Always answer as helpfully as possible. If a question does not make any sense, or is not factually coherent, explain why instead of answering something incorrectly. If you don't know the answer to a question, don't share false information. @@ -104,15 +95,12 @@ describe("GeneralChatWrapper", () => { ### Assistant Hello!", - ] + ]) `); - expect(contextText3WithOpenModelResponse.values).toMatchInlineSnapshot(` - [ - { - "type": "specialToken", - "value": "BOS", - }, + expect(contextText3WithOpenModelResponse).toMatchInlineSnapshot(` + LlamaText([ + new SpecialToken("BOS"), "You are a helpful, respectful and honest assistant. Always answer as helpfully as possible. If a question does not make any sense, or is not factually coherent, explain why instead of answering something incorrectly. If you don't know the answer to a question, don't share false information. 
@@ -124,7 +112,7 @@ describe("GeneralChatWrapper", () => { ### Assistant ", - ] + ]) `); }); @@ -135,12 +123,9 @@ describe("GeneralChatWrapper", () => { }); const {contextText} = chatWrapper.generateContextState({chatHistory: conversationHistory}); - expect(contextText.values).toMatchInlineSnapshot(` - [ - { - "type": "specialToken", - "value": "BOS", - }, + expect(contextText).toMatchInlineSnapshot(` + LlamaText([ + new SpecialToken("BOS"), "You are a helpful, respectful and honest assistant. Always answer as helpfully as possible. If a question does not make any sense, or is not factually coherent, explain why instead of answering something incorrectly. If you don't know the answer to a question, don't share false information. @@ -149,7 +134,7 @@ describe("GeneralChatWrapper", () => { ### Response Hello!", - ] + ]) `); const chatWrapper2 = new GeneralChatWrapper({ @@ -158,12 +143,9 @@ describe("GeneralChatWrapper", () => { }); const {contextText: contextText2} = chatWrapper2.generateContextState({chatHistory: conversationHistory2}); - expect(contextText2.values).toMatchInlineSnapshot(` - [ - { - "type": "specialToken", - "value": "BOS", - }, + expect(contextText2).toMatchInlineSnapshot(` + LlamaText([ + new SpecialToken("BOS"), "You are a helpful, respectful and honest assistant. Always answer as helpfully as possible. If a question does not make any sense, or is not factually coherent, explain why instead of answering something incorrectly. If you don't know the answer to a question, don't share false information. @@ -178,7 +160,7 @@ describe("GeneralChatWrapper", () => { ### Response I'm good, how are you?", - ] + ]) `); }); }); diff --git a/test/standalone/chatWrappers/Llama2ChatWrapper.test.ts b/test/standalone/chatWrappers/Llama2ChatWrapper.test.ts index b871d647..fabd7bd4 100644 --- a/test/standalone/chatWrappers/Llama2ChatWrapper.test.ts +++ b/test/standalone/chatWrappers/Llama2ChatWrapper.test.ts @@ -35,83 +35,47 @@ describe("Llama2ChatWrapper", () => { const chatWrapper = new Llama2ChatWrapper(); const {contextText} = chatWrapper.generateContextState({chatHistory: conversationHistory}); - expect(contextText.values).toMatchInlineSnapshot(` - [ - { - "type": "specialToken", - "value": "BOS", - }, - { - "type": "specialTokensText", - "value": "[INST] <> - ", - }, + expect(contextText).toMatchInlineSnapshot(` + LlamaText([ + new SpecialToken("BOS"), + new SpecialTokensText("[INST] <> + "), "You are a helpful, respectful and honest assistant. Always answer as helpfully as possible. If a question does not make any sense, or is not factually coherent, explain why instead of answering something incorrectly. If you don't know the answer to a question, don't share false information.", - { - "type": "specialTokensText", - "value": " + new SpecialTokensText(" <> - ", - }, + "), "Hi there!", - { - "type": "specialTokensText", - "value": " [/INST] ", - }, + new SpecialTokensText(" [/INST] "), "Hello!", - ] + ]) `); const chatWrapper2 = new Llama2ChatWrapper(); const {contextText: contextText2} = chatWrapper2.generateContextState({chatHistory: conversationHistory2}); - expect(contextText2.values).toMatchInlineSnapshot(` - [ - { - "type": "specialToken", - "value": "BOS", - }, - { - "type": "specialTokensText", - "value": "[INST] <> - ", - }, + expect(contextText2).toMatchInlineSnapshot(` + LlamaText([ + new SpecialToken("BOS"), + new SpecialTokensText("[INST] <> + "), "You are a helpful, respectful and honest assistant. Always answer as helpfully as possible. 
If a question does not make any sense, or is not factually coherent, explain why instead of answering something incorrectly. If you don't know the answer to a question, don't share false information.", - { - "type": "specialTokensText", - "value": " + new SpecialTokensText(" <> - ", - }, + "), "Hi there!", - { - "type": "specialTokensText", - "value": " [/INST] ", - }, + new SpecialTokensText(" [/INST] "), "Hello!", - { - "type": "specialToken", - "value": "EOS", - }, - { - "type": "specialToken", - "value": "BOS", - }, - { - "type": "specialTokensText", - "value": "[INST] ", - }, + new SpecialToken("EOS"), + new SpecialToken("BOS"), + new SpecialTokensText("[INST] "), "How are you?", - { - "type": "specialTokensText", - "value": " [/INST] ", - }, + new SpecialTokensText(" [/INST] "), "I'm good, how are you?", - ] + ]) `); const chatWrapper3 = new Llama2ChatWrapper(); @@ -126,64 +90,40 @@ describe("Llama2ChatWrapper", () => { ] }); - expect(contextText3.values).toMatchInlineSnapshot(` - [ - { - "type": "specialToken", - "value": "BOS", - }, - { - "type": "specialTokensText", - "value": "[INST] <> - ", - }, + expect(contextText3).toMatchInlineSnapshot(` + LlamaText([ + new SpecialToken("BOS"), + new SpecialTokensText("[INST] <> + "), "You are a helpful, respectful and honest assistant. Always answer as helpfully as possible. If a question does not make any sense, or is not factually coherent, explain why instead of answering something incorrectly. If you don't know the answer to a question, don't share false information.", - { - "type": "specialTokensText", - "value": " + new SpecialTokensText(" <> - ", - }, + "), "Hi there!", - { - "type": "specialTokensText", - "value": " [/INST] ", - }, + new SpecialTokensText(" [/INST] "), "Hello!", - ] + ]) `); - expect(contextText3WithOpenModelResponse.values).toMatchInlineSnapshot(` - [ - { - "type": "specialToken", - "value": "BOS", - }, - { - "type": "specialTokensText", - "value": "[INST] <> - ", - }, + expect(contextText3WithOpenModelResponse).toMatchInlineSnapshot(` + LlamaText([ + new SpecialToken("BOS"), + new SpecialTokensText("[INST] <> + "), "You are a helpful, respectful and honest assistant. Always answer as helpfully as possible. If a question does not make any sense, or is not factually coherent, explain why instead of answering something incorrectly. If you don't know the answer to a question, don't share false information.", - { - "type": "specialTokensText", - "value": " + new SpecialTokensText(" <> - ", - }, + "), "Hi there!", - { - "type": "specialTokensText", - "value": " [/INST] ", - }, + new SpecialTokensText(" [/INST] "), "Hello! 
", - ] + ]) `); }); }); diff --git a/test/standalone/chatWrappers/Llama3ChatWrapper.test.ts b/test/standalone/chatWrappers/Llama3ChatWrapper.test.ts index e25d76cc..55cc5d24 100644 --- a/test/standalone/chatWrappers/Llama3ChatWrapper.test.ts +++ b/test/standalone/chatWrappers/Llama3ChatWrapper.test.ts @@ -35,107 +35,59 @@ describe("Llama3ChatWrapper", () => { const chatWrapper = new Llama3ChatWrapper(); const {contextText} = chatWrapper.generateContextState({chatHistory: conversationHistory}); - expect(contextText.values).toMatchInlineSnapshot(` - [ - { - "type": "specialToken", - "value": "BOS", - }, - { - "type": "specialTokensText", - "value": "<|start_header_id|>system<|end_header_id|> + expect(contextText).toMatchInlineSnapshot(` + LlamaText([ + new SpecialToken("BOS"), + new SpecialTokensText("<|start_header_id|>system<|end_header_id|> - ", - }, + "), "You are a helpful, respectful and honest assistant. Always answer as helpfully as possible. If a question does not make any sense, or is not factually coherent, explain why instead of answering something incorrectly. If you don't know the answer to a question, don't share false information.", - { - "type": "specialToken", - "value": "EOT", - }, - { - "type": "specialTokensText", - "value": "<|start_header_id|>user<|end_header_id|> + new SpecialToken("EOT"), + new SpecialTokensText("<|start_header_id|>user<|end_header_id|> - ", - }, + "), "Hi there!", - { - "type": "specialToken", - "value": "EOT", - }, - { - "type": "specialTokensText", - "value": "<|start_header_id|>assistant<|end_header_id|> + new SpecialToken("EOT"), + new SpecialTokensText("<|start_header_id|>assistant<|end_header_id|> - ", - }, + "), "Hello!", - ] + ]) `); const chatWrapper2 = new Llama3ChatWrapper(); const {contextText: contextText2} = chatWrapper2.generateContextState({chatHistory: conversationHistory2}); - expect(contextText2.values).toMatchInlineSnapshot(` - [ - { - "type": "specialToken", - "value": "BOS", - }, - { - "type": "specialTokensText", - "value": "<|start_header_id|>system<|end_header_id|> + expect(contextText2).toMatchInlineSnapshot(` + LlamaText([ + new SpecialToken("BOS"), + new SpecialTokensText("<|start_header_id|>system<|end_header_id|> - ", - }, + "), "You are a helpful, respectful and honest assistant. Always answer as helpfully as possible. If a question does not make any sense, or is not factually coherent, explain why instead of answering something incorrectly. 
If you don't know the answer to a question, don't share false information.", - { - "type": "specialToken", - "value": "EOT", - }, - { - "type": "specialTokensText", - "value": "<|start_header_id|>user<|end_header_id|> + new SpecialToken("EOT"), + new SpecialTokensText("<|start_header_id|>user<|end_header_id|> - ", - }, + "), "Hi there!", - { - "type": "specialToken", - "value": "EOT", - }, - { - "type": "specialTokensText", - "value": "<|start_header_id|>assistant<|end_header_id|> + new SpecialToken("EOT"), + new SpecialTokensText("<|start_header_id|>assistant<|end_header_id|> - ", - }, + "), "Hello!", - { - "type": "specialToken", - "value": "EOT", - }, - { - "type": "specialTokensText", - "value": "<|start_header_id|>user<|end_header_id|> + new SpecialToken("EOT"), + new SpecialTokensText("<|start_header_id|>user<|end_header_id|> - ", - }, + "), "How are you?", - { - "type": "specialToken", - "value": "EOT", - }, - { - "type": "specialTokensText", - "value": "<|start_header_id|>assistant<|end_header_id|> + new SpecialToken("EOT"), + new SpecialTokensText("<|start_header_id|>assistant<|end_header_id|> - ", - }, + "), "I'm good, how are you?", - ] + ]) `); const chatWrapper3 = new Llama3ChatWrapper(); @@ -150,84 +102,48 @@ describe("Llama3ChatWrapper", () => { ] }); - expect(contextText3.values).toMatchInlineSnapshot(` - [ - { - "type": "specialToken", - "value": "BOS", - }, - { - "type": "specialTokensText", - "value": "<|start_header_id|>system<|end_header_id|> + expect(contextText3).toMatchInlineSnapshot(` + LlamaText([ + new SpecialToken("BOS"), + new SpecialTokensText("<|start_header_id|>system<|end_header_id|> - ", - }, + "), "You are a helpful, respectful and honest assistant. Always answer as helpfully as possible. If a question does not make any sense, or is not factually coherent, explain why instead of answering something incorrectly. If you don't know the answer to a question, don't share false information.", - { - "type": "specialToken", - "value": "EOT", - }, - { - "type": "specialTokensText", - "value": "<|start_header_id|>user<|end_header_id|> + new SpecialToken("EOT"), + new SpecialTokensText("<|start_header_id|>user<|end_header_id|> - ", - }, + "), "Hi there!", - { - "type": "specialToken", - "value": "EOT", - }, - { - "type": "specialTokensText", - "value": "<|start_header_id|>assistant<|end_header_id|> + new SpecialToken("EOT"), + new SpecialTokensText("<|start_header_id|>assistant<|end_header_id|> - ", - }, + "), "Hello!", - ] + ]) `); - expect(contextText3WithOpenModelResponse.values).toMatchInlineSnapshot(` - [ - { - "type": "specialToken", - "value": "BOS", - }, - { - "type": "specialTokensText", - "value": "<|start_header_id|>system<|end_header_id|> + expect(contextText3WithOpenModelResponse).toMatchInlineSnapshot(` + LlamaText([ + new SpecialToken("BOS"), + new SpecialTokensText("<|start_header_id|>system<|end_header_id|> - ", - }, + "), "You are a helpful, respectful and honest assistant. Always answer as helpfully as possible. If a question does not make any sense, or is not factually coherent, explain why instead of answering something incorrectly. 
If you don't know the answer to a question, don't share false information.", - { - "type": "specialToken", - "value": "EOT", - }, - { - "type": "specialTokensText", - "value": "<|start_header_id|>user<|end_header_id|> + new SpecialToken("EOT"), + new SpecialTokensText("<|start_header_id|>user<|end_header_id|> - ", - }, + "), "Hi there!", - { - "type": "specialToken", - "value": "EOT", - }, - { - "type": "specialTokensText", - "value": "<|start_header_id|>assistant<|end_header_id|> + new SpecialToken("EOT"), + new SpecialTokensText("<|start_header_id|>assistant<|end_header_id|> - ", - }, + "), "Hello! ", - ] + ]) `); }); }); diff --git a/test/standalone/chatWrappers/Llama3_1ChatWrapper.test.ts b/test/standalone/chatWrappers/Llama3_1ChatWrapper.test.ts index 1ef8c02b..fe8530da 100644 --- a/test/standalone/chatWrappers/Llama3_1ChatWrapper.test.ts +++ b/test/standalone/chatWrappers/Llama3_1ChatWrapper.test.ts @@ -59,18 +59,12 @@ describe("Llama3_1ChatWrapper", () => { const chatWrapper = new Llama3_1ChatWrapper({todayDate}); const {contextText} = chatWrapper.generateContextState({chatHistory: conversationHistory}); - expect(contextText.values).toMatchInlineSnapshot(` - [ - { - "type": "specialToken", - "value": "BOS", - }, - { - "type": "specialTokensText", - "value": "<|start_header_id|>system<|end_header_id|> + expect(contextText).toMatchInlineSnapshot(` + LlamaText([ + new SpecialToken("BOS"), + new SpecialTokensText("<|start_header_id|>system<|end_header_id|> - ", - }, + "), "Cutting Knowledge Date: December 2023 Today Date: 26 Jul 2024 @@ -81,29 +75,17 @@ describe("Llama3_1ChatWrapper", () => { You are a helpful, respectful and honest assistant. Always answer as helpfully as possible. If a question does not make any sense, or is not factually coherent, explain why instead of answering something incorrectly. If you don't know the answer to a question, don't share false information.", - { - "type": "specialToken", - "value": "EOT", - }, - { - "type": "specialTokensText", - "value": "<|start_header_id|>user<|end_header_id|> + new SpecialToken("EOT"), + new SpecialTokensText("<|start_header_id|>user<|end_header_id|> - ", - }, + "), "Hi there!", - { - "type": "specialToken", - "value": "EOT", - }, - { - "type": "specialTokensText", - "value": "<|start_header_id|>assistant<|end_header_id|> + new SpecialToken("EOT"), + new SpecialTokensText("<|start_header_id|>assistant<|end_header_id|> - ", - }, + "), "Hello!", - ] + ]) `); const chatWrapper2 = new Llama3_1ChatWrapper({todayDate}); @@ -112,18 +94,12 @@ describe("Llama3_1ChatWrapper", () => { availableFunctions: conversationHistory2Functions }); - expect(contextText2.values).toMatchInlineSnapshot(` - [ - { - "type": "specialToken", - "value": "BOS", - }, - { - "type": "specialTokensText", - "value": "<|start_header_id|>system<|end_header_id|> + expect(contextText2).toMatchInlineSnapshot(` + LlamaText([ + new SpecialToken("BOS"), + new SpecialTokensText("<|start_header_id|>system<|end_header_id|> - ", - }, + "), "Cutting Knowledge Date: December 2023 Today Date: 26 Jul 2024 @@ -148,20 +124,11 @@ describe("Llama3_1ChatWrapper", () => { Here is an example, ", - { - "type": "specialTokensText", - "value": "", - }, + new SpecialTokensText(">"), "{"example_name": "example_value"}", - { - "type": "specialTokensText", - "value": "", - }, + new SpecialTokensText(""), " Reminder: @@ -176,75 +143,39 @@ describe("Llama3_1ChatWrapper", () => { You are a helpful, respectful and honest assistant. Always answer as helpfully as possible. 
If a question does not make any sense, or is not factually coherent, explain why instead of answering something incorrectly. If you don't know the answer to a question, don't share false information.", - { - "type": "specialToken", - "value": "EOT", - }, - { - "type": "specialTokensText", - "value": "<|start_header_id|>user<|end_header_id|> + new SpecialToken("EOT"), + new SpecialTokensText("<|start_header_id|>user<|end_header_id|> - ", - }, + "), "Hi there!", - { - "type": "specialToken", - "value": "EOT", - }, - { - "type": "specialTokensText", - "value": "<|start_header_id|>assistant<|end_header_id|> + new SpecialToken("EOT"), + new SpecialTokensText("<|start_header_id|>assistant<|end_header_id|> - ", - }, + "), "Hello!", - { - "type": "specialToken", - "value": "EOT", - }, - { - "type": "specialTokensText", - "value": "<|start_header_id|>user<|end_header_id|> + new SpecialToken("EOT"), + new SpecialTokensText("<|start_header_id|>user<|end_header_id|> - ", - }, + "), "What is the time?", - { - "type": "specialToken", - "value": "EOT", - }, - { - "type": "specialTokensText", - "value": "<|start_header_id|>assistant<|end_header_id|> - - assistant<|end_header_id|> + + ", - }, + new SpecialTokensText(">"), "{"hours": "24", "seconds": true}", - { - "type": "specialTokensText", - "value": "<|eom_id|> + new SpecialTokensText("<|eom_id|> <|start_header_id|>ipython<|end_header_id|> - ", - }, + "), ""22:00:00"", - { - "type": "specialToken", - "value": "EOT", - }, - { - "type": "specialTokensText", - "value": "<|start_header_id|>assistant<|end_header_id|> + new SpecialToken("EOT"), + new SpecialTokensText("<|start_header_id|>assistant<|end_header_id|> - ", - }, + "), "I'm good, how are you?", - ] + ]) `); const chatWrapper3 = new Llama3_1ChatWrapper({todayDate}); @@ -259,18 +190,12 @@ describe("Llama3_1ChatWrapper", () => { ] }); - expect(contextText3.values).toMatchInlineSnapshot(` - [ - { - "type": "specialToken", - "value": "BOS", - }, - { - "type": "specialTokensText", - "value": "<|start_header_id|>system<|end_header_id|> + expect(contextText3).toMatchInlineSnapshot(` + LlamaText([ + new SpecialToken("BOS"), + new SpecialTokensText("<|start_header_id|>system<|end_header_id|> - ", - }, + "), "Cutting Knowledge Date: December 2023 Today Date: 26 Jul 2024 @@ -281,43 +206,25 @@ describe("Llama3_1ChatWrapper", () => { You are a helpful, respectful and honest assistant. Always answer as helpfully as possible. If a question does not make any sense, or is not factually coherent, explain why instead of answering something incorrectly. 
If you don't know the answer to a question, don't share false information.", - { - "type": "specialToken", - "value": "EOT", - }, - { - "type": "specialTokensText", - "value": "<|start_header_id|>user<|end_header_id|> + new SpecialToken("EOT"), + new SpecialTokensText("<|start_header_id|>user<|end_header_id|> - ", - }, + "), "Hi there!", - { - "type": "specialToken", - "value": "EOT", - }, - { - "type": "specialTokensText", - "value": "<|start_header_id|>assistant<|end_header_id|> + new SpecialToken("EOT"), + new SpecialTokensText("<|start_header_id|>assistant<|end_header_id|> - ", - }, + "), "Hello!", - ] + ]) `); - expect(contextText3WithOpenModelResponse.values).toMatchInlineSnapshot(` - [ - { - "type": "specialToken", - "value": "BOS", - }, - { - "type": "specialTokensText", - "value": "<|start_header_id|>system<|end_header_id|> + expect(contextText3WithOpenModelResponse).toMatchInlineSnapshot(` + LlamaText([ + new SpecialToken("BOS"), + new SpecialTokensText("<|start_header_id|>system<|end_header_id|> - ", - }, + "), "Cutting Knowledge Date: December 2023 Today Date: 26 Jul 2024 @@ -328,31 +235,19 @@ describe("Llama3_1ChatWrapper", () => { You are a helpful, respectful and honest assistant. Always answer as helpfully as possible. If a question does not make any sense, or is not factually coherent, explain why instead of answering something incorrectly. If you don't know the answer to a question, don't share false information.", - { - "type": "specialToken", - "value": "EOT", - }, - { - "type": "specialTokensText", - "value": "<|start_header_id|>user<|end_header_id|> + new SpecialToken("EOT"), + new SpecialTokensText("<|start_header_id|>user<|end_header_id|> - ", - }, + "), "Hi there!", - { - "type": "specialToken", - "value": "EOT", - }, - { - "type": "specialTokensText", - "value": "<|start_header_id|>assistant<|end_header_id|> + new SpecialToken("EOT"), + new SpecialTokensText("<|start_header_id|>assistant<|end_header_id|> - ", - }, + "), "Hello! ", - ] + ]) `); }); }); diff --git a/test/standalone/chatWrappers/MistralChatWrapper.test.ts b/test/standalone/chatWrappers/MistralChatWrapper.test.ts index 8808d436..7eef8372 100644 --- a/test/standalone/chatWrappers/MistralChatWrapper.test.ts +++ b/test/standalone/chatWrappers/MistralChatWrapper.test.ts @@ -59,26 +59,17 @@ describe("MistralChatWrapper", () => { const chatWrapper = new MistralChatWrapper(); const {contextText} = chatWrapper.generateContextState({chatHistory: conversationHistory}); - expect(contextText.values).toMatchInlineSnapshot(` - [ - { - "type": "specialToken", - "value": "BOS", - }, - { - "type": "specialTokensText", - "value": "[INST]", - }, + expect(contextText).toMatchInlineSnapshot(` + LlamaText([ + new SpecialToken("BOS"), + new SpecialTokensText("[INST]"), "You are a helpful, respectful and honest assistant. Always answer as helpfully as possible. If a question does not make any sense, or is not factually coherent, explain why instead of answering something incorrectly. If you don't know the answer to a question, don't share false information. 
Hi there!", - { - "type": "specialTokensText", - "value": "[/INST]", - }, + new SpecialTokensText("[/INST]"), "Hello!", - ] + ]) `); const chatWrapper2 = new MistralChatWrapper(); @@ -87,59 +78,29 @@ describe("MistralChatWrapper", () => { availableFunctions: conversationHistory2Functions }); - expect(contextText2.values).toMatchInlineSnapshot(` - [ - { - "type": "specialToken", - "value": "BOS", - }, - { - "type": "specialTokensText", - "value": "[INST]", - }, + expect(contextText2).toMatchInlineSnapshot(` + LlamaText([ + new SpecialToken("BOS"), + new SpecialTokensText("[INST]"), "Hi there!", - { - "type": "specialTokensText", - "value": "[/INST]", - }, + new SpecialTokensText("[/INST]"), "Hello!", - { - "type": "specialToken", - "value": "EOS", - }, - { - "type": "specialTokensText", - "value": "[AVAILABLE_TOOLS]", - }, + new SpecialToken("EOS"), + new SpecialTokensText("[AVAILABLE_TOOLS]"), "[{"type": "function", "function": {"name": "getTime", "description": "Retrieve the current time", "parameters": {"type": "object", "properties": {"hours": {"enum": ["24", "12"]}, "seconds": {"type": "boolean"}}}}}]", - { - "type": "specialTokensText", - "value": "[/AVAILABLE_TOOLS][INST]", - }, + new SpecialTokensText("[/AVAILABLE_TOOLS][INST]"), "You are a helpful, respectful and honest assistant. Always answer as helpfully as possible. If a question does not make any sense, or is not factually coherent, explain why instead of answering something incorrectly. If you don't know the answer to a question, don't share false information. What is the time?", - { - "type": "specialTokensText", - "value": "[/INST][TOOL_CALLS]", - }, + new SpecialTokensText("[/INST][TOOL_CALLS]"), "[{"name": "getTime", "arguments": {"hours": "24", "seconds": true}}]", - { - "type": "specialToken", - "value": "EOS", - }, - { - "type": "specialTokensText", - "value": "[TOOL_RESULTS]", - }, - "[{"name": "getTime", "content": "22:00:00"}]", - { - "type": "specialTokensText", - "value": "[/TOOL_RESULTS]", - }, + new SpecialToken("EOS"), + new SpecialTokensText("[TOOL_RESULTS]"), + "{"name": "getTime", "content": "22:00:00"}", + new SpecialTokensText("[/TOOL_RESULTS]"), "I'm good, how are you?", - ] + ]) `); const chatWrapper3 = new MistralChatWrapper(); @@ -154,50 +115,32 @@ describe("MistralChatWrapper", () => { ] }); - expect(contextText3.values).toMatchInlineSnapshot(` - [ - { - "type": "specialToken", - "value": "BOS", - }, - { - "type": "specialTokensText", - "value": "[INST]", - }, + expect(contextText3).toMatchInlineSnapshot(` + LlamaText([ + new SpecialToken("BOS"), + new SpecialTokensText("[INST]"), "You are a helpful, respectful and honest assistant. Always answer as helpfully as possible. If a question does not make any sense, or is not factually coherent, explain why instead of answering something incorrectly. If you don't know the answer to a question, don't share false information. Hi there!", - { - "type": "specialTokensText", - "value": "[/INST]", - }, + new SpecialTokensText("[/INST]"), "Hello!", - ] + ]) `); - expect(contextText3WithOpenModelResponse.values).toMatchInlineSnapshot(` - [ - { - "type": "specialToken", - "value": "BOS", - }, - { - "type": "specialTokensText", - "value": "[INST]", - }, + expect(contextText3WithOpenModelResponse).toMatchInlineSnapshot(` + LlamaText([ + new SpecialToken("BOS"), + new SpecialTokensText("[INST]"), "You are a helpful, respectful and honest assistant. Always answer as helpfully as possible. 
If a question does not make any sense, or is not factually coherent, explain why instead of answering something incorrectly. If you don't know the answer to a question, don't share false information. Hi there!", - { - "type": "specialTokensText", - "value": "[/INST]", - }, + new SpecialTokensText("[/INST]"), "Hello! ", - ] + ]) `); }); }); diff --git a/test/standalone/chatWrappers/QwenChatWrapper.test.ts b/test/standalone/chatWrappers/QwenChatWrapper.test.ts new file mode 100644 index 00000000..577bfa14 --- /dev/null +++ b/test/standalone/chatWrappers/QwenChatWrapper.test.ts @@ -0,0 +1,344 @@ +import {describe, expect, test} from "vitest"; +import {ChatHistoryItem, ChatModelFunctions, QwenChatWrapper} from "../../../src/index.js"; +import {defaultChatSystemPrompt} from "../../../src/config.js"; + + +describe("QwenChatWrapper", () => { + const conversationHistory: ChatHistoryItem[] = [{ + type: "system", + text: defaultChatSystemPrompt + }, { + type: "user", + text: "Hi there!" + }, { + type: "model", + response: ["Hello!"] + }]; + const conversationHistory2: ChatHistoryItem[] = [ + ...(new QwenChatWrapper()).generateInitialChatHistory({systemPrompt: defaultChatSystemPrompt}), { + type: "user", + text: "Hi there!" + }, { + type: "model", + response: ["Hello!"] + }, { + type: "user", + text: "What is the time?" + }, { + type: "model", + response: [{ + type: "functionCall", + name: "getTime", + description: "Retrieve the current time", + params: { + hours: "24", + seconds: true + }, + result: "22:00:00" + }, "I'm good, how are you?"] + } + ]; + const conversationHistory2Functions: ChatModelFunctions = { + getTime: { + description: "Retrieve the current time", + params: { + type: "object", + properties: { + hours: { + enum: ["24", "12"] + }, + seconds: { + type: "boolean" + } + } + } + } + }; + const conversationHistory3: ChatHistoryItem[] = [ + ...(new QwenChatWrapper()).generateInitialChatHistory({systemPrompt: defaultChatSystemPrompt}), { + type: "user", + text: "Hi there!" + }, { + type: "model", + response: ["Hello!"] + }, { + type: "user", + text: "What is the time?" + }, { + type: "model", + response: ["I'll fet some information for you", { + type: "functionCall", + name: "getTime", + description: "Retrieve the current time", + params: { + hours: "24", + seconds: true + }, + result: "22:00:00", + startsNewChunk: true + }, { + type: "functionCall", + name: "getDate", + description: "Retrieve the current date", + params: { + timezone: 0 + }, + result: "2025-03-20T00:00:00Z", + startsNewChunk: false + }, "I'm good, how are you?"] + } + ]; + const conversationHistory3Functions: ChatModelFunctions = { + getTime: { + description: "Retrieve the current time", + params: { + type: "object", + properties: { + hours: { + enum: ["24", "12"] + }, + seconds: { + type: "boolean" + } + } + } + }, + getDate: { + description: "Retrieve the current date", + params: { + type: "object", + properties: { + timezone: { + type: "integer" + } + } + } + } + }; + + test("should generate valid context text", () => { + const chatWrapper = new QwenChatWrapper(); + const {contextText} = chatWrapper.generateContextState({chatHistory: conversationHistory}); + + expect(contextText).toMatchInlineSnapshot(` + LlamaText([ + new SpecialTokensText("<|im_start|>system + "), + "You are a helpful, respectful and honest assistant. Always answer as helpfully as possible. + If a question does not make any sense, or is not factually coherent, explain why instead of answering something incorrectly. 
If you don't know the answer to a question, don't share false information.", + new SpecialTokensText("<|im_end|> + <|im_start|>user + "), + "Hi there!", + new SpecialTokensText("<|im_end|> + <|im_start|>assistant + "), + "Hello!", + ]) + `); + + const chatWrapper2 = new QwenChatWrapper(); + const {contextText: contextText2} = chatWrapper2.generateContextState({ + chatHistory: conversationHistory2, + availableFunctions: conversationHistory2Functions + }); + + expect(contextText2).toMatchInlineSnapshot(` + LlamaText([ + new SpecialTokensText("<|im_start|>system + "), + "You are a helpful, respectful and honest assistant. Always answer as helpfully as possible. + If a question does not make any sense, or is not factually coherent, explain why instead of answering something incorrectly. If you don't know the answer to a question, don't share false information. + + # Tools + + You may call one or more functions to assist with the user query. + + You are provided with function signatures within ", + new SpecialTokensText(""), + " XML tags: + ", + new SpecialTokensText(""), + " + {"type": "function", "function": {"name": "getTime", "description": "Retrieve the current time", "parameters": {"type": "object", "properties": {"hours": {"enum": ["24", "12"]}, "seconds": {"type": "boolean"}}}}} + ", + new SpecialTokensText(""), + " + + For each function call, return a json object with function name and arguments within ", + new SpecialTokensText(""), + " XML tags: + ", + new SpecialTokensText(""), + " + {"name": , "arguments": } + ", + new SpecialTokensText("<|im_end|> + <|im_start|>user + "), + "Hi there!", + new SpecialTokensText("<|im_end|> + <|im_start|>assistant + "), + "Hello!", + new SpecialTokensText("<|im_end|> + <|im_start|>user + "), + "What is the time?", + new SpecialTokensText("<|im_end|> + <|im_start|>assistant + "), + " + ", + new SpecialTokensText(""), + " + {"name": "getTime", "arguments": {"hours": "24", "seconds": true}} + ", + new SpecialTokensText("<|im_end|> + <|im_start|>user + + "), + ""22:00:00"", + new SpecialTokensText(" + <|im_end|> + <|im_start|>assistant + "), + "I'm good, how are you?", + ]) + `); + + const chatWrapper3 = new QwenChatWrapper(); + const {contextText: contextText3} = chatWrapper3.generateContextState({chatHistory: conversationHistory}); + const {contextText: contextText3WithOpenModelResponse} = chatWrapper3.generateContextState({ + chatHistory: [ + ...conversationHistory, + { + type: "model", + response: [] + } + ] + }); + + expect(contextText3).toMatchInlineSnapshot(` + LlamaText([ + new SpecialTokensText("<|im_start|>system + "), + "You are a helpful, respectful and honest assistant. Always answer as helpfully as possible. + If a question does not make any sense, or is not factually coherent, explain why instead of answering something incorrectly. If you don't know the answer to a question, don't share false information.", + new SpecialTokensText("<|im_end|> + <|im_start|>user + "), + "Hi there!", + new SpecialTokensText("<|im_end|> + <|im_start|>assistant + "), + "Hello!", + ]) + `); + + expect(contextText3WithOpenModelResponse).toMatchInlineSnapshot(` + LlamaText([ + new SpecialTokensText("<|im_start|>system + "), + "You are a helpful, respectful and honest assistant. Always answer as helpfully as possible. + If a question does not make any sense, or is not factually coherent, explain why instead of answering something incorrectly. 
If you don't know the answer to a question, don't share false information.", + new SpecialTokensText("<|im_end|> + <|im_start|>user + "), + "Hi there!", + new SpecialTokensText("<|im_end|> + <|im_start|>assistant + "), + "Hello!", + new SpecialTokensText("<|im_end|> + <|im_start|>assistant + "), + ]) + `); + }); + + test("should generate valid context text for 2 sequential function calls", () => { + const chatWrapper = new QwenChatWrapper(); + const {contextText} = chatWrapper.generateContextState({ + chatHistory: conversationHistory3, + availableFunctions: conversationHistory3Functions + }); + + expect(contextText).toMatchInlineSnapshot(` + LlamaText([ + new SpecialTokensText("<|im_start|>system + "), + "You are a helpful, respectful and honest assistant. Always answer as helpfully as possible. + If a question does not make any sense, or is not factually coherent, explain why instead of answering something incorrectly. If you don't know the answer to a question, don't share false information. + + # Tools + + You may call one or more functions to assist with the user query. + + You are provided with function signatures within ", + new SpecialTokensText(""), + " XML tags: + ", + new SpecialTokensText(""), + " + {"type": "function", "function": {"name": "getTime", "description": "Retrieve the current time", "parameters": {"type": "object", "properties": {"hours": {"enum": ["24", "12"]}, "seconds": {"type": "boolean"}}}}} + {"type": "function", "function": {"name": "getDate", "description": "Retrieve the current date", "parameters": {"type": "object", "properties": {"timezone": {"type": "integer"}}}}} + ", + new SpecialTokensText(""), + " + + For each function call, return a json object with function name and arguments within ", + new SpecialTokensText(""), + " XML tags: + ", + new SpecialTokensText(""), + " + {"name": , "arguments": } + ", + new SpecialTokensText("<|im_end|> + <|im_start|>user + "), + "Hi there!", + new SpecialTokensText("<|im_end|> + <|im_start|>assistant + "), + "Hello!", + new SpecialTokensText("<|im_end|> + <|im_start|>user + "), + "What is the time?", + new SpecialTokensText("<|im_end|> + <|im_start|>assistant + "), + "I'll fet some information for you + ", + new SpecialTokensText(""), + " + {"name": "getTime", "arguments": {"hours": "24", "seconds": true}} + ", + new SpecialTokensText(""), + " + ", + new SpecialTokensText(""), + " + {"name": "getDate", "arguments": {"timezone": 0}} + ", + new SpecialTokensText("<|im_end|> + <|im_start|>user + + "), + ""22:00:00"", + new SpecialTokensText(" + + + "), + ""2025-03-20T00:00:00Z"", + new SpecialTokensText(" + <|im_end|> + <|im_start|>assistant + "), + "I'm good, how are you?", + ]) + `); + }); +}); diff --git a/test/standalone/chatWrappers/generic/JinjaTemplateChatWrapper.test.ts b/test/standalone/chatWrappers/generic/JinjaTemplateChatWrapper.test.ts index ac7cab69..d09c4405 100644 --- a/test/standalone/chatWrappers/generic/JinjaTemplateChatWrapper.test.ts +++ b/test/standalone/chatWrappers/generic/JinjaTemplateChatWrapper.test.ts @@ -1,7 +1,204 @@ import {describe, expect, test} from "vitest"; -import {ChatHistoryItem, JinjaTemplateChatWrapper} from "../../../../src/index.js"; +import {Template} from "@huggingface/jinja"; +import {ChatHistoryItem, ChatModelFunctions, JinjaTemplateChatWrapper} from "../../../../src/index.js"; import {defaultChatSystemPrompt} from "../../../../src/config.js"; - +import {LlamaText} from "../../../../src/utils/LlamaText.js"; +import {fromChatHistoryToIntermediateOpenAiMessages, 
fromIntermediateToCompleteOpenAiMessages} from "../../../../src/utils/OpenAIFormat.js"; +import {removeUndefinedFields} from "../../../../src/utils/removeNullFields.js"; + +const mistralJinjaTemplate = ` +{%- if messages[0]["role"] == "system" -%} + {%- set system_message = messages[0]["content"] -%} + {%- set loop_messages = messages[1:] -%} +{%- else -%} + {%- set loop_messages = messages -%} +{%- endif -%} +{%- if not tools is defined -%} + {%- set tools = none -%} +{%- endif -%} +{%- set user_messages = loop_messages | selectattr("role", "equalto", "user") | list -%} +{{- bos_token -}} +{%- for message in loop_messages -%} + {%- if message["role"] == "user" -%} + {%- if tools is not none and (message == user_messages[-1]) -%} + {{- "[AVAILABLE_TOOLS][" -}} + {%- for tool in tools -%} + {%- set tool = tool.function -%} + {{- '{"type": "function", "function": {' -}} + {%- for key, val in tool.items() if key != "return" -%} + {%- if val is string -%} + {{- '"' + key + '": "' + val + '"' -}} + {%- else -%} + {{- '"' + key + '": ' + val|tojson -}} + {%- endif -%} + {%- if not loop.last -%} + {{- ", " -}} + {%- endif -%} + {%- endfor -%} + {{- "}}" -}} + {%- if not loop.last -%} + {{- ", " -}} + {%- else -%} + {{- "]" -}} + {%- endif -%} + {%- endfor -%} + {{- "[/AVAILABLE_TOOLS]" -}} + {%- endif -%} + {%- if loop.last and system_message is defined -%} + {{- "[INST]" + system_message + "\\n\\n" + message["content"] + "[/INST]" -}} + {%- else -%} + {{- "[INST]" + message["content"] + "[/INST]" -}} + {%- endif -%} + {%- elif message["role"] == "tool_calls" or message.tool_calls is defined -%} + {%- if message.tool_calls is defined -%} + {%- set tool_calls = message.tool_calls -%} + {%- else -%} + {%- set tool_calls = message.content -%} + {%- endif -%} + {{- "[TOOL_CALLS][" -}} + {%- for tool_call in tool_calls -%} + {%- set out = tool_call.function|tojson -%} + {{- out[:-1] -}} + {%- if not tool_call.id is defined or tool_call.id|length != 9 -%} + {{- raise_exception("Tool call IDs should be alphanumeric strings with length 9!") -}} + {%- endif -%} + {{- ', "id": "' + tool_call.id + '"}' -}} + {%- if not loop.last -%} + {{- ", " -}} + {%- else -%} + {{- "]" + eos_token -}} + {%- endif -%} + {%- endfor -%} + {%- elif message["role"] == "assistant" -%} + {{- message["content"] + eos_token -}} + {%- elif message["role"] == "tool_results" or message["role"] == "tool" -%} + {%- if message.content is defined and message.content.content is defined -%} + {%- set content = message.content.content -%} + {%- else -%} + {%- set content = message.content -%} + {%- endif -%} + {{- '[TOOL_RESULTS]{"content": ' + content|string + ", " -}} + {%- if not message.tool_call_id is defined or message.tool_call_id|length != 9 -%} + {{- raise_exception("Tool call IDs should be alphanumeric strings with length 9!") -}} + {%- endif -%} + {{- '"call_id": "' + message.tool_call_id + '"}[/TOOL_RESULTS]' -}} + {%- else -%} + {{- raise_exception("Only user and assistant roles are supported, with the exception of an initial optional system message!") -}} + {%- endif -%} +{%- endfor -%} +`.slice(1, -1); + +const llama3_1ChatJinjaTemplate = ` +{{- bos_token }} +{%- if custom_tools is defined %} + {%- set tools = custom_tools %} +{%- endif %} +{%- if not tools_in_user_message is defined %} + {%- set tools_in_user_message = true %} +{%- endif %} +{%- if not date_string is defined %} + {%- set date_string = "26 Jul 2024" %} +{%- endif %} +{%- if not tools is defined %} + {%- set tools = none %} +{%- endif %} + +{#- This block 
extracts the system message, so we can slot it into the right place. #} +{%- if messages[0]['role'] == 'system' %} + {%- set system_message = messages[0]['content']|trim %} + {%- set messages = messages[1:] %} +{%- else %} + {%- set system_message = "" %} +{%- endif %} + +{#- System message + builtin tools #} +{{- "<|start_header_id|>system<|end_header_id|>\\n\\n" }} +{%- if builtin_tools is defined or tools is not none %} + {{- "Environment: ipython\\n" }} +{%- endif %} +{%- if builtin_tools is defined %} + {{- "Tools: " + builtin_tools | reject('equalto', 'code_interpreter') | join(", ") + "\\n\\n"}} +{%- endif %} +{{- "Cutting Knowledge Date: December 2023\\n" }} +{{- "Today Date: " + date_string + "\\n\\n" }} +{%- if tools is not none and not tools_in_user_message %} + {{- "You have access to the following functions. To call a function, please respond with JSON for a function call." }} + {{- 'Respond in the format {"name": function name, "parameters": dictionary of argument name and its value}.' }} + {{- "Do not use variables.\\n\\n" }} + {%- for t in tools %} + {{- t | tojson(indent=4) }} + {{- "\\n\\n" }} + {%- endfor %} +{%- endif %} +{{- system_message }} +{{- eot_token }} + +{#- Custom tools are passed in a user message with some extra guidance #} +{%- if tools_in_user_message and not tools is none %} + {#- Extract the first user message so we can plug it in here #} + {%- if messages | length != 0 %} + {%- set first_user_message = messages[0]['content']|trim %} + {%- set messages = messages[1:] %} + {%- else %} + {{- raise_exception("Cannot put tools in the first user message when there's no first user message!") }} +{%- endif %} + {{- '<|start_header_id|>user<|end_header_id|>\\n\\n' -}} + {{- "Given the following functions, please respond with a JSON for a function call " }} + {{- "with its proper arguments that best answers the given prompt.\\n\\n" }} + {{- 'Respond in the format {"name": function name, "parameters": dictionary of argument name and its value}.' 
}} + {{- "Do not use variables.\\n\\n" }} + {%- for t in tools %} + {{- t | tojson(indent=4) }} + {{- "\\n\\n" }} + {%- endfor %} + {{- first_user_message + eot_token}} +{%- endif %} + +{%- for message in messages %} + {%- if not (message.role == 'ipython' or message.role == 'tool' or 'tool_calls' in message) %} + {{- '<|start_header_id|>' + message['role'] + '<|end_header_id|>\\n\\n'+ message['content'] | trim + eot_token }} + {%- elif 'tool_calls' in message %} + {%- if not message.tool_calls|length == 1 %} + {{- raise_exception("This model only supports single tool-calls at once!") }} + {%- endif %} + {%- set tool_call = message.tool_calls[0].function %} + {%- if builtin_tools is defined and tool_call.name in builtin_tools %} + {{- '<|start_header_id|>assistant<|end_header_id|>\\n\\n' -}} + {{- "<|python_tag|>" + tool_call.name + ".call(" }} + {%- for arg_name, arg_val in tool_call.arguments | items %} + {{- arg_name + '="' + arg_val + '"' }} + {%- if not loop.last %} + {{- ", " }} + {%- endif %} + {%- endfor %} + {{- ")" }} + {%- else %} + {{- '<|start_header_id|>assistant<|end_header_id|>\\n\\n' -}} + {{- '{"name": "' + tool_call.name + '", ' }} + {{- '"parameters": ' }} + {{- tool_call.arguments | tojson }} + {{- "}" }} + {%- endif %} + {%- if builtin_tools is defined %} + {#- This means we're in ipython mode #} + {{- "<|eom_id|>" }} + {%- else %} + {{- eot_token }} + {%- endif %} + {%- elif message.role == "tool" or message.role == "ipython" %} + {{- "<|start_header_id|>ipython<|end_header_id|>\\n\\n" }} + {%- if message.content is mapping or message.content is iterable %} + {{- message.content | tojson }} + {%- else %} + {{- message.content }} + {%- endif %} + {{- eot_token }} + {%- endif %} +{%- endfor %} +{%- if add_generation_prompt %} + {{- '<|start_header_id|>assistant<|end_header_id|>\\n\\n' }} +{%- endif %} +`.slice(1, -1); describe("JinjaTemplateChatWrapper", () => { const template1 = @@ -196,120 +393,199 @@ describe("JinjaTemplateChatWrapper", () => { } } as const; + const conversationHistory4: ChatHistoryItem[] = [{ + type: "system", + text: LlamaText(defaultChatSystemPrompt).toJSON() + }, { + type: "user", + text: "Hi there!" + }, { + type: "model", + response: ["Hello!"] + }, { + type: "user", + text: "What is the time?" + }, { + type: "model", + response: [{ + type: "functionCall", + name: "getTime", + description: "Retrieve the current time", + params: { + hours: "24", + seconds: true + }, + result: "22:00:00" + }, "I'm good, how are you?"] + }]; + const functions4: ChatModelFunctions = { + getTime: { + description: "Retrieve the current time", + params: { + type: "object", + properties: { + hours: { + enum: ["24", "12"] + }, + seconds: { + type: "boolean" + } + } + } + } + }; + + // last model message is a function call + const conversationHistory5: ChatHistoryItem[] = [{ + type: "system", + text: LlamaText(defaultChatSystemPrompt).toJSON() + }, { + type: "user", + text: "Hi there!" + }, { + type: "model", + response: ["Hello!"] + }, { + type: "user", + text: "What is the time?" 
+ }, { + type: "model", + response: [{ + type: "functionCall", + name: "getTime", + description: "Retrieve the current time", + params: { + hours: "24", + seconds: true + }, + result: "22:00:00" + }] + }]; + const functions5: ChatModelFunctions = { + getTime: { + description: "Retrieve the current time", + params: { + type: "object", + properties: { + hours: { + enum: ["24", "12"] + }, + seconds: { + type: "boolean" + } + } + } + } + }; + + const sanity1ChatHistory: ChatHistoryItem[] = [{ + type: "system", + text: "systemMessage" + }, { + type: "user", + text: "userMessage1" + }, { + type: "model", + response: [ + // "modelMessage1", + { + type: "functionCall", + name: "func1name", + params: "func1params", + result: "func1result", + startsNewChunk: true + }, + { + type: "functionCall", + name: "func2name", + params: "func2params", + result: "func2result", + startsNewChunk: false + }, + "modelMessage2" + ] + }, { + type: "model", + response: ["modelMessage3"] + }, { + type: "model", + response: ["modelMessage4"] + }]; + const sanity1Functions: ChatModelFunctions = { + ["func1name"]: { + description: "func1description", + params: { + type: "number" + } + }, + ["func2name"]: { + description: "func2description", + params: { + type: "number" + } + } + }; + test("with system prompt support", () => { const chatWrapper = new JinjaTemplateChatWrapper({ template: template2 }); const {contextText, stopGenerationTriggers} = chatWrapper.generateContextState({chatHistory: conversationHistory}); - expect(contextText.values).toMatchInlineSnapshot(` - [ - { - "type": "specialTokensText", - "value": "<> - ", - }, + expect(contextText).toMatchInlineSnapshot(` + LlamaText([ + new SpecialTokensText("<> + "), "You are a helpful, respectful and honest assistant. Always answer as helpfully as possible. If a question does not make any sense, or is not factually coherent, explain why instead of answering something incorrectly. If you don't know the answer to a question, don't share false information.", - { - "type": "specialTokensText", - "value": " + new SpecialTokensText(" <> - ", - }, - { - "type": "specialToken", - "value": "BOS", - }, - { - "type": "specialTokensText", - "value": "[INST] ", - }, + "), + new SpecialToken("BOS"), + new SpecialTokensText("[INST] "), "Hi there!", - { - "type": "specialTokensText", - "value": " [/INST] ", - }, + new SpecialTokensText(" [/INST] "), "Hello!", - ] + ]) `); expect(stopGenerationTriggers).toMatchInlineSnapshot(` [ - LlamaText [ - { - "type": "specialToken", - "value": "EOS", - }, - ], - LlamaText [ - { - "type": "specialTokensText", - "value": " ", - }, - { - "type": "specialToken", - "value": "EOS", - }, - ], + LlamaText([ + new SpecialToken("EOS"), + ]), + LlamaText([ + new SpecialTokensText(" "), + new SpecialToken("EOS"), + ]), ] `); const {contextText: contextText2} = chatWrapper.generateContextState({chatHistory: conversationHistory2}); - expect(contextText2.values).toMatchInlineSnapshot(` - [ - { - "type": "specialTokensText", - "value": "<> - ", - }, + expect(contextText2).toMatchInlineSnapshot(` + LlamaText([ + new SpecialTokensText("<> + "), "You are a helpful, respectful and honest assistant. Always answer as helpfully as possible. If a question does not make any sense, or is not factually coherent, explain why instead of answering something incorrectly. 
If you don't know the answer to a question, don't share false information.", - { - "type": "specialTokensText", - "value": " + new SpecialTokensText(" <> - ", - }, - { - "type": "specialToken", - "value": "BOS", - }, - { - "type": "specialTokensText", - "value": "[INST] ", - }, + "), + new SpecialToken("BOS"), + new SpecialTokensText("[INST] "), "Hi there!", - { - "type": "specialTokensText", - "value": " [/INST] ", - }, + new SpecialTokensText(" [/INST] "), "Hello!", - { - "type": "specialTokensText", - "value": " ", - }, - { - "type": "specialToken", - "value": "EOS", - }, - { - "type": "specialToken", - "value": "BOS", - }, - { - "type": "specialTokensText", - "value": "[INST] ", - }, + new SpecialTokensText(" "), + new SpecialToken("EOS"), + new SpecialToken("BOS"), + new SpecialTokensText("[INST] "), "How are you?", - { - "type": "specialTokensText", - "value": " [/INST] ", - }, + new SpecialTokensText(" [/INST] "), "I'm good, how are you?", - ] + ]) `); const {contextText: contextText3} = chatWrapper.generateContextState({chatHistory: conversationHistory}); @@ -323,114 +599,60 @@ describe("JinjaTemplateChatWrapper", () => { ] }); - expect(contextText3.values).toMatchInlineSnapshot(` - [ - { - "type": "specialTokensText", - "value": "<> - ", - }, + expect(contextText3).toMatchInlineSnapshot(` + LlamaText([ + new SpecialTokensText("<> + "), "You are a helpful, respectful and honest assistant. Always answer as helpfully as possible. If a question does not make any sense, or is not factually coherent, explain why instead of answering something incorrectly. If you don't know the answer to a question, don't share false information.", - { - "type": "specialTokensText", - "value": " + new SpecialTokensText(" <> - ", - }, - { - "type": "specialToken", - "value": "BOS", - }, - { - "type": "specialTokensText", - "value": "[INST] ", - }, + "), + new SpecialToken("BOS"), + new SpecialTokensText("[INST] "), "Hi there!", - { - "type": "specialTokensText", - "value": " [/INST] ", - }, + new SpecialTokensText(" [/INST] "), "Hello!", - ] + ]) `); - expect(contextText3WithOpenModelResponse.values).toMatchInlineSnapshot(` - [ - { - "type": "specialTokensText", - "value": "<> - ", - }, + expect(contextText3WithOpenModelResponse).toMatchInlineSnapshot(` + LlamaText([ + new SpecialTokensText("<> + "), "You are a helpful, respectful and honest assistant. Always answer as helpfully as possible. If a question does not make any sense, or is not factually coherent, explain why instead of answering something incorrectly. If you don't know the answer to a question, don't share false information.", - { - "type": "specialTokensText", - "value": " + new SpecialTokensText(" <> - ", - }, - { - "type": "specialToken", - "value": "BOS", - }, - { - "type": "specialTokensText", - "value": "[INST] ", - }, + "), + new SpecialToken("BOS"), + new SpecialTokensText("[INST] "), "Hi there!", - { - "type": "specialTokensText", - "value": " [/INST] ", - }, + new SpecialTokensText(" [/INST] "), "Hello! 
", - ] + ]) `); const {contextText: contextText4} = chatWrapper.generateContextState({chatHistory: conversationHistory3}); - expect(contextText4.values).toMatchInlineSnapshot(` - [ - { - "type": "specialToken", - "value": "BOS", - }, - { - "type": "specialTokensText", - "value": "[INST] ", - }, + expect(contextText4).toMatchInlineSnapshot(` + LlamaText([ + new SpecialToken("BOS"), + new SpecialTokensText("[INST] "), "Hi there!", - { - "type": "specialTokensText", - "value": " [/INST] ", - }, + new SpecialTokensText(" [/INST] "), "Hello!", - { - "type": "specialTokensText", - "value": " ", - }, - { - "type": "specialToken", - "value": "EOS", - }, - { - "type": "specialToken", - "value": "BOS", - }, - { - "type": "specialTokensText", - "value": "[INST] ", - }, + new SpecialTokensText(" "), + new SpecialToken("EOS"), + new SpecialToken("BOS"), + new SpecialTokensText("[INST] "), "How are you?", - { - "type": "specialTokensText", - "value": " [/INST]", - }, - ] + new SpecialTokensText(" [/INST]"), + ]) `); }); @@ -440,16 +662,10 @@ describe("JinjaTemplateChatWrapper", () => { }); const {contextText} = chatWrapper.generateContextState({chatHistory: conversationHistory}); - expect(contextText.values).toMatchInlineSnapshot(` - [ - { - "type": "specialToken", - "value": "BOS", - }, - { - "type": "specialTokensText", - "value": "[INST] ", - }, + expect(contextText).toMatchInlineSnapshot(` + LlamaText([ + new SpecialToken("BOS"), + new SpecialTokensText("[INST] "), "### System message You are a helpful, respectful and honest assistant. Always answer as helpfully as possible. @@ -458,12 +674,9 @@ describe("JinjaTemplateChatWrapper", () => { ---- Hi there!", - { - "type": "specialTokensText", - "value": " [/INST]", - }, + new SpecialTokensText(" [/INST]"), "Hello!", - ] + ]) `); }); @@ -473,16 +686,10 @@ describe("JinjaTemplateChatWrapper", () => { }); const {contextText} = chatWrapper.generateContextState({chatHistory: conversationHistory}); - expect(contextText.values).toMatchInlineSnapshot(` - [ - { - "type": "specialToken", - "value": "BOS", - }, - { - "type": "specialTokensText", - "value": "[INST] ", - }, + expect(contextText).toMatchInlineSnapshot(` + LlamaText([ + new SpecialToken("BOS"), + new SpecialTokensText("[INST] "), "### System message You are a helpful, respectful and honest assistant. Always answer as helpfully as possible. @@ -491,12 +698,9 @@ describe("JinjaTemplateChatWrapper", () => { ---- Hi there!", - { - "type": "specialTokensText", - "value": " [/INST] ", - }, + new SpecialTokensText(" [/INST] "), "Hello!", - ] + ]) `); }); @@ -507,16 +711,10 @@ describe("JinjaTemplateChatWrapper", () => { }); const {contextText} = chatWrapper.generateContextState({chatHistory: conversationHistory}); - expect(contextText.values).toMatchInlineSnapshot(` - [ - { - "type": "specialToken", - "value": "BOS", - }, - { - "type": "specialTokensText", - "value": "[INST] ", - }, + expect(contextText).toMatchInlineSnapshot(` + LlamaText([ + new SpecialToken("BOS"), + new SpecialTokensText("[INST] "), "### System message You are a helpful, respectful and honest assistant. Always answer as helpfully as possible. 
@@ -525,12 +723,9 @@ describe("JinjaTemplateChatWrapper", () => { ---- Hi there!", - { - "type": "specialTokensText", - "value": " [/INST] ", - }, + new SpecialTokensText(" [/INST] "), "Hello!", - ] + ]) `); }); @@ -541,47 +736,29 @@ describe("JinjaTemplateChatWrapper", () => { }); const {contextText} = chatWrapper.generateContextState({chatHistory: [conversationHistory[0]!, ...conversationHistory]}); - expect(contextText.values).toMatchInlineSnapshot(` - [ - { - "type": "specialTokensText", - "value": "<> - ", - }, + expect(contextText).toMatchInlineSnapshot(` + LlamaText([ + new SpecialTokensText("<> + "), "You are a helpful, respectful and honest assistant. Always answer as helpfully as possible. If a question does not make any sense, or is not factually coherent, explain why instead of answering something incorrectly. If you don't know the answer to a question, don't share false information.", - { - "type": "specialTokensText", - "value": " + new SpecialTokensText(" <> <> - ", - }, + "), "You are a helpful, respectful and honest assistant. Always answer as helpfully as possible. If a question does not make any sense, or is not factually coherent, explain why instead of answering something incorrectly. If you don't know the answer to a question, don't share false information.", - { - "type": "specialTokensText", - "value": " + new SpecialTokensText(" <> - ", - }, - { - "type": "specialToken", - "value": "BOS", - }, - { - "type": "specialTokensText", - "value": "[INST] ", - }, + "), + new SpecialToken("BOS"), + new SpecialTokensText("[INST] "), "Hi there!", - { - "type": "specialTokensText", - "value": " [/INST] ", - }, + new SpecialTokensText(" [/INST] "), "Hello!", - ] + ]) `); }); @@ -594,13 +771,10 @@ describe("JinjaTemplateChatWrapper", () => { availableFunctions: exampleFunctions }); - expect(contextText.values).toMatchInlineSnapshot(` - [ - { - "type": "specialTokensText", - "value": "<> - ", - }, + expect(contextText).toMatchInlineSnapshot(` + LlamaText([ + new SpecialTokensText("<> + "), "You are a helpful, respectful and honest assistant. Always answer as helpfully as possible. If a question does not make any sense, or is not factually coherent, explain why instead of answering something incorrectly. If you don't know the answer to a question, don't share false information. @@ -642,43 +816,25 @@ describe("JinjaTemplateChatWrapper", () => { Calling any of the provided functions can be done like this: ||call: getSomeInfo", - { - "type": "specialTokensText", - "value": "(", - }, + new SpecialTokensText("("), "{"someKey": "someValue"}", - { - "type": "specialTokensText", - "value": ")", - }, + new SpecialTokensText(")"), " Note that the || prefix is mandatory. The assistant does not inform the user about using functions and does not explain anything before calling a function. After calling a function, the raw result appears afterwards and is not part of the conversation. 
To make information be part of the conversation, the assistant paraphrases and repeats the information without the function syntax.", - { - "type": "specialTokensText", - "value": " + new SpecialTokensText(" <> - ", - }, - { - "type": "specialToken", - "value": "BOS", - }, - { - "type": "specialTokensText", - "value": "[INST] ", - }, + "), + new SpecialToken("BOS"), + new SpecialTokensText("[INST] "), "Hi there!", - { - "type": "specialTokensText", - "value": " [/INST] ", - }, + new SpecialTokensText(" [/INST] "), "Hello!", - ] + ]) `); }); @@ -695,16 +851,10 @@ describe("JinjaTemplateChatWrapper", () => { availableFunctions: exampleFunctions }); - expect(contextText.values).toMatchInlineSnapshot(` - [ - { - "type": "specialToken", - "value": "BOS", - }, - { - "type": "specialTokensText", - "value": "[INST] ", - }, + expect(contextText).toMatchInlineSnapshot(` + LlamaText([ + new SpecialToken("BOS"), + new SpecialTokensText("[INST] "), "### System message The assistant calls the provided functions as needed to retrieve information instead of relying on existing knowledge. @@ -754,33 +904,15 @@ describe("JinjaTemplateChatWrapper", () => { ---- Hi there!", - { - "type": "specialTokensText", - "value": " [/INST] ", - }, + new SpecialTokensText(" [/INST] "), "Hello![[call: func2({"message": "Hello", "feeling": "good", "words": 1})]] [[result: {"yes": true, "message": "ok"}]]", - { - "type": "specialTokensText", - "value": " ", - }, - { - "type": "specialToken", - "value": "EOS", - }, - { - "type": "specialToken", - "value": "BOS", - }, - { - "type": "specialTokensText", - "value": "[INST] ", - }, + new SpecialTokensText(" "), + new SpecialToken("EOS"), + new SpecialToken("BOS"), + new SpecialTokensText("[INST] "), "How are you?", - { - "type": "specialTokensText", - "value": " [/INST]", - }, - ] + new SpecialTokensText(" [/INST]"), + ]) `); }); @@ -797,16 +929,10 @@ describe("JinjaTemplateChatWrapper", () => { availableFunctions: exampleFunctions }); - expect(contextText.values).toMatchInlineSnapshot(` - [ - { - "type": "specialToken", - "value": "BOS", - }, - { - "type": "specialTokensText", - "value": "[INST] ", - }, + expect(contextText).toMatchInlineSnapshot(` + LlamaText([ + new SpecialToken("BOS"), + new SpecialTokensText("[INST] "), "### System message The assistant calls the provided functions as needed to retrieve information instead of relying on existing knowledge. @@ -857,39 +983,554 @@ describe("JinjaTemplateChatWrapper", () => { ---- Hi there!", - { - "type": "specialTokensText", - "value": " [/INST] ", - }, + new SpecialTokensText(" [/INST] "), "Hello! Call function: func2 with params {"message": "Hello", "feeling": "good", "words": 1}. 
Function result: {"yes": true, "message": "ok"} ", - { - "type": "specialTokensText", - "value": " ", - }, - { - "type": "specialToken", - "value": "EOS", - }, - { - "type": "specialToken", - "value": "BOS", - }, - { - "type": "specialTokensText", - "value": "[INST] ", - }, + new SpecialTokensText(" "), + new SpecialToken("EOS"), + new SpecialToken("BOS"), + new SpecialTokensText("[INST] "), "How are you?", - { - "type": "specialTokensText", - "value": " [/INST]", - }, - ] + new SpecialTokensText(" [/INST]"), + ]) `); }); + describe("native function calling", () => { + test("sanity - template renders", () => { + const chatWrapper = new JinjaTemplateChatWrapper({ + template: mistralJinjaTemplate + }); + + const {messages: intermediateMessages, tools} = fromChatHistoryToIntermediateOpenAiMessages({ + chatHistory: sanity1ChatHistory, + chatWrapperSettings: chatWrapper.settings, + useRawValues: false, + functions: sanity1Functions, + stringifyFunctionParams: false, + stringifyFunctionResults: true, + combineModelMessageAndToolCalls: false, + squashModelTextResponses: false + }); + const messages = fromIntermediateToCompleteOpenAiMessages(intermediateMessages); + + const jinjaTemplate = new Template(mistralJinjaTemplate); + const res = jinjaTemplate.render({ + messages, + "bos_token": "|BOS|", + "eos_token": "|EOS|", + "eot_token": "|EOT|", + ...removeUndefinedFields({tools}) + }); + expect(res).toMatchInlineSnapshot("\"|BOS|[AVAILABLE_TOOLS][{\"type\": \"function\", \"function\": {\"name\": \"func1name\", \"description\": \"func1description\", \"parameters\": {\"type\": \"number\"}}}, {\"type\": \"function\", \"function\": {\"name\": \"func2name\", \"description\": \"func2description\", \"parameters\": {\"type\": \"number\"}}}][/AVAILABLE_TOOLS][INST]userMessage1[/INST][TOOL_CALLS][{\"name\": \"func1name\", \"arguments\": \"func1params\", \"id\": \"fc_2_0000\"}, {\"name\": \"func2name\", \"arguments\": \"func2params\", \"id\": \"fc_2_0001\"}]|EOS|[TOOL_RESULTS]{\"content\": \"func1result\", \"call_id\": \"fc_2_0000\"}[/TOOL_RESULTS][TOOL_RESULTS]{\"content\": \"func2result\", \"call_id\": \"fc_2_0001\"}[/TOOL_RESULTS]modelMessage2|EOS|modelMessage3|EOS|modelMessage4|EOS|\""); + }); + + test("mistral template", () => { + const chatWrapper = new JinjaTemplateChatWrapper({ + template: mistralJinjaTemplate + }); + + expect(chatWrapper.settings.functions).toMatchInlineSnapshot(` + { + "call": { + "emptyCallParamsPlaceholder": {}, + "optionalPrefixSpace": true, + "paramsPrefix": LlamaText([ + new SpecialTokensText("", "arguments": "), + ]), + "prefix": LlamaText([ + new SpecialTokensText("{"name": ""), + ]), + "suffix": LlamaText([ + new SpecialTokensText("}"), + ]), + }, + "parallelism": { + "call": { + "betweenCalls": LlamaText([ + new SpecialTokensText(", "), + ]), + "sectionPrefix": LlamaText([ + new SpecialTokensText("[TOOL_CALLS]["), + ]), + "sectionSuffix": LlamaText([ + new SpecialTokensText("]"), + new SpecialToken("EOS"), + ]), + }, + "result": { + "betweenResults": LlamaText([]), + "sectionPrefix": LlamaText([]), + "sectionSuffix": LlamaText([]), + }, + }, + "result": { + "prefix": LlamaText([ + new SpecialTokensText("[TOOL_RESULTS]{"content": "), + ]), + "suffix": LlamaText([ + new SpecialTokensText("}[/TOOL_RESULTS]"), + ]), + }, + } + `); + + const {contextText} = chatWrapper.generateContextState({ + chatHistory: conversationHistoryWithFunctionCalls, + availableFunctions: exampleFunctions + }); + + expect(contextText).toMatchInlineSnapshot(` + LlamaText([ + new SpecialToken("BOS"), + 
new SpecialTokensText("[INST]"), + "Hi there!", + new SpecialTokensText("[/INST]"), + "Hello!", + new SpecialTokensText("[TOOL_CALLS][{"name": ""), + "func2", + new SpecialTokensText("", "arguments": "), + "{"message": "Hello", "feeling": "good", "words": 1}", + new SpecialTokensText("}]"), + new SpecialToken("EOS"), + new SpecialTokensText("[TOOL_RESULTS]{"content": "), + ""{\\"yes\\": true, \\"message\\": \\"ok\\"}"", + new SpecialTokensText("}[/TOOL_RESULTS]"), + new SpecialToken("EOS"), + new SpecialTokensText("[AVAILABLE_TOOLS][{"type": "function", "function": {"name": "func1"}}, {"type": "function", "function": {"name": "func2", "parameters": {"type": "object", "properties": {"message": {"type": "string"}, "feeling": {"enum": ["good", "bad"]}, "words": {"type": "number"}}}}}, {"type": "function", "function": {"name": "func3", "description": "Some description here", "parameters": {"type": "array", "items": {"type": "string"}}}}, {"type": "function", "function": {"name": "func4", "description": "Some description here", "parameters": {"type": "array", "prefixItems": [{"type": "string"}, {"type": "boolean"}, {"type": "number"}, {"type": "null"}, {"type": "object", "properties": {"message": {"type": "string"}}}, {"type": "array", "items": {"type": "string"}}], "items": {"enum": ["1", -6]}, "minItems": 8}}}, {"type": "function", "function": {"name": "func5", "description": "Some description here", "parameters": {"type": "array", "prefixItems": [{"type": "string"}, {"type": "boolean"}, {"type": "number"}], "maxItems": 3}}}, {"type": "function", "function": {"name": "func6", "description": "Some description here", "parameters": {"type": "array", "items": {"type": "string"}, "minItems": 2}}}, {"type": "function", "function": {"name": "func7", "description": "Some description here", "parameters": {"type": "array", "items": {"type": "string"}, "minItems": 2, "maxItems": 2}}}, {"type": "function", "function": {"name": "func8", "parameters": {"type": "object", "properties": {"message": {"description": "The main message", "type": "string"}, "feeling": {"description": "The feeling", "enum": ["good", "bad"]}, "words": {"description": "The number of words.\\nFor example, 6", "type": "number"}}}}}][/AVAILABLE_TOOLS][INST]"), + "How are you?", + new SpecialTokensText("[/INST]"), + ]) + `); + }); + + test("mistral template 2", () => { + const chatWrapper = new JinjaTemplateChatWrapper({ + template: mistralJinjaTemplate + }); + + expect(chatWrapper.settings.functions).toMatchInlineSnapshot(` + { + "call": { + "emptyCallParamsPlaceholder": {}, + "optionalPrefixSpace": true, + "paramsPrefix": LlamaText([ + new SpecialTokensText("", "arguments": "), + ]), + "prefix": LlamaText([ + new SpecialTokensText("{"name": ""), + ]), + "suffix": LlamaText([ + new SpecialTokensText("}"), + ]), + }, + "parallelism": { + "call": { + "betweenCalls": LlamaText([ + new SpecialTokensText(", "), + ]), + "sectionPrefix": LlamaText([ + new SpecialTokensText("[TOOL_CALLS]["), + ]), + "sectionSuffix": LlamaText([ + new SpecialTokensText("]"), + new SpecialToken("EOS"), + ]), + }, + "result": { + "betweenResults": LlamaText([]), + "sectionPrefix": LlamaText([]), + "sectionSuffix": LlamaText([]), + }, + }, + "result": { + "prefix": LlamaText([ + new SpecialTokensText("[TOOL_RESULTS]{"content": "), + ]), + "suffix": LlamaText([ + new SpecialTokensText("}[/TOOL_RESULTS]"), + ]), + }, + } + `); + + const {contextText} = chatWrapper.generateContextState({ + chatHistory: conversationHistory4, + availableFunctions: functions4 + }); + + 
expect(contextText).toMatchInlineSnapshot(` + LlamaText([ + new SpecialToken("BOS"), + new SpecialTokensText("[INST]"), + "### System message + + You are a helpful, respectful and honest assistant. Always answer as helpfully as possible. + If a question does not make any sense, or is not factually coherent, explain why instead of answering something incorrectly. If you don't know the answer to a question, don't share false information. + + ---- + + Hi there!", + new SpecialTokensText("[/INST]"), + "Hello!", + new SpecialToken("EOS"), + new SpecialTokensText("[AVAILABLE_TOOLS][{"type": "function", "function": {"name": "getTime", "description": "Retrieve the current time", "parameters": {"type": "object", "properties": {"hours": {"enum": ["24", "12"]}, "seconds": {"type": "boolean"}}}}}][/AVAILABLE_TOOLS][INST]"), + "What is the time?", + new SpecialTokensText("[/INST][TOOL_CALLS][{"name": "getTime", "arguments": {"hours": "24", "seconds": true}, "id": "fc_3_0000"}]"), + new SpecialToken("EOS"), + new SpecialTokensText("[TOOL_RESULTS]{"content": "), + ""22:00:00"", + new SpecialTokensText(", "call_id": "fc_3_0000"}[/TOOL_RESULTS]"), + "I'm good, how are you?", + ]) + `); + }); + + test("mistral template - last model message is a function call", () => { + const chatWrapper = new JinjaTemplateChatWrapper({ + template: mistralJinjaTemplate + }); + + expect(chatWrapper.settings.functions).toMatchInlineSnapshot(` + { + "call": { + "emptyCallParamsPlaceholder": {}, + "optionalPrefixSpace": true, + "paramsPrefix": LlamaText([ + new SpecialTokensText("", "arguments": "), + ]), + "prefix": LlamaText([ + new SpecialTokensText("{"name": ""), + ]), + "suffix": LlamaText([ + new SpecialTokensText("}"), + ]), + }, + "parallelism": { + "call": { + "betweenCalls": LlamaText([ + new SpecialTokensText(", "), + ]), + "sectionPrefix": LlamaText([ + new SpecialTokensText("[TOOL_CALLS]["), + ]), + "sectionSuffix": LlamaText([ + new SpecialTokensText("]"), + new SpecialToken("EOS"), + ]), + }, + "result": { + "betweenResults": LlamaText([]), + "sectionPrefix": LlamaText([]), + "sectionSuffix": LlamaText([]), + }, + }, + "result": { + "prefix": LlamaText([ + new SpecialTokensText("[TOOL_RESULTS]{"content": "), + ]), + "suffix": LlamaText([ + new SpecialTokensText("}[/TOOL_RESULTS]"), + ]), + }, + } + `); + + const {contextText} = chatWrapper.generateContextState({ + chatHistory: conversationHistory5, + availableFunctions: functions5 + }); + + expect(contextText).toMatchInlineSnapshot(` + LlamaText([ + new SpecialToken("BOS"), + new SpecialTokensText("[INST]"), + "### System message + + You are a helpful, respectful and honest assistant. Always answer as helpfully as possible. + If a question does not make any sense, or is not factually coherent, explain why instead of answering something incorrectly. If you don't know the answer to a question, don't share false information. 
+ + ---- + + Hi there!", + new SpecialTokensText("[/INST]"), + "Hello!", + new SpecialToken("EOS"), + new SpecialTokensText("[AVAILABLE_TOOLS][{"type": "function", "function": {"name": "getTime", "description": "Retrieve the current time", "parameters": {"type": "object", "properties": {"hours": {"enum": ["24", "12"]}, "seconds": {"type": "boolean"}}}}}][/AVAILABLE_TOOLS][INST]"), + "What is the time?", + new SpecialTokensText("[/INST][TOOL_CALLS][{"name": "getTime", "arguments": {"hours": "24", "seconds": true}, "id": "fc_3_0000"}]"), + new SpecialToken("EOS"), + new SpecialTokensText("[TOOL_RESULTS]{"content": "), + ""22:00:00"", + new SpecialTokensText(", "call_id": "fc_3_0000"}[/TOOL_RESULTS]"), + ]) + `); + }); + + test("llama 3.1 template", () => { + const chatWrapper = new JinjaTemplateChatWrapper({ + template: llama3_1ChatJinjaTemplate + }); + + expect(chatWrapper.settings.functions).toMatchInlineSnapshot(` + { + "call": { + "emptyCallParamsPlaceholder": {}, + "optionalPrefixSpace": true, + "paramsPrefix": LlamaText([ + new SpecialTokensText("", "parameters": "), + ]), + "prefix": LlamaText([ + new SpecialTokensText("{"name": ""), + ]), + "suffix": "", + }, + "result": { + "prefix": LlamaText([ + new SpecialTokensText("}"), + new SpecialToken("EOT"), + new SpecialTokensText("<|start_header_id|>ipython<|end_header_id|> + + "), + ]), + "suffix": LlamaText([ + new SpecialToken("EOT"), + new SpecialTokensText("<|start_header_id|>assistant<|end_header_id|> + + "), + ]), + }, + } + `); + + const {contextText} = chatWrapper.generateContextState({ + chatHistory: conversationHistoryWithFunctionCalls, + availableFunctions: exampleFunctions + }); + + expect(contextText).toMatchInlineSnapshot(` + LlamaText([ + new SpecialToken("BOS"), + new SpecialTokensText("<|start_header_id|>system<|end_header_id|> + + Environment: ipython + Cutting Knowledge Date: December 2023 + Today Date: 26 Jul 2024 + + "), + new SpecialToken("EOT"), + new SpecialTokensText("<|start_header_id|>user<|end_header_id|> + + Given the following functions, please respond with a JSON for a function call with its proper arguments that best answers the given prompt. + + Respond in the format {"name": function name, "parameters": dictionary of argument name and its value}.Do not use variables. 
+ + { + "type": "function", + "function": { + "name": "func1" + } + } + + { + "type": "function", + "function": { + "name": "func2", + "parameters": { + "type": "object", + "properties": { + "message": { + "type": "string" + }, + "feeling": { + "enum": [ + "good", + "bad" + ] + }, + "words": { + "type": "number" + } + } + } + } + } + + { + "type": "function", + "function": { + "name": "func3", + "description": "Some description here", + "parameters": { + "type": "array", + "items": { + "type": "string" + } + } + } + } + + { + "type": "function", + "function": { + "name": "func4", + "description": "Some description here", + "parameters": { + "type": "array", + "prefixItems": [ + { + "type": "string" + }, + { + "type": "boolean" + }, + { + "type": "number" + }, + { + "type": "null" + }, + { + "type": "object", + "properties": { + "message": { + "type": "string" + } + } + }, + { + "type": "array", + "items": { + "type": "string" + } + } + ], + "items": { + "enum": [ + "1", + -6 + ] + }, + "minItems": 8 + } + } + } + + { + "type": "function", + "function": { + "name": "func5", + "description": "Some description here", + "parameters": { + "type": "array", + "prefixItems": [ + { + "type": "string" + }, + { + "type": "boolean" + }, + { + "type": "number" + } + ], + "maxItems": 3 + } + } + } + + { + "type": "function", + "function": { + "name": "func6", + "description": "Some description here", + "parameters": { + "type": "array", + "items": { + "type": "string" + }, + "minItems": 2 + } + } + } + + { + "type": "function", + "function": { + "name": "func7", + "description": "Some description here", + "parameters": { + "type": "array", + "items": { + "type": "string" + }, + "minItems": 2, + "maxItems": 2 + } + } + } + + { + "type": "function", + "function": { + "name": "func8", + "parameters": { + "type": "object", + "properties": { + "message": { + "description": "The main message", + "type": "string" + }, + "feeling": { + "description": "The feeling", + "enum": [ + "good", + "bad" + ] + }, + "words": { + "description": "The number of words.\\nFor example, 6", + "type": "number" + } + } + } + } + } + + "), + "Hi there!", + new SpecialToken("EOT"), + new SpecialTokensText("<|start_header_id|>assistant<|end_header_id|> + + "), + "Hello!", + new SpecialTokensText("{"name": ""), + "func2", + new SpecialTokensText("", "parameters": "), + "{"message": "Hello", "feeling": "good", "words": 1}", + new SpecialTokensText("}"), + new SpecialToken("EOT"), + new SpecialTokensText("<|start_header_id|>ipython<|end_header_id|> + + "), + "{"yes": true, "message": "ok"}", + new SpecialToken("EOT"), + new SpecialTokensText("<|start_header_id|>assistant<|end_header_id|> + + "), + new SpecialToken("EOT"), + new SpecialTokensText("<|start_header_id|>user<|end_header_id|> + + "), + "How are you?", + new SpecialToken("EOT"), + ]) + `); + }); + }); + test("Fails when messages are not present in the render output", () => { try { new JinjaTemplateChatWrapper({ diff --git a/test/standalone/chatWrappers/generic/TemplateChatWrapper.test.ts b/test/standalone/chatWrappers/generic/TemplateChatWrapper.test.ts index 8aa02f9b..b0bc1e0c 100644 --- a/test/standalone/chatWrappers/generic/TemplateChatWrapper.test.ts +++ b/test/standalone/chatWrappers/generic/TemplateChatWrapper.test.ts @@ -104,64 +104,40 @@ describe("TemplateChatWrapper", () => { }); const {contextText} = chatWrapper.generateContextState({chatHistory: conversationHistory}); - expect(contextText.values).toMatchInlineSnapshot(` - [ - { - "type": "specialTokensText", - 
"value": "SYS: ", - }, + expect(contextText).toMatchInlineSnapshot(` + LlamaText([ + new SpecialTokensText("SYS: "), "You are a helpful, respectful and honest assistant. Always answer as helpfully as possible. If a question does not make any sense, or is not factually coherent, explain why instead of answering something incorrectly. If you don't know the answer to a question, don't share false information.", - { - "type": "specialTokensText", - "value": " - user: ", - }, + new SpecialTokensText(" + user: "), "Hi there!", - { - "type": "specialTokensText", - "value": " - model:", - }, + new SpecialTokensText(" + model:"), "Hello!", - ] + ]) `); const {contextText: contextText2} = chatWrapper.generateContextState({chatHistory: conversationHistory2}); - expect(contextText2.values).toMatchInlineSnapshot(` - [ - { - "type": "specialTokensText", - "value": "SYS: ", - }, + expect(contextText2).toMatchInlineSnapshot(` + LlamaText([ + new SpecialTokensText("SYS: "), "You are a helpful, respectful and honest assistant. Always answer as helpfully as possible. If a question does not make any sense, or is not factually coherent, explain why instead of answering something incorrectly. If you don't know the answer to a question, don't share false information.", - { - "type": "specialTokensText", - "value": " - user: ", - }, + new SpecialTokensText(" + user: "), "Hi there!", - { - "type": "specialTokensText", - "value": " - model: ", - }, + new SpecialTokensText(" + model: "), "Hello!", - { - "type": "specialTokensText", - "value": " - user: ", - }, + new SpecialTokensText(" + user: "), "How are you?", - { - "type": "specialTokensText", - "value": " - model:", - }, + new SpecialTokensText(" + model:"), "I'm good, how are you?", - ] + ]) `); const {contextText: contextText3} = chatWrapper.generateContextState({chatHistory: conversationHistory}); @@ -175,82 +151,52 @@ describe("TemplateChatWrapper", () => { ] }); - expect(contextText3.values).toMatchInlineSnapshot(` - [ - { - "type": "specialTokensText", - "value": "SYS: ", - }, + expect(contextText3).toMatchInlineSnapshot(` + LlamaText([ + new SpecialTokensText("SYS: "), "You are a helpful, respectful and honest assistant. Always answer as helpfully as possible. If a question does not make any sense, or is not factually coherent, explain why instead of answering something incorrectly. If you don't know the answer to a question, don't share false information.", - { - "type": "specialTokensText", - "value": " - user: ", - }, + new SpecialTokensText(" + user: "), "Hi there!", - { - "type": "specialTokensText", - "value": " - model:", - }, + new SpecialTokensText(" + model:"), "Hello!", - ] + ]) `); - expect(contextText3WithOpenModelResponse.values).toMatchInlineSnapshot(` - [ - { - "type": "specialTokensText", - "value": "SYS: ", - }, + expect(contextText3WithOpenModelResponse).toMatchInlineSnapshot(` + LlamaText([ + new SpecialTokensText("SYS: "), "You are a helpful, respectful and honest assistant. Always answer as helpfully as possible. If a question does not make any sense, or is not factually coherent, explain why instead of answering something incorrectly. If you don't know the answer to a question, don't share false information.", - { - "type": "specialTokensText", - "value": " - user: ", - }, + new SpecialTokensText(" + user: "), "Hi there!", - { - "type": "specialTokensText", - "value": " - model:", - }, + new SpecialTokensText(" + model:"), "Hello! 
", - ] + ]) `); const {contextText: contextText4} = chatWrapper.generateContextState({chatHistory: conversationHistory3}); - expect(contextText4.values).toMatchInlineSnapshot(` - [ - { - "type": "specialTokensText", - "value": "SYS: - user: ", - }, + expect(contextText4).toMatchInlineSnapshot(` + LlamaText([ + new SpecialTokensText("SYS: + user: "), "Hi there!", - { - "type": "specialTokensText", - "value": " - model: ", - }, + new SpecialTokensText(" + model: "), "Hello!", - { - "type": "specialTokensText", - "value": " - user: ", - }, + new SpecialTokensText(" + user: "), "How are you?", - { - "type": "specialTokensText", - "value": " - ", - }, - ] + new SpecialTokensText(" + "), + ]) `); }); @@ -265,27 +211,18 @@ describe("TemplateChatWrapper", () => { }); const {contextText} = chatWrapper.generateContextState({chatHistory: conversationHistory}); - expect(contextText.values).toMatchInlineSnapshot(` - [ - { - "type": "specialTokensText", - "value": "BEGIN system: ", - }, + expect(contextText).toMatchInlineSnapshot(` + LlamaText([ + new SpecialTokensText("BEGIN system: "), "You are a helpful, respectful and honest assistant. Always answer as helpfully as possible. If a question does not make any sense, or is not factually coherent, explain why instead of answering something incorrectly. If you don't know the answer to a question, don't share false information.", - { - "type": "specialTokensText", - "value": " - user: ", - }, + new SpecialTokensText(" + user: "), "Hi there!", - { - "type": "specialTokensText", - "value": " - model:", - }, + new SpecialTokensText(" + model:"), "Hello!", - ] + ]) `); }); @@ -300,27 +237,18 @@ describe("TemplateChatWrapper", () => { }); const {contextText} = chatWrapper.generateContextState({chatHistory: conversationHistory}); - expect(contextText.values).toMatchInlineSnapshot(` - [ - { - "type": "specialTokensText", - "value": "system: ", - }, + expect(contextText).toMatchInlineSnapshot(` + LlamaText([ + new SpecialTokensText("system: "), "You are a helpful, respectful and honest assistant. Always answer as helpfully as possible. If a question does not make any sense, or is not factually coherent, explain why instead of answering something incorrectly. If you don't know the answer to a question, don't share false information.", - { - "type": "specialTokensText", - "value": " - user: ", - }, + new SpecialTokensText(" + user: "), "Hi there!", - { - "type": "specialTokensText", - "value": " - model:", - }, + new SpecialTokensText(" + model:"), "Hello!", - ] + ]) `); }); @@ -338,12 +266,9 @@ describe("TemplateChatWrapper", () => { availableFunctions: exampleFunctions }); - expect(contextText.values).toMatchInlineSnapshot(` - [ - { - "type": "specialTokensText", - "value": "system: ", - }, + expect(contextText).toMatchInlineSnapshot(` + LlamaText([ + new SpecialTokensText("system: "), "You are a helpful, respectful and honest assistant. Always answer as helpfully as possible. If a question does not make any sense, or is not factually coherent, explain why instead of answering something incorrectly. If you don't know the answer to a question, don't share false information. @@ -361,34 +286,22 @@ describe("TemplateChatWrapper", () => { Calling any of the provided functions can be done like this: ||call: getSomeInfo", - { - "type": "specialTokensText", - "value": "(", - }, + new SpecialTokensText("("), "{"someKey": "someValue"}", - { - "type": "specialTokensText", - "value": ")", - }, + new SpecialTokensText(")"), " Note that the || prefix is mandatory. 
The assistant does not inform the user about using functions and does not explain anything before calling a function. After calling a function, the raw result appears afterwards and is not part of the conversation. To make information be part of the conversation, the assistant paraphrases and repeats the information without the function syntax.", - { - "type": "specialTokensText", - "value": " - user: ", - }, + new SpecialTokensText(" + user: "), "Hi there!", - { - "type": "specialTokensText", - "value": " - model:", - }, + new SpecialTokensText(" + model:"), "Hello!", - ] + ]) `); }); @@ -410,12 +323,9 @@ describe("TemplateChatWrapper", () => { availableFunctions: exampleFunctions }); - expect(contextText.values).toMatchInlineSnapshot(` - [ - { - "type": "specialTokensText", - "value": "system: ", - }, + expect(contextText).toMatchInlineSnapshot(` + LlamaText([ + new SpecialTokensText("system: "), "The assistant calls the provided functions as needed to retrieve information instead of relying on existing knowledge. To fulfill a request, the assistant calls relevant functions in advance when needed before responding to the request, and does not tell the user prior to calling a function. Provided functions: @@ -435,30 +345,18 @@ describe("TemplateChatWrapper", () => { The assistant does not inform the user about using functions and does not explain anything before calling a function. After calling a function, the raw result appears afterwards and is not part of the conversation. To make information be part of the conversation, the assistant paraphrases and repeats the information without the function syntax.", - { - "type": "specialTokensText", - "value": " - user: ", - }, + new SpecialTokensText(" + user: "), "Hi there!", - { - "type": "specialTokensText", - "value": " - model: ", - }, + new SpecialTokensText(" + model: "), "Hello![[call: func2({"message": "Hello", "feeling": "good", "words": 1})]] [[result: {"yes": true, "message": "ok"}]]", - { - "type": "specialTokensText", - "value": " - user: ", - }, + new SpecialTokensText(" + user: "), "How are you?", - { - "type": "specialTokensText", - "value": " - ", - }, - ] + new SpecialTokensText(" + "), + ]) `); }); @@ -480,12 +378,9 @@ describe("TemplateChatWrapper", () => { availableFunctions: exampleFunctions }); - expect(contextText.values).toMatchInlineSnapshot(` - [ - { - "type": "specialTokensText", - "value": "system: ", - }, + expect(contextText).toMatchInlineSnapshot(` + LlamaText([ + new SpecialTokensText("system: "), "The assistant calls the provided functions as needed to retrieve information instead of relying on existing knowledge. To fulfill a request, the assistant calls relevant functions in advance when needed before responding to the request, and does not tell the user prior to calling a function. Provided functions: @@ -506,33 +401,21 @@ describe("TemplateChatWrapper", () => { The assistant does not inform the user about using functions and does not explain anything before calling a function. After calling a function, the raw result appears afterwards and is not part of the conversation. To make information be part of the conversation, the assistant paraphrases and repeats the information without the function syntax.", - { - "type": "specialTokensText", - "value": " - user: ", - }, + new SpecialTokensText(" + user: "), "Hi there!", - { - "type": "specialTokensText", - "value": " - model: ", - }, + new SpecialTokensText(" + model: "), "Hello! 
Call function: func2 with params {"message": "Hello", "feeling": "good", "words": 1}. Function result: {"yes": true, "message": "ok"} ", - { - "type": "specialTokensText", - "value": " - user: ", - }, + new SpecialTokensText(" + user: "), "How are you?", - { - "type": "specialTokensText", - "value": " - ", - }, - ] + new SpecialTokensText(" + "), + ]) `); }); }); diff --git a/test/standalone/chatWrappers/utils/resolveChatWrapper.test.ts b/test/standalone/chatWrappers/utils/resolveChatWrapper.test.ts index 744fa8fa..d0a671b1 100644 --- a/test/standalone/chatWrappers/utils/resolveChatWrapper.test.ts +++ b/test/standalone/chatWrappers/utils/resolveChatWrapper.test.ts @@ -1,7 +1,7 @@ import {describe, expect, test} from "vitest"; import { AlpacaChatWrapper, ChatMLChatWrapper, DeepSeekChatWrapper, FalconChatWrapper, FunctionaryChatWrapper, GemmaChatWrapper, - GeneralChatWrapper, Llama2ChatWrapper, Llama3_1ChatWrapper, MistralChatWrapper, resolveChatWrapper + GeneralChatWrapper, Llama2ChatWrapper, Llama3_1ChatWrapper, MistralChatWrapper, QwenChatWrapper, resolveChatWrapper } from "../../../../src/index.js"; @@ -386,6 +386,65 @@ const deepSeekJinjaTemplate = ` {%- endif -%} `.slice(1, -1); +const qwqJinjaTemplate = ` +{%- if tools %} + {{- '<|im_start|>system\\n' }} + {%- if messages[0]['role'] == 'system' %} + {{- messages[0]['content'] }} + {%- else %} + {{- '' }} + {%- endif %} + {{- "\\n\\n# Tools\\n\\nYou may call one or more functions to assist with the user query.\\n\\nYou are provided with function signatures within XML tags:\\n" }} + {%- for tool in tools %} + {{- "\\n" }} + {{- tool | tojson }} + {%- endfor %} + {{- "\\n\\n\\nFor each function call, return a json object with function name and arguments within XML tags:\\n\\n{\\"name\\": , \\"arguments\\": }\\n<|im_end|>\\n" }} +{%- else %} + {%- if messages[0]['role'] == 'system' %} + {{- '<|im_start|>system\\n' + messages[0]['content'] + '<|im_end|>\\n' }} + {%- endif %} +{%- endif %} +{%- for message in messages %} + {%- if (message.role == "user") or (message.role == "system" and not loop.first) %} + {{- '<|im_start|>' + message.role + '\\n' + message.content + '<|im_end|>' + '\\n' }} + {%- elif message.role == "assistant" and not message.tool_calls %} + {%- set content = message.content.split('')[-1].lstrip('\\n') %} + {{- '<|im_start|>' + message.role + '\\n' + content + '<|im_end|>' + '\\n' }} + {%- elif message.role == "assistant" %} + {%- set content = message.content.split('')[-1].lstrip('\\n') %} + {{- '<|im_start|>' + message.role }} + {%- if message.content %} + {{- '\\n' + content }} + {%- endif %} + {%- for tool_call in message.tool_calls %} + {%- if tool_call.function is defined %} + {%- set tool_call = tool_call.function %} + {%- endif %} + {{- '\\n\\n{"name": "' }} + {{- tool_call.name }} + {{- '", "arguments": ' }} + {{- tool_call.arguments | tojson }} + {{- '}\\n' }} + {%- endfor %} + {{- '<|im_end|>\\n' }} + {%- elif message.role == "tool" %} + {%- if (loop.index0 == 0) or (messages[loop.index0 - 1].role != "tool") %} + {{- '<|im_start|>user' }} + {%- endif %} + {{- '\\n\\n' }} + {{- message.content }} + {{- '\\n' }} + {%- if loop.last or (messages[loop.index0 + 1].role != "tool") %} + {{- '<|im_end|>\\n' }} + {%- endif %} + {%- endif %} +{%- endfor %} +{%- if add_generation_prompt %} + {{- '<|im_start|>assistant\\n' }} +{%- endif %} +`.slice(1, -1); + describe("resolveChatWrapper", () => { test("should resolve to specialized AlpacaChatWrapper", () => { @@ -547,4 +606,16 @@ describe("resolveChatWrapper", () => { }); 
expect(chatWrapper).to.be.instanceof(DeepSeekChatWrapper); }); + + test("should resolve to specialized QwenChatWrapper", {timeout: 1000 * 60 * 60 * 2}, async () => { + const chatWrapper = resolveChatWrapper({ + customWrapperSettings: { + jinjaTemplate: { + template: qwqJinjaTemplate + } + }, + fallbackToOtherWrappersOnJinjaError: false + }); + expect(chatWrapper).to.be.instanceof(QwenChatWrapper); + }); }); diff --git a/test/standalone/utils/LlamaText.test.ts b/test/standalone/utils/LlamaText.test.ts index 39a7e979..cb2ca18f 100644 --- a/test/standalone/utils/LlamaText.test.ts +++ b/test/standalone/utils/LlamaText.test.ts @@ -26,14 +26,14 @@ describe("utils", () => { expect(isLlamaText(text4)).to.eql(true); expect(text1).toMatchInlineSnapshot(` - LlamaText [ + LlamaText([ "Hi there!", - ] + ]) `); expect(text2).toMatchInlineSnapshot(` - LlamaText [ + LlamaText([ "Hi there!", - ] + ]) `); expect(text1.toJSON()).toMatchInlineSnapshot('"Hi there!"'); @@ -49,17 +49,11 @@ describe("utils", () => { new SpecialToken("EOS") ]); expect(text).toMatchInlineSnapshot(` - LlamaText [ + LlamaText([ "Hi there!", - { - "type": "specialTokensText", - "value": "Special text", - }, - { - "type": "specialToken", - "value": "EOS", - }, - ] + new SpecialTokensText("Special text"), + new SpecialToken("EOS"), + ]) `); }); @@ -68,16 +62,10 @@ describe("utils", () => { "" ]); expect(text.values.length).to.eql(0); - expect(text).toMatchInlineSnapshot(` - LlamaText [] - `); + expect(text).toMatchInlineSnapshot("LlamaText([])"); expect(text.toJSON()).toMatchInlineSnapshot('""'); - expect(LlamaText.fromJSON("")).toMatchInlineSnapshot(` - LlamaText [] - `); - expect(LlamaText.fromJSON([""])).toMatchInlineSnapshot(` - LlamaText [] - `); + expect(LlamaText.fromJSON("")).toMatchInlineSnapshot("LlamaText([])"); + expect(LlamaText.fromJSON([""])).toMatchInlineSnapshot("LlamaText([])"); }); test("sub texts flattening", async () => { @@ -93,18 +81,12 @@ describe("utils", () => { "there!" ]); expect(text1).toMatchInlineSnapshot(` - LlamaText [ + LlamaText([ "Hi Hello there!", - { - "type": "specialTokensText", - "value": "Special text", - }, - { - "type": "specialToken", - "value": "EOS", - }, + new SpecialTokensText("Special text"), + new SpecialToken("EOS"), "there!", - ] + ]) `); const text2 = LlamaText([ @@ -119,14 +101,11 @@ describe("utils", () => { "there!" ]); expect(text2).toMatchInlineSnapshot(` - LlamaText [ + LlamaText([ "Hi Hello there!", - { - "type": "specialTokensText", - "value": "Special text text2", - }, + new SpecialTokensText("Special text text2"), "there!", - ] + ]) `); const text3 = LlamaText([ @@ -140,14 +119,11 @@ describe("utils", () => { "there!" ]); expect(text3).toMatchInlineSnapshot(` - LlamaText [ + LlamaText([ "Hi Hello ", - { - "type": "specialTokensText", - "value": "Special text", - }, + new SpecialTokensText("Special text"), "there! there!", - ] + ]) `); const text4 = LlamaText([ @@ -163,14 +139,11 @@ describe("utils", () => { "there!" ]); expect(text4).toMatchInlineSnapshot(` - LlamaText [ + LlamaText([ "Hi Hello ", - { - "type": "specialTokensText", - "value": "Special text", - }, + new SpecialTokensText("Special text"), "there! there!", - ] + ]) `); const text5 = LlamaText([ @@ -185,14 +158,11 @@ describe("utils", () => { "there!" ]); expect(text5).toMatchInlineSnapshot(` - LlamaText [ + LlamaText([ "Hello ", - { - "type": "specialTokensText", - "value": "Special text", - }, + new SpecialTokensText("Special text"), "there! 
there!", - ] + ]) `); const text6 = LlamaText([ @@ -206,13 +176,10 @@ describe("utils", () => { "there!" ]); expect(text6).toMatchInlineSnapshot(` - LlamaText [ - { - "type": "specialTokensText", - "value": "Special text", - }, + LlamaText([ + new SpecialTokensText("Special text"), "there! there!", - ] + ]) `); }); @@ -314,27 +281,15 @@ describe("utils", () => { const text3 = text1.concat(text2); expect(text3).toMatchInlineSnapshot(` - LlamaText [ + LlamaText([ "Hello there!", - { - "type": "specialTokensText", - "value": "Special text", - }, - { - "type": "specialToken", - "value": "EOS", - }, + new SpecialTokensText("Special text"), + new SpecialToken("EOS"), " HiHello1 there!", - { - "type": "specialTokensText", - "value": "Special1 text", - }, - { - "type": "specialToken", - "value": "BOS", - }, + new SpecialTokensText("Special1 text"), + new SpecialToken("BOS"), " Hi1", - ] + ]) `); }); @@ -353,18 +308,12 @@ describe("utils", () => { return value; }); expect(text2).toMatchInlineSnapshot(` - LlamaText [ + LlamaText([ "Hello there!6", - { - "type": "specialTokensText", - "value": "Special text", - }, - { - "type": "specialToken", - "value": "EOS", - }, + new SpecialTokensText("Special text"), + new SpecialToken("EOS"), " Hi6", - ] + ]) `); }); @@ -377,65 +326,35 @@ describe("utils", () => { ]); expect(text1.joinValues("||")).toMatchInlineSnapshot(` - LlamaText [ + LlamaText([ "Hello there!||", - { - "type": "specialTokensText", - "value": "Special text", - }, + new SpecialTokensText("Special text"), "||", - { - "type": "specialToken", - "value": "EOS", - }, + new SpecialToken("EOS"), "|| Hi", - ] + ]) `); expect(text1.joinValues(new SpecialTokensText("||"))).toMatchInlineSnapshot(` - LlamaText [ + LlamaText([ "Hello there!", - { - "type": "specialTokensText", - "value": "||Special text||", - }, - { - "type": "specialToken", - "value": "EOS", - }, - { - "type": "specialTokensText", - "value": "||", - }, + new SpecialTokensText("||Special text||"), + new SpecialToken("EOS"), + new SpecialTokensText("||"), " Hi", - ] + ]) `); expect(text1.joinValues(new SpecialToken("BOS"))).toMatchInlineSnapshot(` - LlamaText [ + LlamaText([ "Hello there!", - { - "type": "specialToken", - "value": "BOS", - }, - { - "type": "specialTokensText", - "value": "Special text", - }, - { - "type": "specialToken", - "value": "BOS", - }, - { - "type": "specialToken", - "value": "EOS", - }, - { - "type": "specialToken", - "value": "BOS", - }, + new SpecialToken("BOS"), + new SpecialTokensText("Special text"), + new SpecialToken("BOS"), + new SpecialToken("EOS"), + new SpecialToken("BOS"), " Hi", - ] + ]) `); }); @@ -448,18 +367,12 @@ describe("utils", () => { " Hi" ]).trimStart() ).toMatchInlineSnapshot(` - LlamaText [ + LlamaText([ "Hello there!", - { - "type": "specialTokensText", - "value": "Special text", - }, - { - "type": "specialToken", - "value": "EOS", - }, + new SpecialTokensText("Special text"), + new SpecialToken("EOS"), " Hi", - ] + ]) `); expect( @@ -470,18 +383,12 @@ describe("utils", () => { " Hi" ]).trimStart() ).toMatchInlineSnapshot(` - LlamaText [ + LlamaText([ "Hello there!", - { - "type": "specialTokensText", - "value": "Special text", - }, - { - "type": "specialToken", - "value": "EOS", - }, + new SpecialTokensText("Special text"), + new SpecialToken("EOS"), " Hi", - ] + ]) `); expect( @@ -493,18 +400,12 @@ describe("utils", () => { " Hi" ]).trimStart() ).toMatchInlineSnapshot(` - LlamaText [ + LlamaText([ "Hello there!", - { - "type": "specialTokensText", - "value": "Special text", - }, - { - 
"type": "specialToken", - "value": "EOS", - }, + new SpecialTokensText("Special text"), + new SpecialToken("EOS"), " Hi", - ] + ]) `); expect( @@ -516,18 +417,12 @@ describe("utils", () => { " Hi" ]).trimStart() ).toMatchInlineSnapshot(` - LlamaText [ + LlamaText([ "Hello there!", - { - "type": "specialTokensText", - "value": "Special text", - }, - { - "type": "specialToken", - "value": "EOS", - }, + new SpecialTokensText("Special text"), + new SpecialToken("EOS"), " Hi", - ] + ]) `); expect( @@ -539,18 +434,12 @@ describe("utils", () => { " Hi" ]).trimStart() ).toMatchInlineSnapshot(` - LlamaText [ + LlamaText([ "Hello there!", - { - "type": "specialTokensText", - "value": "Special text", - }, - { - "type": "specialToken", - "value": "EOS", - }, + new SpecialTokensText("Special text"), + new SpecialToken("EOS"), " Hi", - ] + ]) `); expect( @@ -562,18 +451,12 @@ describe("utils", () => { " Hi" ]).trimStart() ).toMatchInlineSnapshot(` - LlamaText [ + LlamaText([ "Hello there!", - { - "type": "specialTokensText", - "value": "Special text", - }, - { - "type": "specialToken", - "value": "EOS", - }, + new SpecialTokensText("Special text"), + new SpecialToken("EOS"), " Hi", - ] + ]) `); expect( @@ -586,18 +469,12 @@ describe("utils", () => { " Hi" ]).trimStart() ).toMatchInlineSnapshot(` - LlamaText [ + LlamaText([ "Hello there!", - { - "type": "specialTokensText", - "value": "Special text", - }, - { - "type": "specialToken", - "value": "EOS", - }, + new SpecialTokensText("Special text"), + new SpecialToken("EOS"), " Hi", - ] + ]) `); }); @@ -610,18 +487,12 @@ describe("utils", () => { " Hi " ]).trimEnd() ).toMatchInlineSnapshot(` - LlamaText [ + LlamaText([ "Hello there!", - { - "type": "specialTokensText", - "value": "Special text", - }, - { - "type": "specialToken", - "value": "EOS", - }, + new SpecialTokensText("Special text"), + new SpecialToken("EOS"), " Hi", - ] + ]) `); expect( @@ -632,18 +503,12 @@ describe("utils", () => { " Hi\n" ]).trimEnd() ).toMatchInlineSnapshot(` - LlamaText [ + LlamaText([ "Hello there!", - { - "type": "specialTokensText", - "value": "Special text", - }, - { - "type": "specialToken", - "value": "EOS", - }, + new SpecialTokensText("Special text"), + new SpecialToken("EOS"), " Hi", - ] + ]) `); expect( @@ -655,18 +520,12 @@ describe("utils", () => { " " ]).trimEnd() ).toMatchInlineSnapshot(` - LlamaText [ + LlamaText([ "Hello there!", - { - "type": "specialTokensText", - "value": "Special text", - }, - { - "type": "specialToken", - "value": "EOS", - }, + new SpecialTokensText("Special text"), + new SpecialToken("EOS"), " Hi", - ] + ]) `); expect( @@ -678,18 +537,12 @@ describe("utils", () => { " " ]).trimEnd() ).toMatchInlineSnapshot(` - LlamaText [ + LlamaText([ "Hello there!", - { - "type": "specialTokensText", - "value": "Special text", - }, - { - "type": "specialToken", - "value": "EOS", - }, + new SpecialTokensText("Special text"), + new SpecialToken("EOS"), " Hi", - ] + ]) `); expect( @@ -701,18 +554,12 @@ describe("utils", () => { "\n" ]).trimEnd() ).toMatchInlineSnapshot(` - LlamaText [ + LlamaText([ "Hello there!", - { - "type": "specialTokensText", - "value": "Special text", - }, - { - "type": "specialToken", - "value": "EOS", - }, + new SpecialTokensText("Special text"), + new SpecialToken("EOS"), " Hi", - ] + ]) `); expect( @@ -724,18 +571,12 @@ describe("utils", () => { " " ]).trimEnd() ).toMatchInlineSnapshot(` - LlamaText [ + LlamaText([ "Hello there!", - { - "type": "specialTokensText", - "value": "Special text", - }, - { - "type": "specialToken", 
- "value": "EOS", - }, + new SpecialTokensText("Special text"), + new SpecialToken("EOS"), " Hi", - ] + ]) `); expect( @@ -748,18 +589,12 @@ describe("utils", () => { new SpecialTokensText(" ") ]).trimEnd() ).toMatchInlineSnapshot(` - LlamaText [ + LlamaText([ "Hello there!", - { - "type": "specialTokensText", - "value": "Special text", - }, - { - "type": "specialToken", - "value": "EOS", - }, + new SpecialTokensText("Special text"), + new SpecialToken("EOS"), " Hi", - ] + ]) `); }); diff --git a/test/standalone/utils/optionsMatrix.test.ts b/test/standalone/utils/optionsMatrix.test.ts new file mode 100644 index 00000000..4af81355 --- /dev/null +++ b/test/standalone/utils/optionsMatrix.test.ts @@ -0,0 +1,168 @@ +import {describe, expect, test} from "vitest"; +import {optionsMatrix, tryMatrix} from "../../../src/utils/optionsMatrix.js"; + + +describe("utils", () => { + describe("optionsMatrix", () => { + test("2 parameters", () => { + expect( + Array.from(optionsMatrix({ + a: [1, 2], + b: [3, 4] + })) + ).to.eql([ + {a: 1, b: 3}, + {a: 1, b: 4}, + {a: 2, b: 3}, + {a: 2, b: 4} + ]); + }); + test("2 parameters - different lengths", () => { + expect( + Array.from(optionsMatrix({ + a: [1, 2, 3], + b: [4] + })) + ).to.eql([ + {a: 1, b: 4}, + {a: 2, b: 4}, + {a: 3, b: 4} + ]); + }); + test("2 parameters - different lengths 2", () => { + expect( + Array.from(optionsMatrix({ + a: [1], + b: [2, 3, 4] + })) + ).to.eql([ + {a: 1, b: 2}, + {a: 1, b: 3}, + {a: 1, b: 4} + ]); + }); + test("1 parameter", () => { + expect( + Array.from(optionsMatrix({ + a: [1, 2] + })) + ).to.eql([ + {a: 1}, + {a: 2} + ]); + }); + test("no parameters", () => { + expect( + Array.from(optionsMatrix({})) + ).to.eql([ + {} + ]); + }); + }); + + describe("tryMatrix", () => { + test("2 parameter", () => { + const options: {a: number, b: number}[] = []; + const result = tryMatrix({ + a: [1, 2], + b: [3, 4] + }, ({a, b}) => { + options.push({a, b}); + if (a === 2 && b === 4) + return `success ${a} ${b}`; + + throw new Error("fail"); + }); + + expect(result).to.eql("success 2 4"); + expect(options).to.eql([ + {a: 1, b: 3}, + {a: 1, b: 4}, + {a: 2, b: 3}, + {a: 2, b: 4} + ]); + }); + + test("2 parameter - stop in the middle", () => { + const options: {a: number, b: number}[] = []; + const result = tryMatrix({ + a: [1, 2], + b: [3, 4] + }, ({a, b}) => { + options.push({a, b}); + if (a === 2 && b === 3) + return `success ${a} ${b}`; + + throw new Error("fail"); + }); + + expect(result).to.eql("success 2 3"); + expect(options).to.eql([ + {a: 1, b: 3}, + {a: 1, b: 4}, + {a: 2, b: 3} + ]); + }); + + test("no iterations", () => { + const options: {a: number, b: number}[] = []; + const result = tryMatrix({ + a: [1, 2], + b: [3, 4] + }, ({a, b}) => { + options.push({a, b}); + if (a === 1 && b === 3) + return `success ${a} ${b}`; + + throw new Error("fail"); + }); + + expect(result).to.eql("success 1 3"); + expect(options).to.eql([ + {a: 1, b: 3} + ]); + }); + + test("no options - success", () => { + const result = tryMatrix({}, ({}) => { + return "success"; + }); + + expect(result).to.eql("success"); + }); + + test("no options - fail", () => { + try { + tryMatrix({}, ({}) => { + throw new Error("fail"); + }); + expect.unreachable("Should have thrown an error"); + } catch (err) { + expect(err).toMatchInlineSnapshot("[Error: fail]"); + } + }); + + test("throw on all options", () => { + const options: {a: number, b: number}[] = []; + try { + tryMatrix({ + a: [1, 2], + b: [3, 4] + }, ({a, b}) => { + options.push({a, b}); + throw new Error(`fail 
 ${a} ${b}`);
+                });
+                expect.unreachable("Should have thrown an error");
+            } catch (err) {
+                expect(err).toMatchInlineSnapshot("[Error: fail 2 4]");
+            }
+
+            expect(options).to.eql([
+                {a: 1, b: 3},
+                {a: 1, b: 4},
+                {a: 2, b: 3},
+                {a: 2, b: 4}
+            ]);
+        });
+    });
+});
diff --git a/test/utils/helpers/SpecialTokenSerializer.ts b/test/utils/helpers/SpecialTokenSerializer.ts
new file mode 100644
index 00000000..67e40512
--- /dev/null
+++ b/test/utils/helpers/SpecialTokenSerializer.ts
@@ -0,0 +1,10 @@
+import {SnapshotSerializer} from "vitest";
+
+export default {
+    serialize(value, config, indentation, depth, refs, printer) {
+        return "new SpecialToken(" + printer(value.value, config, indentation, depth, refs) + ")";
+    },
+    test(value) {
+        return value != null && Object.getPrototypeOf(value).constructor.name === "SpecialToken";
+    }
+} satisfies SnapshotSerializer;
diff --git a/test/utils/helpers/SpecialTokensTextSerializer.ts b/test/utils/helpers/SpecialTokensTextSerializer.ts
new file mode 100644
index 00000000..717bb11c
--- /dev/null
+++ b/test/utils/helpers/SpecialTokensTextSerializer.ts
@@ -0,0 +1,10 @@
+import {SnapshotSerializer} from "vitest";
+
+export default {
+    serialize(value, config, indentation, depth, refs, printer) {
+        return "new SpecialTokensText(" + printer(value.value, config, indentation, depth, refs) + ")";
+    },
+    test(value) {
+        return value != null && Object.getPrototypeOf(value).constructor.name === "SpecialTokensText";
+    }
+} satisfies SnapshotSerializer;
diff --git a/test/utils/helpers/llamaTextSerializer.ts b/test/utils/helpers/llamaTextSerializer.ts
index cf7d70e7..a849f78a 100644
--- a/test/utils/helpers/llamaTextSerializer.ts
+++ b/test/utils/helpers/llamaTextSerializer.ts
@@ -3,7 +3,7 @@ import {isLlamaText} from "../../../src/index.js";
 
 export default {
     serialize(value, config, indentation, depth, refs, printer) {
-        return "LlamaText " + printer(value.values, config, indentation, depth, refs);
+        return "LlamaText(" + printer(value.values, config, indentation, depth, refs) + ")";
     },
     test(value) {
         return isLlamaText(value);
diff --git a/vitest.config.ts b/vitest.config.ts
index 150089e0..ebcdbf4c 100644
--- a/vitest.config.ts
+++ b/vitest.config.ts
@@ -7,12 +7,17 @@ export default defineConfig({
         minWorkers: 1,
         maxConcurrency: 1,
         poolOptions: {
-            threads: {
-                minThreads: 1,
-                maxThreads: 1
+            forks: {
+                minForks: 1,
+                maxForks: 1,
+                singleFork: true
             }
         },
-        snapshotSerializers: ["./test/utils/helpers/llamaTextSerializer.ts"],
+        snapshotSerializers: [
+            "./test/utils/helpers/llamaTextSerializer.ts",
+            "./test/utils/helpers/SpecialTokensTextSerializer.ts",
+            "./test/utils/helpers/SpecialTokenSerializer.ts"
+        ],
         setupFiles: ["./test/utils/helpers/testSetup.ts"]
     }
 });
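Note for reviewers: the new `optionsMatrix.test.ts` exercises `optionsMatrix` and `tryMatrix` from `src/utils/optionsMatrix.ts`, whose implementation is not part of this diff. The sketch below is only an illustration of the behavior the tests pin down (matrix iteration order, a single empty combination for an empty options object, returning the first non-throwing result, and rethrowing the last error when every combination fails); the `OptionsMap`/`Combination` type names and the exact signatures are assumptions made here, not the library's actual code.

```typescript
// Sketch only: inferred from the behavior asserted in optionsMatrix.test.ts;
// the real implementation in src/utils/optionsMatrix.ts may differ.
type OptionsMap = Record<string, readonly any[]>;
type Combination<T extends OptionsMap> = {[K in keyof T]: T[K][number]};

export function* optionsMatrix<T extends OptionsMap>(options: T): Generator<Combination<T>> {
    const keys = Object.keys(options) as (keyof T & string)[];

    function* iterate(index: number, current: Record<string, any>): Generator<Combination<T>> {
        if (index === keys.length) {
            // Yield a copy so combinations emitted earlier aren't mutated later
            yield {...current} as Combination<T>;
            return;
        }

        const key = keys[index]!;
        for (const value of options[key]) {
            current[key] = value;
            yield* iterate(index + 1, current);
        }
    }

    // With no keys this yields a single empty combination,
    // matching the "no parameters" test
    yield* iterate(0, {});
}

export function tryMatrix<T extends OptionsMap, R>(
    options: T,
    callback: (combination: Combination<T>) => R
): R {
    let lastError: unknown;

    // Try each combination in matrix order and return the first result
    // produced without throwing
    for (const combination of optionsMatrix(options)) {
        try {
            return callback(combination);
        } catch (err) {
            lastError = err;
        }
    }

    // Every combination failed: rethrow the error from the last attempt,
    // matching the "throw on all options" test
    throw lastError;
}
```

A helper with this shape is useful for walking through a sequence of fallback configurations (for example, progressively more conservative option values) until one succeeds, which is the pattern the `tryMatrix` tests above exercise.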