diff --git a/.azdo/pipelines/azure-dev.yml b/.azdo/pipelines/azure-dev.yml new file mode 100644 index 0000000000..4b399d8641 --- /dev/null +++ b/.azdo/pipelines/azure-dev.yml @@ -0,0 +1,136 @@ +# Run when commits are pushed to mainline branch (main or master) +# Set this to the mainline branch you are using +trigger: + - main + - master + +# Azure Pipelines workflow to deploy to Azure using azd +# To configure required secrets and service connection for connecting to Azure, simply run `azd pipeline config --provider azdo` +# Task "Install azd" needs to install setup-azd extension for azdo - https://marketplace.visualstudio.com/items?itemName=ms-azuretools.azd +# See below for alternative task to install azd if you can't install above task in your organization + +pool: + vmImage: ubuntu-latest + +steps: + - task: setup-azd@0 + displayName: Install azd + + # If you can't install above task in your organization, you can comment it and uncomment below task to install azd + # - task: Bash@3 + # displayName: Install azd + # inputs: + # targetType: 'inline' + # script: | + # curl -fsSL https://aka.ms/install-azd.sh | bash + + # azd delegate auth to az to use service connection with AzureCLI@2 + - pwsh: | + azd config set auth.useAzCliAuth "true" + displayName: Configure AZD to Use AZ CLI Authentication. + + - task: AzureCLI@2 + displayName: Provision Infrastructure + inputs: + # azconnection is the service connection created by azd. You can change it to any service connection you have in your organization. + azureSubscription: azconnection + scriptType: bash + scriptLocation: inlineScript + inlineScript: | + azd provision --no-prompt + env: + AZURE_SUBSCRIPTION_ID: $(AZURE_SUBSCRIPTION_ID) + AZURE_ENV_NAME: $(AZURE_ENV_NAME) + AZURE_LOCATION: $(AZURE_LOCATION) + AZD_INITIAL_ENVIRONMENT_CONFIG: $(AZD_INITIAL_ENVIRONMENT_CONFIG) + AZURE_OPENAI_SERVICE: $(AZURE_OPENAI_SERVICE) + AZURE_OPENAI_API_VERSION: $(AZURE_OPENAI_API_VERSION) + AZURE_OPENAI_LOCATION: $(AZURE_OPENAI_LOCATION) + AZURE_OPENAI_RESOURCE_GROUP: $(AZURE_OPENAI_RESOURCE_GROUP) + AZURE_DOCUMENTINTELLIGENCE_SERVICE: $(AZURE_DOCUMENTINTELLIGENCE_SERVICE) + AZURE_DOCUMENTINTELLIGENCE_RESOURCE_GROUP: $(AZURE_DOCUMENTINTELLIGENCE_RESOURCE_GROUP) + AZURE_DOCUMENTINTELLIGENCE_SKU: $(AZURE_DOCUMENTINTELLIGENCE_SKU) + AZURE_DOCUMENTINTELLIGENCE_LOCATION: $(AZURE_DOCUMENTINTELLIGENCE_LOCATION) + AZURE_SEARCH_INDEX: $(AZURE_SEARCH_INDEX) + AZURE_SEARCH_SERVICE: $(AZURE_SEARCH_SERVICE) + AZURE_SEARCH_SERVICE_RESOURCE_GROUP: $(AZURE_SEARCH_SERVICE_RESOURCE_GROUP) + AZURE_SEARCH_SERVICE_LOCATION: $(AZURE_SEARCH_SERVICE_LOCATION) + AZURE_SEARCH_SERVICE_SKU: $(AZURE_SEARCH_SERVICE_SKU) + AZURE_SEARCH_QUERY_LANGUAGE: $(AZURE_SEARCH_QUERY_LANGUAGE) + AZURE_SEARCH_QUERY_SPELLER: $(AZURE_SEARCH_QUERY_SPELLER) + AZURE_SEARCH_SEMANTIC_RANKER: $(AZURE_SEARCH_SEMANTIC_RANKER) + AZURE_SEARCH_QUERY_REWRITING: $(AZURE_SEARCH_QUERY_REWRITING) + AZURE_SEARCH_FIELD_NAME_EMBEDDING: $(AZURE_SEARCH_FIELD_NAME_EMBEDDING) + AZURE_STORAGE_ACCOUNT: $(AZURE_STORAGE_ACCOUNT) + AZURE_STORAGE_RESOURCE_GROUP: $(AZURE_STORAGE_RESOURCE_GROUP) + AZURE_STORAGE_SKU: $(AZURE_STORAGE_SKU) + AZURE_APP_SERVICE_SKU: $(AZURE_APP_SERVICE_SKU) + AZURE_OPENAI_CHATGPT_MODEL: $(AZURE_OPENAI_CHATGPT_MODEL) + AZURE_OPENAI_CHATGPT_DEPLOYMENT: $(AZURE_OPENAI_CHATGPT_DEPLOYMENT) + AZURE_OPENAI_CHATGPT_DEPLOYMENT_CAPACITY: $(AZURE_OPENAI_CHATGPT_DEPLOYMENT_CAPACITY) + AZURE_OPENAI_CHATGPT_DEPLOYMENT_VERSION: $(AZURE_OPENAI_CHATGPT_DEPLOYMENT_VERSION) + AZURE_OPENAI_CHATGPT_DEPLOYMENT_SKU:
$(AZURE_OPENAI_CHATGPT_DEPLOYMENT_SKU) + AZURE_OPENAI_REASONING_EFFORT: $(AZURE_OPENAI_REASONING_EFFORT) + AZURE_OPENAI_EMB_MODEL_NAME: $(AZURE_OPENAI_EMB_MODEL_NAME) + AZURE_OPENAI_EMB_DEPLOYMENT: $(AZURE_OPENAI_EMB_DEPLOYMENT) + AZURE_OPENAI_EMB_DEPLOYMENT_CAPACITY: $(AZURE_OPENAI_EMB_DEPLOYMENT_CAPACITY) + AZURE_OPENAI_EMB_DEPLOYMENT_VERSION: $(AZURE_OPENAI_EMB_DEPLOYMENT_VERSION) + AZURE_OPENAI_EMB_DEPLOYMENT_SKU: $(AZURE_OPENAI_EMB_DEPLOYMENT_SKU) + AZURE_OPENAI_EMB_DIMENSIONS: $(AZURE_OPENAI_EMB_DIMENSIONS) + AZURE_OPENAI_GPT4V_MODEL: $(AZURE_OPENAI_GPT4V_MODEL) + AZURE_OPENAI_GPT4V_DEPLOYMENT: $(AZURE_OPENAI_GPT4V_DEPLOYMENT) + AZURE_OPENAI_GPT4V_DEPLOYMENT_CAPACITY: $(AZURE_OPENAI_GPT4V_DEPLOYMENT_CAPACITY) + AZURE_OPENAI_GPT4V_DEPLOYMENT_VERSION: $(AZURE_OPENAI_GPT4V_DEPLOYMENT_VERSION) + AZURE_OPENAI_GPT4V_DEPLOYMENT_SKU: $(AZURE_OPENAI_GPT4V_DEPLOYMENT_SKU) + AZURE_OPENAI_DISABLE_KEYS: $(AZURE_OPENAI_DISABLE_KEYS) + OPENAI_HOST: $(OPENAI_HOST) + OPENAI_API_KEY: $(OPENAI_API_KEY) + OPENAI_ORGANIZATION: $(OPENAI_ORGANIZATION) + AZURE_USE_APPLICATION_INSIGHTS: $(AZURE_USE_APPLICATION_INSIGHTS) + AZURE_APPLICATION_INSIGHTS: $(AZURE_APPLICATION_INSIGHTS) + AZURE_APPLICATION_INSIGHTS_DASHBOARD: $(AZURE_APPLICATION_INSIGHTS_DASHBOARD) + AZURE_LOG_ANALYTICS: $(AZURE_LOG_ANALYTICS) + USE_VECTORS: $(USE_VECTORS) + USE_GPT4V: $(USE_GPT4V) + AZURE_VISION_ENDPOINT: $(AZURE_VISION_ENDPOINT) + VISION_SECRET_NAME: $(VISION_SECRET_NAME) + AZURE_COMPUTER_VISION_SERVICE: $(AZURE_COMPUTER_VISION_SERVICE) + AZURE_COMPUTER_VISION_RESOURCE_GROUP: $(AZURE_COMPUTER_VISION_RESOURCE_GROUP) + AZURE_COMPUTER_VISION_LOCATION: $(AZURE_COMPUTER_VISION_LOCATION) + AZURE_COMPUTER_VISION_SKU: $(AZURE_COMPUTER_VISION_SKU) + ENABLE_LANGUAGE_PICKER: $(ENABLE_LANGUAGE_PICKER) + USE_SPEECH_INPUT_BROWSER: $(USE_SPEECH_INPUT_BROWSER) + USE_SPEECH_OUTPUT_BROWSER: $(USE_SPEECH_OUTPUT_BROWSER) + USE_SPEECH_OUTPUT_AZURE: $(USE_SPEECH_OUTPUT_AZURE) + AZURE_SPEECH_SERVICE: $(AZURE_SPEECH_SERVICE) + AZURE_SPEECH_SERVICE_RESOURCE_GROUP: $(AZURE_SPEECH_SERVICE_RESOURCE_GROUP) + AZURE_SPEECH_SERVICE_LOCATION: $(AZURE_SPEECH_SERVICE_LOCATION) + AZURE_SPEECH_SERVICE_SKU: $(AZURE_SPEECH_SERVICE_SKU) + AZURE_SPEECH_SERVICE_VOICE: $(AZURE_SPEECH_SERVICE_VOICE) + AZURE_KEY_VAULT_NAME: $(AZURE_KEY_VAULT_NAME) + AZURE_USE_AUTHENTICATION: $(AZURE_USE_AUTHENTICATION) + AZURE_ENFORCE_ACCESS_CONTROL: $(AZURE_ENFORCE_ACCESS_CONTROL) + AZURE_ENABLE_GLOBAL_DOCUMENT_ACCESS: $(AZURE_ENABLE_GLOBAL_DOCUMENT_ACCESS) + AZURE_ENABLE_UNAUTHENTICATED_ACCESS: $(AZURE_ENABLE_UNAUTHENTICATED_ACCESS) + AZURE_TENANT_ID: $(AZURE_TENANT_ID) + AZURE_AUTH_TENANT_ID: $(AZURE_AUTH_TENANT_ID) + AZURE_SERVER_APP_ID: $(AZURE_SERVER_APP_ID) + AZURE_CLIENT_APP_ID: $(AZURE_CLIENT_APP_ID) + ALLOWED_ORIGIN: $(ALLOWED_ORIGIN) + AZURE_SERVER_APP_SECRET: $(AZURE_SERVER_APP_SECRET) + AZURE_CLIENT_APP_SECRET: $(AZURE_CLIENT_APP_SECRET) + AZURE_ADLS_GEN2_STORAGE_ACCOUNT: $(AZURE_ADLS_GEN2_STORAGE_ACCOUNT) + AZURE_ADLS_GEN2_FILESYSTEM_PATH: $(AZURE_ADLS_GEN2_FILESYSTEM_PATH) + AZURE_ADLS_GEN2_FILESYSTEM: $(AZURE_ADLS_GEN2_FILESYSTEM) + DEPLOYMENT_TARGET: $(DEPLOYMENT_TARGET) + AZURE_CONTAINER_APPS_WORKLOAD_PROFILE: $(AZURE_CONTAINER_APPS_WORKLOAD_PROFILE) + USE_CHAT_HISTORY_BROWSER: $(USE_CHAT_HISTORY_BROWSER) + USE_MEDIA_DESCRIBER_AZURE_CU: $(USE_MEDIA_DESCRIBER_AZURE_CU) + - task: AzureCLI@2 + displayName: Deploy Application + inputs: + azureSubscription: azconnection + scriptType: bash + scriptLocation: inlineScript + inlineScript: | + azd deploy --no-prompt diff --git 
a/.devcontainer/devcontainer.json b/.devcontainer/devcontainer.json index 1b3be7d091..9e5d4dce43 100644 --- a/.devcontainer/devcontainer.json +++ b/.devcontainer/devcontainer.json @@ -1,13 +1,14 @@ { - "name": "Azure Developer CLI", - "image": "mcr.microsoft.com/devcontainers/python:3.10", + "name": "Azure Search OpenAI Demo", + "image": "mcr.microsoft.com/devcontainers/python:3.11", "features": { "ghcr.io/devcontainers/features/node:1": { - "version": "16", + // This should match the version of Node.js in Github Actions workflows + "version": "22", "nodeGypDependencies": false }, - "ghcr.io/devcontainers/features/powershell:1.1.0": {}, - "ghcr.io/devcontainers/features/azure-cli:1.0.8": {}, + "ghcr.io/devcontainers/features/azure-cli:1.2.5": {}, + "ghcr.io/devcontainers/features/docker-in-docker:2": {}, "ghcr.io/azure/azure-dev/azd:latest": {} }, "customizations": { @@ -15,16 +16,17 @@ "extensions": [ "ms-azuretools.azure-dev", "ms-azuretools.vscode-bicep", - "ms-python.python" + "ms-python.python", + "esbenp.prettier-vscode" ] } }, "forwardPorts": [ - 5000 + 50505 ], "postCreateCommand": "", "remoteUser": "vscode", "hostRequirements": { "memory": "8gb" } -} \ No newline at end of file +} diff --git a/.gitattributes b/.gitattributes index 526c8a38d4..99f84ac39d 100644 --- a/.gitattributes +++ b/.gitattributes @@ -1 +1,2 @@ -*.sh text eol=lf \ No newline at end of file +*.sh text eol=lf +*.jsonlines text eol=lf diff --git a/.github/PULL_REQUEST_TEMPLATE.md b/.github/PULL_REQUEST_TEMPLATE.md index ab05e292b7..3303aaca8c 100644 --- a/.github/PULL_REQUEST_TEMPLATE.md +++ b/.github/PULL_REQUEST_TEMPLATE.md @@ -1,18 +1,31 @@ ## Purpose + -* ... + ## Does this introduce a breaking change? - + +When developers merge from main and run the server, azd up, or azd deploy, will this produce an error? +If you're not sure, try it out on an old environment. + ``` [ ] Yes [ ] No ``` -## Pull Request Type -What kind of change does this Pull Request introduce? +## Does this require changes to learn.microsoft.com docs? + +This repository is referenced by [this tutorial](https://learn.microsoft.com/azure/developer/python/get-started-app-chat-template) +which includes deployment, settings and usage instructions. If text or screenshot need to change in the tutorial, +check the box below and notify the tutorial author. A Microsoft employee can do this for you if you're an external contributor. + +``` +[ ] Yes +[ ] No +``` + +## Type of change - ``` [ ] Bugfix [ ] Feature @@ -22,24 +35,12 @@ What kind of change does this Pull Request introduce? [ ] Other... Please describe: ``` -## How to Test -* Get the code - -``` -git clone [repo-address] -cd [repo-name] -git checkout [branch-name] -npm install -``` - -* Test the code - -``` -``` +## Code quality checklist -## What to Check -Verify that the following are valid -* ... +See [CONTRIBUTING.md](https://github.com/Azure-Samples/azure-search-openai-demo/blob/main/CONTRIBUTING.md#submit-pr) for more details. -## Other Information - \ No newline at end of file +- [ ] The current tests all pass (`python -m pytest`). +- [ ] I added tests that prove my fix is effective or that my feature works +- [ ] I ran `python -m pytest --cov` to verify 100% coverage of added lines +- [ ] I ran `python -m mypy` to check for type errors +- [ ] I either used the pre-commit hooks or ran `ruff` and `black` manually on my code. 
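The code quality checklist in the new pull request template maps onto a short sequence of local commands. As a rough sketch (assuming the packages from `requirements-dev.txt` are already installed, and mirroring the steps the Python CI workflow later in this diff runs), a pre-push pass might look like this:

```shell
# Lint and check formatting (the pre-commit hooks run these automatically if installed)
ruff check .
black . --check

# Type-check the scripts and backend against the shared pyproject.toml configuration
(cd scripts && mypy . --config-file=../pyproject.toml)
(cd app/backend && mypy . --config-file=../../pyproject.toml)

# Run the unit tests with a coverage report
python -m pytest --cov
```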
diff --git a/.github/dependabot.yaml b/.github/dependabot.yaml new file mode 100644 index 0000000000..743544b45c --- /dev/null +++ b/.github/dependabot.yaml @@ -0,0 +1,24 @@ +version: 2 +updates: + + # Maintain dependencies for GitHub Actions + - package-ecosystem: "github-actions" + directory: "/" + schedule: + interval: "weekly" + groups: + github-actions: + patterns: + - "*" + + # Maintain dependencies for npm + - package-ecosystem: "npm" + directory: "/app/frontend" + schedule: + interval: "weekly" + + # Maintain dependencies for pip + - package-ecosystem: "pip" + directory: "/" + schedule: + interval: "weekly" diff --git a/.github/workflows/azure-dev-validation.yaml b/.github/workflows/azure-dev-validation.yaml index 3df510009c..31747d4a35 100644 --- a/.github/workflows/azure-dev-validation.yaml +++ b/.github/workflows/azure-dev-validation.yaml @@ -2,18 +2,55 @@ name: Validate AZD template on: push: branches: [ main ] + paths: + - "infra/**" pull_request: branches: [ main ] + paths: + - "infra/**" + workflow_dispatch: jobs: - build: - + bicep: runs-on: ubuntu-latest + permissions: + security-events: write steps: - name: Checkout - uses: actions/checkout@v3 + uses: actions/checkout@v4 - name: Build Bicep for linting - uses: azure/CLI@v1 + uses: azure/CLI@v2 + with: + inlineScript: | + export DOTNET_SYSTEM_GLOBALIZATION_INVARIANT=1 + az config set bicep.use_binary_from_path=false && az bicep build -f infra/main.bicep --stdout + + psrule: + runs-on: ubuntu-latest + permissions: + security-events: write + steps: + - name: Checkout + uses: actions/checkout@v4 + + - name: Run PSRule analysis + uses: microsoft/ps-rule@v2.9.0 + with: + modules: PSRule.Rules.Azure + baseline: Azure.Pillar.Security + inputPath: infra/*.test.bicep + outputFormat: Sarif + outputPath: reports/ps-rule-results.sarif + summary: true + continue-on-error: true + + env: + PSRULE_CONFIGURATION_AZURE_BICEP_FILE_EXPANSION: 'true' + PSRULE_CONFIGURATION_AZURE_BICEP_FILE_EXPANSION_TIMEOUT: '30' + + - name: Upload results to security tab + uses: github/codeql-action/upload-sarif@v3 + if: github.repository == 'Azure-Samples/azure-search-openai-demo' with: - inlineScript: az config set bicep.use_binary_from_path=false && az bicep build -f infra/main.bicep --stdout + sarif_file: reports/ps-rule-results.sarif diff --git a/.github/workflows/azure-dev.yml b/.github/workflows/azure-dev.yml new file mode 100644 index 0000000000..fa99f45a9e --- /dev/null +++ b/.github/workflows/azure-dev.yml @@ -0,0 +1,162 @@ +name: Deploy + +on: + workflow_dispatch: + push: + # Run when commits are pushed to mainline branch (main or master) + # Set this to the mainline branch you are using + branches: + - main + - master + +# GitHub Actions workflow to deploy to Azure using azd +# To configure required secrets for connecting to Azure, simply run `azd pipeline config` + +# Set up permissions for deploying with secretless Azure federated credentials +# https://learn.microsoft.com/azure/developer/github/connect-from-azure?tabs=azure-portal%2Clinux#set-up-azure-login-with-openid-connect-authentication +permissions: + id-token: write + contents: read + +jobs: + build: + runs-on: ubuntu-latest + env: + # azd required + AZURE_CLIENT_ID: ${{ vars.AZURE_CLIENT_ID }} + AZURE_TENANT_ID: ${{ vars.AZURE_TENANT_ID }} + AZURE_SUBSCRIPTION_ID: ${{ vars.AZURE_SUBSCRIPTION_ID }} + AZURE_ENV_NAME: ${{ vars.AZURE_ENV_NAME }} + AZURE_LOCATION: ${{ vars.AZURE_LOCATION }} + # project specific + AZURE_OPENAI_SERVICE: ${{ vars.AZURE_OPENAI_SERVICE }} + AZURE_OPENAI_LOCATION: 
${{ vars.AZURE_OPENAI_LOCATION }} + AZURE_OPENAI_API_VERSION: ${{ vars.AZURE_OPENAI_API_VERSION }} + AZURE_OPENAI_RESOURCE_GROUP: ${{ vars.AZURE_OPENAI_RESOURCE_GROUP }} + AZURE_DOCUMENTINTELLIGENCE_SERVICE: ${{ vars.AZURE_DOCUMENTINTELLIGENCE_SERVICE }} + AZURE_DOCUMENTINTELLIGENCE_RESOURCE_GROUP: ${{ vars.AZURE_DOCUMENTINTELLIGENCE_RESOURCE_GROUP }} + AZURE_DOCUMENTINTELLIGENCE_SKU: ${{ vars.AZURE_DOCUMENTINTELLIGENCE_SKU }} + AZURE_DOCUMENTINTELLIGENCE_LOCATION: ${{ vars.AZURE_DOCUMENTINTELLIGENCE_LOCATION }} + AZURE_COMPUTER_VISION_SERVICE: ${{ vars.AZURE_COMPUTER_VISION_SERVICE }} + AZURE_COMPUTER_VISION_RESOURCE_GROUP: ${{ vars.AZURE_COMPUTER_VISION_RESOURCE_GROUP }} + AZURE_COMPUTER_VISION_LOCATION: ${{ vars.AZURE_COMPUTER_VISION_LOCATION }} + AZURE_COMPUTER_VISION_SKU: ${{ vars.AZURE_COMPUTER_VISION_SKU }} + AZURE_SEARCH_INDEX: ${{ vars.AZURE_SEARCH_INDEX }} + AZURE_SEARCH_SERVICE: ${{ vars.AZURE_SEARCH_SERVICE }} + AZURE_SEARCH_SERVICE_RESOURCE_GROUP: ${{ vars.AZURE_SEARCH_SERVICE_RESOURCE_GROUP }} + AZURE_SEARCH_SERVICE_LOCATION: ${{ vars.AZURE_SEARCH_SERVICE_LOCATION }} + AZURE_SEARCH_SERVICE_SKU: ${{ vars.AZURE_SEARCH_SERVICE_SKU }} + AZURE_SEARCH_QUERY_LANGUAGE: ${{ vars.AZURE_SEARCH_QUERY_LANGUAGE }} + AZURE_SEARCH_QUERY_SPELLER: ${{ vars.AZURE_SEARCH_QUERY_SPELLER }} + AZURE_SEARCH_SEMANTIC_RANKER: ${{ vars.AZURE_SEARCH_SEMANTIC_RANKER }} + AZURE_SEARCH_QUERY_REWRITING: ${{ vars.AZURE_SEARCH_QUERY_REWRITING }} + AZURE_SEARCH_FIELD_NAME_EMBEDDING: ${{ vars.AZURE_SEARCH_FIELD_NAME_EMBEDDING }} + AZURE_STORAGE_ACCOUNT: ${{ vars.AZURE_STORAGE_ACCOUNT }} + AZURE_STORAGE_RESOURCE_GROUP: ${{ vars.AZURE_STORAGE_RESOURCE_GROUP }} + AZURE_STORAGE_SKU: ${{ vars.AZURE_STORAGE_SKU }} + AZURE_APP_SERVICE_PLAN: ${{ vars.AZURE_APP_SERVICE_PLAN }} + AZURE_APP_SERVICE_SKU: ${{ vars.AZURE_APP_SERVICE_SKU }} + AZURE_APP_SERVICE: ${{ vars.AZURE_APP_SERVICE }} + AZURE_OPENAI_CHATGPT_MODEL: ${{ vars.AZURE_OPENAI_CHATGPT_MODEL }} + AZURE_OPENAI_CHATGPT_DEPLOYMENT: ${{ vars.AZURE_OPENAI_CHATGPT_DEPLOYMENT }} + AZURE_OPENAI_CHATGPT_DEPLOYMENT_CAPACITY: ${{ vars.AZURE_OPENAI_CHATGPT_DEPLOYMENT_CAPACITY }} + AZURE_OPENAI_CHATGPT_DEPLOYMENT_VERSION: ${{ vars.AZURE_OPENAI_CHATGPT_DEPLOYMENT_VERSION }} + AZURE_OPENAI_REASONING_EFFORT: ${{ vars.AZURE_OPENAI_REASONING_EFFORT }} + AZURE_OPENAI_EMB_MODEL_NAME: ${{ vars.AZURE_OPENAI_EMB_MODEL_NAME }} + AZURE_OPENAI_EMB_DEPLOYMENT: ${{ vars.AZURE_OPENAI_EMB_DEPLOYMENT }} + AZURE_OPENAI_EMB_DEPLOYMENT_CAPACITY: ${{ vars.AZURE_OPENAI_EMB_DEPLOYMENT_CAPACITY }} + AZURE_OPENAI_EMB_DEPLOYMENT_VERSION: ${{ vars.AZURE_OPENAI_EMB_DEPLOYMENT_VERSION }} + AZURE_OPENAI_EMB_DIMENSIONS: ${{ vars.AZURE_OPENAI_EMB_DIMENSIONS }} + AZURE_OPENAI_GPT4V_MODEL: ${{ vars.AZURE_OPENAI_GPT4V_MODEL }} + AZURE_OPENAI_GPT4V_DEPLOYMENT: ${{ vars.AZURE_OPENAI_GPT4V_DEPLOYMENT }} + AZURE_OPENAI_GPT4V_DEPLOYMENT_CAPACITY: ${{ vars.AZURE_OPENAI_GPT4V_DEPLOYMENT_CAPACITY }} + AZURE_OPENAI_GPT4V_DEPLOYMENT_VERSION: ${{ vars.AZURE_OPENAI_GPT4V_DEPLOYMENT_VERSION }} + AZURE_OPENAI_GPT4V_DEPLOYMENT_SKU: ${{ vars.AZURE_OPENAI_GPT4V_DEPLOYMENT_SKU }} + USE_EVAL: ${{ vars.USE_EVAL }} + AZURE_OPENAI_EVAL_MODEL: ${{ vars.AZURE_OPENAI_EVAL_MODEL }} + AZURE_OPENAI_EVAL_MODEL_VERSION: ${{ vars.AZURE_OPENAI_EVAL_MODEL_VERSION }} + AZURE_OPENAI_EVAL_DEPLOYMENT: ${{ vars.AZURE_OPENAI_EVAL_DEPLOYMENT }} + AZURE_OPENAI_EVAL_DEPLOYMENT_SKU: ${{ vars.AZURE_OPENAI_EVAL_DEPLOYMENT_SKU }} + AZURE_OPENAI_EVAL_DEPLOYMENT_CAPACITY: ${{ vars.AZURE_OPENAI_EVAL_DEPLOYMENT_CAPACITY }} + AZURE_OPENAI_DISABLE_KEYS: ${{ 
vars.AZURE_OPENAI_DISABLE_KEYS }} + OPENAI_HOST: ${{ vars.OPENAI_HOST }} + OPENAI_API_KEY: ${{ vars.OPENAI_API_KEY }} + OPENAI_ORGANIZATION: ${{ vars.OPENAI_ORGANIZATION }} + AZURE_USE_APPLICATION_INSIGHTS: ${{ vars.AZURE_USE_APPLICATION_INSIGHTS }} + AZURE_APPLICATION_INSIGHTS: ${{ vars.AZURE_APPLICATION_INSIGHTS }} + AZURE_APPLICATION_INSIGHTS_DASHBOARD: ${{ vars.AZURE_APPLICATION_INSIGHTS_DASHBOARD }} + AZURE_LOG_ANALYTICS: ${{ vars.AZURE_LOG_ANALYTICS }} + USE_VECTORS: ${{ vars.USE_VECTORS }} + USE_GPT4V: ${{ vars.USE_GPT4V }} + AZURE_VISION_ENDPOINT: ${{ vars.AZURE_VISION_ENDPOINT }} + VISION_SECRET_NAME: ${{ vars.VISION_SECRET_NAME }} + ENABLE_LANGUAGE_PICKER: ${{ vars.ENABLE_LANGUAGE_PICKER }} + USE_SPEECH_INPUT_BROWSER: ${{ vars.USE_SPEECH_INPUT_BROWSER }} + USE_SPEECH_OUTPUT_BROWSER: ${{ vars.USE_SPEECH_OUTPUT_BROWSER }} + USE_SPEECH_OUTPUT_AZURE: ${{ vars.USE_SPEECH_OUTPUT_AZURE }} + AZURE_SPEECH_SERVICE: ${{ vars.AZURE_SPEECH_SERVICE }} + AZURE_SPEECH_SERVICE_RESOURCE_GROUP: ${{ vars.AZURE_SPEECH_RESOURCE_GROUP }} + AZURE_SPEECH_SERVICE_LOCATION: ${{ vars.AZURE_SPEECH_SERVICE_LOCATION }} + AZURE_SPEECH_SERVICE_SKU: ${{ vars.AZURE_SPEECH_SERVICE_SKU }} + AZURE_SPEECH_SERVICE_VOICE: ${{ vars.AZURE_SPEECH_SERVICE_VOICE }} + AZURE_KEY_VAULT_NAME: ${{ vars.AZURE_KEY_VAULT_NAME }} + AZURE_USE_AUTHENTICATION: ${{ vars.AZURE_USE_AUTHENTICATION }} + AZURE_ENFORCE_ACCESS_CONTROL: ${{ vars.AZURE_ENFORCE_ACCESS_CONTROL }} + AZURE_ENABLE_GLOBAL_DOCUMENT_ACCESS: ${{ vars.AZURE_ENABLE_GLOBAL_DOCUMENT_ACCESS }} + AZURE_ENABLE_UNAUTHENTICATED_ACCESS: ${{ vars.AZURE_ENABLE_UNAUTHENTICATED_ACCESS }} + AZURE_AUTH_TENANT_ID: ${{ vars.AZURE_AUTH_TENANT_ID }} + AZURE_SERVER_APP_ID: ${{ vars.AZURE_SERVER_APP_ID }} + AZURE_CLIENT_APP_ID: ${{ vars.AZURE_CLIENT_APP_ID }} + ALLOWED_ORIGIN: ${{ vars.ALLOWED_ORIGIN }} + AZURE_ADLS_GEN2_STORAGE_ACCOUNT: ${{ vars.AZURE_ADLS_GEN2_STORAGE_ACCOUNT }} + AZURE_ADLS_GEN2_FILESYSTEM_PATH: ${{ vars.AZURE_ADLS_GEN2_FILESYSTEM_PATH }} + AZURE_ADLS_GEN2_FILESYSTEM: ${{ vars.AZURE_ADLS_GEN2_FILESYSTEM }} + DEPLOYMENT_TARGET: ${{ vars.DEPLOYMENT_TARGET }} + AZURE_CONTAINER_APPS_WORKLOAD_PROFILE: ${{ vars.AZURE_CONTAINER_APPS_WORKLOAD_PROFILE }} + USE_CHAT_HISTORY_BROWSER: ${{ vars.USE_CHAT_HISTORY_BROWSER }} + USE_MEDIA_DESCRIBER_AZURE_CU: ${{ vars.USE_MEDIA_DESCRIBER_AZURE_CU }} + USE_AI_PROJECT: ${{ vars.USE_AI_PROJECT }} + steps: + - name: Checkout + uses: actions/checkout@v4 + + - name: Install azd + uses: Azure/setup-azd@v2.1.0 + + - name: Install Nodejs + uses: actions/setup-node@v4 + with: + node-version: 18 + + - name: Log in with Azure (Federated Credentials) + if: ${{ env.AZURE_CLIENT_ID != '' }} + run: | + azd auth login ` + --client-id "$Env:AZURE_CLIENT_ID" ` + --federated-credential-provider "github" ` + --tenant-id "$Env:AZURE_TENANT_ID" + shell: pwsh + + - name: Log in with Azure (Client Credentials) + if: ${{ env.AZURE_CREDENTIALS != '' }} + run: | + $info = $Env:AZURE_CREDENTIALS | ConvertFrom-Json -AsHashtable; + Write-Host "::add-mask::$($info.clientSecret)" + + azd auth login ` + --client-id "$($info.clientId)" ` + --client-secret "$($info.clientSecret)" ` + --tenant-id "$($info.tenantId)" + shell: pwsh + env: + AZURE_CREDENTIALS: ${{ secrets.AZURE_CREDENTIALS }} + + - name: Provision Infrastructure + run: azd provision --no-prompt + env: + AZD_INITIAL_ENVIRONMENT_CONFIG: ${{ secrets.AZD_INITIAL_ENVIRONMENT_CONFIG }} + AZURE_SERVER_APP_SECRET: ${{ secrets.AZURE_SERVER_APP_SECRET }} + AZURE_CLIENT_APP_SECRET: ${{ secrets.AZURE_CLIENT_APP_SECRET }} + + - 
name: Deploy Application + run: azd deploy --no-prompt diff --git a/.github/workflows/evaluate.yaml b/.github/workflows/evaluate.yaml new file mode 100644 index 0000000000..abb5f47465 --- /dev/null +++ b/.github/workflows/evaluate.yaml @@ -0,0 +1,244 @@ +name: Evaluate RAG answer flow + +on: + issue_comment: + types: [created] + +# Set up permissions for deploying with secretless Azure federated credentials +# https://learn.microsoft.com/azure/developer/github/connect-from-azure?tabs=azure-portal%2Clinux#set-up-azure-login-with-openid-connect-authentication +permissions: + id-token: write + contents: read + issues: write + pull-requests: write + +jobs: + evaluate: + if: | + contains('["OWNER", "CONTRIBUTOR", "COLLABORATOR", "MEMBER"]', github.event.comment.author_association) && + github.event.issue.pull_request && + github.event.comment.body == '/evaluate' + runs-on: ubuntu-latest + env: + # azd required + AZURE_CLIENT_ID: ${{ vars.AZURE_CLIENT_ID }} + AZURE_TENANT_ID: ${{ vars.AZURE_TENANT_ID }} + AZURE_SUBSCRIPTION_ID: ${{ vars.AZURE_SUBSCRIPTION_ID }} + AZURE_ENV_NAME: ${{ vars.AZURE_ENV_NAME }} + AZURE_LOCATION: ${{ vars.AZURE_LOCATION }} + # project specific + AZURE_OPENAI_SERVICE: ${{ vars.AZURE_OPENAI_SERVICE }} + AZURE_OPENAI_LOCATION: ${{ vars.AZURE_OPENAI_LOCATION }} + AZURE_OPENAI_API_VERSION: ${{ vars.AZURE_OPENAI_API_VERSION }} + AZURE_OPENAI_RESOURCE_GROUP: ${{ vars.AZURE_OPENAI_RESOURCE_GROUP }} + AZURE_DOCUMENTINTELLIGENCE_SERVICE: ${{ vars.AZURE_DOCUMENTINTELLIGENCE_SERVICE }} + AZURE_DOCUMENTINTELLIGENCE_RESOURCE_GROUP: ${{ vars.AZURE_DOCUMENTINTELLIGENCE_RESOURCE_GROUP }} + AZURE_DOCUMENTINTELLIGENCE_SKU: ${{ vars.AZURE_DOCUMENTINTELLIGENCE_SKU }} + AZURE_DOCUMENTINTELLIGENCE_LOCATION: ${{ vars.AZURE_DOCUMENTINTELLIGENCE_LOCATION }} + AZURE_COMPUTER_VISION_SERVICE: ${{ vars.AZURE_COMPUTER_VISION_SERVICE }} + AZURE_COMPUTER_VISION_RESOURCE_GROUP: ${{ vars.AZURE_COMPUTER_VISION_RESOURCE_GROUP }} + AZURE_COMPUTER_VISION_LOCATION: ${{ vars.AZURE_COMPUTER_VISION_LOCATION }} + AZURE_COMPUTER_VISION_SKU: ${{ vars.AZURE_COMPUTER_VISION_SKU }} + AZURE_SEARCH_INDEX: ${{ vars.AZURE_SEARCH_INDEX }} + AZURE_SEARCH_SERVICE: ${{ vars.AZURE_SEARCH_SERVICE }} + AZURE_SEARCH_SERVICE_RESOURCE_GROUP: ${{ vars.AZURE_SEARCH_SERVICE_RESOURCE_GROUP }} + AZURE_SEARCH_SERVICE_LOCATION: ${{ vars.AZURE_SEARCH_SERVICE_LOCATION }} + AZURE_SEARCH_SERVICE_SKU: ${{ vars.AZURE_SEARCH_SERVICE_SKU }} + AZURE_SEARCH_QUERY_LANGUAGE: ${{ vars.AZURE_SEARCH_QUERY_LANGUAGE }} + AZURE_SEARCH_QUERY_SPELLER: ${{ vars.AZURE_SEARCH_QUERY_SPELLER }} + AZURE_SEARCH_SEMANTIC_RANKER: ${{ vars.AZURE_SEARCH_SEMANTIC_RANKER }} + AZURE_STORAGE_ACCOUNT: ${{ vars.AZURE_STORAGE_ACCOUNT }} + AZURE_STORAGE_RESOURCE_GROUP: ${{ vars.AZURE_STORAGE_RESOURCE_GROUP }} + AZURE_STORAGE_SKU: ${{ vars.AZURE_STORAGE_SKU }} + AZURE_APP_SERVICE_PLAN: ${{ vars.AZURE_APP_SERVICE_PLAN }} + AZURE_APP_SERVICE_SKU: ${{ vars.AZURE_APP_SERVICE_SKU }} + AZURE_APP_SERVICE: ${{ vars.AZURE_APP_SERVICE }} + AZURE_OPENAI_CHATGPT_MODEL: ${{ vars.AZURE_OPENAI_CHATGPT_MODEL }} + AZURE_OPENAI_CHATGPT_DEPLOYMENT: ${{ vars.AZURE_OPENAI_CHATGPT_DEPLOYMENT }} + AZURE_OPENAI_CHATGPT_DEPLOYMENT_CAPACITY: ${{ vars.AZURE_OPENAI_CHATGPT_DEPLOYMENT_CAPACITY }} + AZURE_OPENAI_CHATGPT_DEPLOYMENT_VERSION: ${{ vars.AZURE_OPENAI_CHATGPT_DEPLOYMENT_VERSION }} + AZURE_OPENAI_EMB_MODEL_NAME: ${{ vars.AZURE_OPENAI_EMB_MODEL_NAME }} + AZURE_OPENAI_EMB_DEPLOYMENT: ${{ vars.AZURE_OPENAI_EMB_DEPLOYMENT }} + AZURE_OPENAI_EMB_DEPLOYMENT_CAPACITY: ${{ 
vars.AZURE_OPENAI_EMB_DEPLOYMENT_CAPACITY }} + AZURE_OPENAI_EMB_DEPLOYMENT_VERSION: ${{ vars.AZURE_OPENAI_EMB_DEPLOYMENT_VERSION }} + AZURE_OPENAI_EMB_DIMENSIONS: ${{ vars.AZURE_OPENAI_EMB_DIMENSIONS }} + AZURE_OPENAI_GPT4V_MODEL: ${{ vars.AZURE_OPENAI_GPT4V_MODEL }} + AZURE_OPENAI_GPT4V_DEPLOYMENT: ${{ vars.AZURE_OPENAI_GPT4V_DEPLOYMENT }} + AZURE_OPENAI_GPT4V_DEPLOYMENT_CAPACITY: ${{ vars.AZURE_OPENAI_GPT4V_DEPLOYMENT_CAPACITY }} + AZURE_OPENAI_GPT4V_DEPLOYMENT_VERSION: ${{ vars.AZURE_OPENAI_GPT4V_DEPLOYMENT_VERSION }} + AZURE_OPENAI_GPT4V_DEPLOYMENT_SKU: ${{ vars.AZURE_OPENAI_GPT4V_DEPLOYMENT_SKU }} + USE_EVAL: ${{ vars.USE_EVAL }} + AZURE_OPENAI_EVAL_MODEL: ${{ vars.AZURE_OPENAI_EVAL_MODEL }} + AZURE_OPENAI_EVAL_MODEL_VERSION: ${{ vars.AZURE_OPENAI_EVAL_MODEL_VERSION }} + AZURE_OPENAI_EVAL_DEPLOYMENT: ${{ vars.AZURE_OPENAI_EVAL_DEPLOYMENT }} + AZURE_OPENAI_EVAL_DEPLOYMENT_SKU: ${{ vars.AZURE_OPENAI_EVAL_DEPLOYMENT_SKU }} + AZURE_OPENAI_EVAL_DEPLOYMENT_CAPACITY: ${{ vars.AZURE_OPENAI_EVAL_DEPLOYMENT_CAPACITY }} + AZURE_OPENAI_DISABLE_KEYS: ${{ vars.AZURE_OPENAI_DISABLE_KEYS }} + OPENAI_HOST: ${{ vars.OPENAI_HOST }} + OPENAI_API_KEY: ${{ vars.OPENAI_API_KEY }} + OPENAI_ORGANIZATION: ${{ vars.OPENAI_ORGANIZATION }} + AZURE_USE_APPLICATION_INSIGHTS: ${{ vars.AZURE_USE_APPLICATION_INSIGHTS }} + AZURE_APPLICATION_INSIGHTS: ${{ vars.AZURE_APPLICATION_INSIGHTS }} + AZURE_APPLICATION_INSIGHTS_DASHBOARD: ${{ vars.AZURE_APPLICATION_INSIGHTS_DASHBOARD }} + AZURE_LOG_ANALYTICS: ${{ vars.AZURE_LOG_ANALYTICS }} + USE_VECTORS: ${{ vars.USE_VECTORS }} + USE_GPT4V: ${{ vars.USE_GPT4V }} + AZURE_VISION_ENDPOINT: ${{ vars.AZURE_VISION_ENDPOINT }} + VISION_SECRET_NAME: ${{ vars.VISION_SECRET_NAME }} + ENABLE_LANGUAGE_PICKER: ${{ vars.ENABLE_LANGUAGE_PICKER }} + USE_SPEECH_INPUT_BROWSER: ${{ vars.USE_SPEECH_INPUT_BROWSER }} + USE_SPEECH_OUTPUT_BROWSER: ${{ vars.USE_SPEECH_OUTPUT_BROWSER }} + USE_SPEECH_OUTPUT_AZURE: ${{ vars.USE_SPEECH_OUTPUT_AZURE }} + AZURE_SPEECH_SERVICE: ${{ vars.AZURE_SPEECH_SERVICE }} + AZURE_SPEECH_SERVICE_RESOURCE_GROUP: ${{ vars.AZURE_SPEECH_RESOURCE_GROUP }} + AZURE_SPEECH_SERVICE_LOCATION: ${{ vars.AZURE_SPEECH_SERVICE_LOCATION }} + AZURE_SPEECH_SERVICE_SKU: ${{ vars.AZURE_SPEECH_SERVICE_SKU }} + AZURE_SPEECH_SERVICE_VOICE: ${{ vars.AZURE_SPEECH_SERVICE_VOICE }} + AZURE_KEY_VAULT_NAME: ${{ vars.AZURE_KEY_VAULT_NAME }} + AZURE_USE_AUTHENTICATION: ${{ vars.AZURE_USE_AUTHENTICATION }} + AZURE_ENFORCE_ACCESS_CONTROL: ${{ vars.AZURE_ENFORCE_ACCESS_CONTROL }} + AZURE_ENABLE_GLOBAL_DOCUMENT_ACCESS: ${{ vars.AZURE_ENABLE_GLOBAL_DOCUMENT_ACCESS }} + AZURE_ENABLE_UNAUTHENTICATED_ACCESS: ${{ vars.AZURE_ENABLE_UNAUTHENTICATED_ACCESS }} + AZURE_AUTH_TENANT_ID: ${{ vars.AZURE_AUTH_TENANT_ID }} + AZURE_SERVER_APP_ID: ${{ vars.AZURE_SERVER_APP_ID }} + AZURE_CLIENT_APP_ID: ${{ vars.AZURE_CLIENT_APP_ID }} + ALLOWED_ORIGIN: ${{ vars.ALLOWED_ORIGIN }} + AZURE_ADLS_GEN2_STORAGE_ACCOUNT: ${{ vars.AZURE_ADLS_GEN2_STORAGE_ACCOUNT }} + AZURE_ADLS_GEN2_FILESYSTEM_PATH: ${{ vars.AZURE_ADLS_GEN2_FILESYSTEM_PATH }} + AZURE_ADLS_GEN2_FILESYSTEM: ${{ vars.AZURE_ADLS_GEN2_FILESYSTEM }} + DEPLOYMENT_TARGET: ${{ vars.DEPLOYMENT_TARGET }} + AZURE_CONTAINER_APPS_WORKLOAD_PROFILE: ${{ vars.AZURE_CONTAINER_APPS_WORKLOAD_PROFILE }} + USE_CHAT_HISTORY_BROWSER: ${{ vars.USE_CHAT_HISTORY_BROWSER }} + USE_MEDIA_DESCRIBER_AZURE_CU: ${{ vars.USE_MEDIA_DESCRIBER_AZURE_CU }} + USE_AI_PROJECT: ${{ vars.USE_AI_PROJECT }} + steps: + + - name: Comment on pull request + uses: actions/github-script@v7 + with: + script: | + 
github.rest.issues.createComment({ + issue_number: context.issue.number, + owner: context.repo.owner, + repo: context.repo.repo, + body: "Starting evaluation! Check the Actions tab for progress, or wait for a comment with the results." + }) + + - name: Checkout pull request + uses: actions/checkout@v4 + with: + ref: refs/pull/${{ github.event.issue.number }}/head + + - name: Install uv + uses: astral-sh/setup-uv@v6 + with: + enable-cache: true + version: "0.4.20" + cache-dependency-glob: "requirements**.txt" + python-version: "3.11" + + - name: Setup node + uses: actions/setup-node@v4 + with: + node-version: 18 + + - name: Install azd + uses: Azure/setup-azd@v2.1.0 + + - name: Login to Azure with az CLI + uses: azure/login@v2 + with: + client-id: ${{ env.AZURE_CLIENT_ID }} + tenant-id: ${{ env.AZURE_TENANT_ID }} + subscription-id: ${{ env.AZURE_SUBSCRIPTION_ID }} + + - name: Set az account + uses: azure/CLI@v2 + with: + inlineScript: | + az account set --subscription ${{env.AZURE_SUBSCRIPTION_ID}} + + - name: Login to with Azure with azd (Federated Credentials) + if: ${{ env.AZURE_CLIENT_ID != '' }} + run: | + azd auth login ` + --client-id "$Env:AZURE_CLIENT_ID" ` + --federated-credential-provider "github" ` + --tenant-id "$Env:AZURE_TENANT_ID" + shell: pwsh + + - name: Refresh azd environment variables + run: | + azd env refresh -e $AZURE_ENV_NAME --no-prompt + env: + AZD_INITIAL_ENVIRONMENT_CONFIG: ${{ secrets.AZD_INITIAL_ENVIRONMENT_CONFIG }} + + - name: Build frontend + run: | + cd ./app/frontend + npm install + npm run build + + - name: Install dependencies + run: | + uv pip install -r requirements-dev.txt + + - name: Run local server in background + run: | + cd app/backend + RUNNER_TRACKING_ID="" && (nohup python3 -m quart --app main:app run --port 50505 > serverlogs.out 2> serverlogs.err &) + cd ../.. 
+ + - name: Install evaluate dependencies + run: | + uv pip install -r evals/requirements.txt + + - name: Evaluate local RAG flow + run: | + python evals/evaluate.py --targeturl=http://127.0.0.1:50505/chat --resultsdir=evals/results/pr${{ github.event.issue.number }} + + - name: Upload eval results as build artifact + if: ${{ success() }} + uses: actions/upload-artifact@v4 + with: + name: eval_result + path: ./evals/results/pr${{ github.event.issue.number }} + + - name: Upload server logs as build artifact + uses: actions/upload-artifact@v4 + with: + name: server_logs + path: ./app/backend/serverlogs.out + + - name: Upload server error logs as build artifact + uses: actions/upload-artifact@v4 + with: + name: server_error_logs + path: ./app/backend/serverlogs.err + + - name: Summarize results + if: ${{ success() }} + run: | + echo "## Evaluation results" >> eval-summary.md + python -m evaltools summary evals/results --output=markdown >> eval-summary.md + echo "## Answer differences across runs" >> run-diff.md + python -m evaltools diff evals/results/baseline evals/results/pr${{ github.event.issue.number }} --output=markdown >> run-diff.md + cat eval-summary.md >> $GITHUB_STEP_SUMMARY + cat run-diff.md >> $GITHUB_STEP_SUMMARY + + - name: Comment on pull request + uses: actions/github-script@v7 + with: + script: | + const fs = require('fs'); + const summaryPath = "eval-summary.md"; + const summary = fs.readFileSync(summaryPath, 'utf8'); + const runId = process.env.GITHUB_RUN_ID; + const repo = process.env.GITHUB_REPOSITORY; + const actionsUrl = `https://github.com/${repo}/actions/runs/${runId}`; + github.rest.issues.createComment({ + issue_number: context.issue.number, + owner: context.repo.owner, + repo: context.repo.repo, + body: `${summary}\n\n[Check the workflow run for more details](${actionsUrl}).` + }) diff --git a/.github/workflows/frontend.yaml b/.github/workflows/frontend.yaml new file mode 100644 index 0000000000..3fea722f46 --- /dev/null +++ b/.github/workflows/frontend.yaml @@ -0,0 +1,22 @@ +name: Frontend linting + +on: + push: + branches: [ main ] + paths: + - "app/frontend/**" + pull_request: + branches: [ main ] + paths: + - "app/frontend/**" + +jobs: + prettier: + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v4 + - name: Run prettier on frontend + run: | + cd ./app/frontend + npm install + npx prettier --check . 
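The frontend lint job above only reports formatting problems. To reproduce the same check locally, and optionally fix files in place, something like the following should work (assuming Node.js is installed; the `--write` flag is standard Prettier behavior, not something this workflow itself uses):

```shell
cd app/frontend
npm install

# Same check the workflow runs; exits non-zero if any file is unformatted
npx prettier --check .

# Rewrite the offending files in place instead of only reporting them
npx prettier --write .
```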
diff --git a/.github/workflows/lint-markdown.yml b/.github/workflows/lint-markdown.yml new file mode 100644 index 0000000000..d1a573f3ea --- /dev/null +++ b/.github/workflows/lint-markdown.yml @@ -0,0 +1,22 @@ +name: Validate Markdown + +on: + pull_request: + branches: + - main + paths: + - '**.md' + +jobs: + lint-markdown: + name: Check for Markdown linting errors + runs-on: ubuntu-latest + steps: + - name: Checkout repo + uses: actions/checkout@v4 + - name: Run markdownlint + uses: articulate/actions-markdownlint@v1 + with: + config: .github/workflows/markdownlint-config.json + files: '**/*.md' + ignore: data/ diff --git a/.github/workflows/markdownlint-config.json b/.github/workflows/markdownlint-config.json new file mode 100644 index 0000000000..5f4341b93b --- /dev/null +++ b/.github/workflows/markdownlint-config.json @@ -0,0 +1,5 @@ +{ + "default": true, + "line-length": false, + "MD033": { "allowed_elements": ["br", "details", "summary"] } +} diff --git a/.github/workflows/nightly-jobs.yaml b/.github/workflows/nightly-jobs.yaml new file mode 100644 index 0000000000..cfd9b84f38 --- /dev/null +++ b/.github/workflows/nightly-jobs.yaml @@ -0,0 +1,10 @@ +name: Nightly Jobs + +on: + schedule: + - cron: '0 0 * * *' + workflow_dispatch: + +jobs: + python-test: + uses: ./.github/workflows/python-test.yaml diff --git a/.github/workflows/python-test.yaml b/.github/workflows/python-test.yaml index dd96622395..9234f54f26 100644 --- a/.github/workflows/python-test.yaml +++ b/.github/workflows/python-test.yaml @@ -3,29 +3,44 @@ name: Python check on: push: branches: [ main ] + paths-ignore: + - "**.md" + - ".azdo/**" + - ".devcontainer/**" + - ".github/**" pull_request: branches: [ main ] + paths-ignore: + - "**.md" + - ".azdo/**" + - ".devcontainer/**" + - ".github/**" + workflow_call: jobs: test_package: - name: Test ${{ matrix.os }} Python ${{ matrix.python_version }} + name: Test ${{ matrix.os }} Python ${{ matrix.python_version }} Node ${{ matrix.node_version }} runs-on: ${{ matrix.os }} strategy: fail-fast: false matrix: - os: ["ubuntu-20.04"] - python_version: ["3.9", "3.10", "3.11"] + os: ["ubuntu-latest", "windows-latest"] + python_version: ["3.9", "3.10", "3.11", "3.12", "3.13"] + node_version: ["20", "22"] steps: - - uses: actions/checkout@v3 - - name: Setup python - uses: actions/setup-python@v2 + - uses: actions/checkout@v4 + - name: Install uv + uses: astral-sh/setup-uv@v6 with: + enable-cache: true + version: "0.4.20" + cache-dependency-glob: "requirements**.txt" python-version: ${{ matrix.python_version }} - architecture: x64 + activate-environment: true - name: Setup node - uses: actions/setup-node@v2 + uses: actions/setup-node@v4 with: - node-version: 18 + node-version: ${{ matrix.node_version }} - name: Build frontend run: | cd ./app/frontend @@ -33,9 +48,29 @@ jobs: npm run build - name: Install dependencies run: | - python -m pip install --upgrade pip - pip install -r requirements-dev.txt + uv pip install -r requirements-dev.txt - name: Lint with ruff - run: ruff . + run: ruff check . + - name: Check types with mypy + run: | + cd scripts/ + mypy . --config-file=../pyproject.toml + cd ../app/backend/ + mypy . --config-file=../../pyproject.toml + - name: Check formatting with black + run: black . 
--check --verbose - name: Run Python tests - run: python3 -m pytest + if: runner.os != 'Windows' + run: pytest -s -vv --cov --cov-fail-under=89 + - name: Run E2E tests with Playwright + id: e2e + if: runner.os != 'Windows' + run: | + playwright install chromium --with-deps + pytest tests/e2e.py --tracing=retain-on-failure + - name: Upload test artifacts + if: ${{ failure() && steps.e2e.conclusion == 'failure' }} + uses: actions/upload-artifact@v4 + with: + name: playwright-traces${{ matrix.python_version }} + path: test-results diff --git a/.github/workflows/stale-bot.yml b/.github/workflows/stale-bot.yml new file mode 100644 index 0000000000..20d24d2d82 --- /dev/null +++ b/.github/workflows/stale-bot.yml @@ -0,0 +1,19 @@ +name: 'Close stale issues and PRs' +on: + schedule: + - cron: '30 1 * * *' + +jobs: + stale: + runs-on: ubuntu-latest + steps: + - uses: actions/stale@v9 + with: + stale-issue-message: 'This issue is stale because it has been open 60 days with no activity. Remove stale label or comment or this issue will be closed.' + stale-pr-message: 'This PR is stale because it has been open 60 days with no activity. Remove stale label or comment or this will be closed.' + close-issue-message: 'This issue was closed because it has been stalled for 7 days with no activity.' + close-pr-message: 'This PR was closed because it has been stalled for 10 days with no activity.' + days-before-issue-stale: 60 + days-before-pr-stale: 60 + days-before-issue-close: -1 + days-before-pr-close: -1 diff --git a/.github/workflows/validate-markdown.yml b/.github/workflows/validate-markdown.yml new file mode 100644 index 0000000000..48dc91b9be --- /dev/null +++ b/.github/workflows/validate-markdown.yml @@ -0,0 +1,67 @@ +name: Validate Markdown + +on: + # Trigger the workflow on pull request + pull_request_target: + branches: + - main + paths: + - '**.md' + - '**.ipynb' + +permissions: + contents: read + pull-requests: write + +jobs: + check-broken-paths: + name: Check Broken Relative Paths + runs-on: ubuntu-latest + steps: + - name: Checkout Repo + uses: actions/checkout@v4 + with: + ref: ${{ github.event.pull_request.head.sha }} + - name: Check broken Paths + id: check-broken-paths + uses: john0isaac/action-check-markdown@v1.1.0 + with: + command: check_broken_paths + directory: ./ + guide-url: 'https://github.com/Azure-Samples/azure-search-openai-demo/blob/main/CONTRIBUTING.md' + github-token: ${{ secrets.GITHUB_TOKEN }} + check-urls-locale: + if: ${{ always() }} + needs: check-broken-paths + name: Check URLs Don't Have Locale + runs-on: ubuntu-latest + steps: + - name: Checkout Repo + uses: actions/checkout@v4 + with: + ref: ${{ github.event.pull_request.head.sha }} + - name: Run Check URLs Country Locale + id: check-urls-locale + uses: john0isaac/action-check-markdown@v1.1.0 + with: + command: check_urls_locale + directory: ./ + guide-url: 'https://github.com/Azure-Samples/azure-search-openai-demo/blob/main/CONTRIBUTING.md' + github-token: ${{ secrets.GITHUB_TOKEN }} + check-broken-urls: + if: ${{ always() }} + name: Check Broken URLs + runs-on: ubuntu-latest + steps: + - name: Checkout Repo + uses: actions/checkout@v4 + with: + ref: ${{ github.event.pull_request.head.sha }} + - name: Run Check Broken URLs + id: check-broken-urls + uses: john0isaac/action-check-markdown@v1.1.0 + with: + command: check_broken_urls + directory: ./ + guide-url: 'https://github.com/Azure-Samples/azure-search-openai-demo/blob/main/CONTRIBUTING.md' + github-token: ${{ secrets.GITHUB_TOKEN }} diff --git a/.gitignore 
b/.gitignore index 02031bcda7..185ad0f3ef 100644 --- a/.gitignore +++ b/.gitignore @@ -111,6 +111,7 @@ celerybeat.pid # Environments .env .venv +.evalenv env/ venv/ ENV/ @@ -144,4 +145,8 @@ cython_debug/ # NPM npm-debug.log* node_modules -static/ \ No newline at end of file +static/ + +data/**/*.md5 + +.DS_Store diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 6d715a72bf..aa106e2f47 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -1,11 +1,21 @@ +exclude: '^tests/snapshots/' repos: - repo: https://github.com/pre-commit/pre-commit-hooks - rev: v4.4.0 + rev: v5.0.0 hooks: - id: check-yaml - id: end-of-file-fixer - id: trailing-whitespace - repo: https://github.com/astral-sh/ruff-pre-commit - rev: v0.0.282 + rev: v0.9.3 hooks: - id: ruff +- repo: https://github.com/psf/black + rev: 25.1.0 + hooks: + - id: black +- repo: https://github.com/pre-commit/mirrors-prettier + rev: v3.1.0 + hooks: + - id: prettier + types_or: [css, javascript, ts, tsx, html] diff --git a/.vscode/extensions.json b/.vscode/extensions.json index 12faf24039..c38e4736dc 100644 --- a/.vscode/extensions.json +++ b/.vscode/extensions.json @@ -1,6 +1,8 @@ { "recommendations": [ - "esbenp.prettier-vscode", - "ms-azuretools.azure-dev" + "ms-azuretools.azure-dev", + "ms-azuretools.vscode-bicep", + "ms-python.python", + "esbenp.prettier-vscode" ] -} \ No newline at end of file +} diff --git a/.vscode/launch.json b/.vscode/launch.json index 222b96bd36..46bd4f00d4 100644 --- a/.vscode/launch.json +++ b/.vscode/launch.json @@ -5,56 +5,49 @@ "version": "0.2.0", "configurations": [ { - "name": "Python: Flask", - "type": "python", + "name": "Backend (Python)", + "type": "debugpy", "request": "launch", - "module": "flask", + "module": "quart", "cwd": "${workspaceFolder}/app/backend", + "python": "${workspaceFolder}/.venv/bin/python", "env": { - "FLASK_APP": "app.py", - "FLASK_ENV": "development", - "FLASK_DEBUG": "0" + "QUART_APP": "main:app", + "QUART_ENV": "development", + "QUART_DEBUG": "0", + // Set this to "no-override" if you want env vars here to override AZD env vars + "LOADING_MODE_FOR_AZD_ENV_VARS": "override" }, "args": [ "run", - "--no-debugger", "--no-reload", - "-p 5000" + "-p 50505" ], "console": "integratedTerminal", - "justMyCode": true, - "envFile": "${input:dotEnvFilePath}", + "justMyCode": false }, { - "name": "Frontend: watch", - "type": "node", + "name": "Frontend", + "type": "node-terminal", "request": "launch", + "command": "npm run dev", "cwd": "${workspaceFolder}/app/frontend", - "runtimeExecutable": "npm", - "runtimeArgs": [ - "run-script", - "watch" - ], - "console": "integratedTerminal", }, { - "name": "Frontend: build", - "type": "node", + "name": "Tests (Python)", + "type": "debugpy", "request": "launch", - "cwd": "${workspaceFolder}/app/frontend", - "runtimeExecutable": "npm", - "runtimeArgs": [ - "run-script", - "build" - ], + "program": "${file}", + "purpose": ["debug-test"], "console": "integratedTerminal", - } + "justMyCode": false + } ], - "inputs": [ + "compounds": [ { - "id": "dotEnvFilePath", - "type": "command", - "command": "azure-dev.commands.getDotEnvFilePath" + "name": "Frontend & Backend", + "configurations": ["Backend (Python)", "Frontend"], + "stopAll": true } ] -} \ No newline at end of file +} diff --git a/.vscode/settings.json b/.vscode/settings.json index 7181b35e8a..aae6b8db93 100644 --- a/.vscode/settings.json +++ b/.vscode/settings.json @@ -15,8 +15,20 @@ "editor.defaultFormatter": "esbenp.prettier-vscode", "editor.formatOnSave": true }, + 
"files.exclude": { + "**/__pycache__": true, + "**/.coverage": true, + "**/.pytest_cache": true, + "**/.ruff_cache": true, + "**/.mypy_cache": true + }, "search.exclude": { "**/node_modules": true, "static": true - } + }, + "python.testing.pytestArgs": [ + "tests" + ], + "python.testing.unittestEnabled": false, + "python.testing.pytestEnabled": true } diff --git a/.vscode/tasks.json b/.vscode/tasks.json index 1ca7d896d4..d4fdd8a2fe 100644 --- a/.vscode/tasks.json +++ b/.vscode/tasks.json @@ -3,14 +3,6 @@ "tasks": [ { "label": "Start App", - "type": "dotenv", - "targetTasks": [ - "Start App (Script)" - ], - "file": "${input:dotEnvFilePath}" - }, - { - "label": "Start App (Script)", "type": "shell", "command": "${workspaceFolder}/app/start.sh", "windows": { @@ -24,12 +16,5 @@ }, "problemMatcher": [] } - ], - "inputs": [ - { - "id": "dotEnvFilePath", - "type": "command", - "command": "azure-dev.commands.getDotEnvFilePath" - } ] -} \ No newline at end of file +} diff --git a/CHANGELOG.md b/CHANGELOG.md deleted file mode 100644 index 982475272d..0000000000 --- a/CHANGELOG.md +++ /dev/null @@ -1,13 +0,0 @@ -## [project-title] Changelog - - -# x.y.z (yyyy-mm-dd) - -*Features* -* ... - -*Bug Fixes* -* ... - -*Breaking Changes* -* ... diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index 9b1e3f186a..f3d1d104d6 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -2,7 +2,7 @@ This project welcomes contributions and suggestions. Most contributions require you to agree to a Contributor License Agreement (CLA) declaring that you have the right to, and actually do, grant us -the rights to use your contribution. For details, visit https://cla.opensource.microsoft.com. +the rights to use your contribution. For details, visit . When you submit a pull request, a CLA bot will automatically determine whether you need to provide a CLA and decorate the PR appropriately (e.g., status check, comment). Simply follow the instructions @@ -12,83 +12,86 @@ This project has adopted the [Microsoft Open Source Code of Conduct](https://ope For more information see the [Code of Conduct FAQ](https://opensource.microsoft.com/codeofconduct/faq/) or contact [opencode@microsoft.com](mailto:opencode@microsoft.com) with any additional questions or comments. - - [Code of Conduct](#coc) - - [Issues and Bugs](#issue) - - [Feature Requests](#feature) - - [Submission Guidelines](#submit) - - [Running Tests](#tests) - - [Code Style](#style) +- [Submitting a Pull Request (PR)](#submitting-a-pull-request-pr) +- [Setting up the development environment](#setting-up-the-development-environment) +- [Running unit tests](#running-unit-tests) +- [Running E2E tests](#running-e2e-tests) +- [Code style](#code-style) +- [Adding new azd environment variables](#adding-new-azd-environment-variables) +- [Adding new UI strings](#adding-new-ui-strings) -## Code of Conduct -Help us keep this project open and inclusive. Please read and follow our [Code of Conduct](https://opensource.microsoft.com/codeofconduct/). +## Submitting a Pull Request (PR) -## Found an Issue? -If you find a bug in the source code or a mistake in the documentation, you can help us by -[submitting an issue](#submit-issue) to the GitHub Repository. Even better, you can -[submit a Pull Request](#submit-pr) with a fix. - -## Want a Feature? -You can *request* a new feature by [submitting an issue](#submit-issue) to the GitHub -Repository. If you would like to *implement* a new feature, please submit an issue with -a proposal for your work first, to be sure that we can use it. 
+Before you submit your Pull Request (PR) consider the following guidelines: -* **Small Features** can be crafted and directly [submitted as a Pull Request](#submit-pr). +- Search the repository (https://github.com/[organization-name]/[repository-name]/pulls) for an open or closed PR + that relates to your submission. You don't want to duplicate effort. +- Make your changes in a new git fork +- Follow [Code style conventions](#code-style) +- [Run the tests](#running-unit-tests) (and write new ones, if needed) +- Commit your changes using a descriptive commit message +- Push your fork to GitHub +- In GitHub, create a pull request to the `main` branch of the repository +- Ask a maintainer to review your PR and address any comments they might have -## Submission Guidelines +## Setting up the development environment -### Submitting an Issue -Before you submit an issue, search the archive, maybe your question was already answered. +Install the development dependencies: -If your issue appears to be a bug, and hasn't been reported, open a new issue. -Help us to maximize the effort we can spend fixing issues and adding new -features, by not reporting duplicate issues. Providing the following information will increase the -chances of your issue being dealt with quickly: +```shell +python -m pip install -r requirements-dev.txt +``` -* **Overview of the Issue** - if an error is being thrown a non-minified stack trace helps -* **Version** - what version is affected (e.g. 0.1.2) -* **Motivation for or Use Case** - explain what are you trying to do and why the current behavior is a bug for you -* **Browsers and Operating System** - is this a problem with all browsers? -* **Reproduce the Error** - provide a live example or a unambiguous set of steps -* **Related Issues** - has a similar issue been reported before? -* **Suggest a Fix** - if you can't fix the bug yourself, perhaps you can point to what might be - causing the problem (line of code or commit) +Install the pre-commit hooks: -You can file new issues by providing the above information at the corresponding repository's issues link: https://github.com/[organization-name]/[repository-name]/issues/new]. +```shell +pre-commit install +``` -### Submitting a Pull Request (PR) -Before you submit your Pull Request (PR) consider the following guidelines: +Compile the JavaScript: -* Search the repository (https://github.com/[organization-name]/[repository-name]/pulls) for an open or closed PR - that relates to your submission. You don't want to duplicate effort. -* Make your changes in a new git fork -* Follow [Code style conventions](#style) -* [Run the tests](#tests) (and write new ones, if needed) -* Commit your changes using a descriptive commit message -* Push your fork to GitHub -* In GitHub, create a pull request to the `main` branch of the repository -* Ask a maintainer to review your PR and address any comments they might have +```shell +( cd ./app/frontend ; npm install ; npm run build ) +``` -## Running tests +## Running unit tests -Install the development dependencies: +Run the tests: -``` -python3 -m pip install -r requirements-dev.txt +```shell +python -m pytest ``` -Install the pre-commit hooks: +Check the coverage report to make sure your changes are covered.
+```shell +python -m pytest --cov ``` + +## Running E2E tests + +Install Playwright browser dependencies: + +```shell +playwright install --with-deps +``` Run the tests: +```shell +python -m pytest tests/e2e.py --tracing=retain-on-failure ``` -python3 -m pytest + +When a failure happens, the trace zip will be saved in the test-results folder. +You can view that using the Playwright CLI: + +```shell +playwright show-trace test-results/ ``` -## Code Style +You can also use the online trace viewer at <https://trace.playwright.dev/>. + +## Code style This codebase includes several languages: TypeScript, Python, Bicep, PowerShell, and Bash. Code should follow the standard conventions of each language. @@ -97,18 +100,49 @@ For Python, you can enforce the conventions using `ruff` and `black`. Install the development dependencies: -``` -python3 -m pip install -r requirements-dev.txt +```shell +python -m pip install -r requirements-dev.txt ``` Run `ruff` to lint a file: -``` -python3 -m ruff +```shell +python -m ruff ``` Run `black` to format a file: +```shell +python -m black ``` -python3 -m black -``` + +If you followed the steps above to install the pre-commit hooks, then you can just wait for those hooks to run `ruff` and `black` for you. + +## Adding new azd environment variables + +When adding new azd environment variables, please remember to update: + +1. [main.parameters.json](./infra/main.parameters.json) +1. [appEnvVariables in main.bicep](./infra/main.bicep) +1. App Service's [azure.yaml](./azure.yaml) +1. [ADO pipeline](.azdo/pipelines/azure-dev.yml) +1. [GitHub workflows](.github/workflows/azure-dev.yml) + +## Adding new UI strings + +When adding new UI strings, please remember to update all translations. +For any translations that you generate with an AI tool, +please indicate in the PR description which language's strings were AI-generated. + +Here are community contributors that can review translations: + +| Language | Contributor | +|----------|---------------------| +| Danish | @EMjetrot | +| French | @manekinekko | +| Japanese | @bnodir | +| Norwegian | @jeannotdamoiseaux | +| Portuguese | @glaucia86 | +| Spanish | @miguelmsft | +| Turkish | @mertcakdogan | +| Italian | @ivanvaccarics | diff --git a/LICENSE.md b/LICENSE.md deleted file mode 100644 index 79656060de..0000000000 --- a/LICENSE.md +++ /dev/null @@ -1,21 +0,0 @@ - MIT License - - Copyright (c) Microsoft Corporation. - - Permission is hereby granted, free of charge, to any person obtaining a copy - of this software and associated documentation files (the "Software"), to deal - in the Software without restriction, including without limitation the rights - to use, copy, modify, merge, publish, distribute, sublicense, and/or sell - copies of the Software, and to permit persons to whom the Software is - furnished to do so, subject to the following conditions: - - The above copyright notice and this permission notice shall be included in all - copies or substantial portions of the Software. - - THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR - IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, - FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
IN NO EVENT SHALL THE - AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER - LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, - OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE - SOFTWARE \ No newline at end of file diff --git a/README.md b/README.md index 872b736f8d..08cc88fd77 100644 --- a/README.md +++ b/README.md @@ -1,161 +1,292 @@ -# ChatGPT + Enterprise data with Azure OpenAI and Cognitive Search + + +# RAG chat app with Azure OpenAI and Azure AI Search (Python) + +This solution creates a ChatGPT-like frontend experience over your own documents using RAG (Retrieval Augmented Generation). It uses Azure OpenAI Service to access GPT models, and Azure AI Search for data indexing and retrieval. + +This solution's backend is written in Python. There are also [**JavaScript**](https://aka.ms/azai/js/code), [**.NET**](https://aka.ms/azai/net/code), and [**Java**](https://aka.ms/azai/java/code) samples based on this one. Learn more about [developing AI apps using Azure AI Services](https://aka.ms/azai). [![Open in GitHub Codespaces](https://img.shields.io/static/v1?style=for-the-badge&label=GitHub+Codespaces&message=Open&color=brightgreen&logo=github)](https://github.com/codespaces/new?hide_repo_select=true&ref=main&repo=599293758&machine=standardLinux32gb&devcontainer_path=.devcontainer%2Fdevcontainer.json&location=WestUs2) -[![Open in Remote - Containers](https://img.shields.io/static/v1?style=for-the-badge&label=Remote%20-%20Containers&message=Open&color=blue&logo=visualstudiocode)](https://vscode.dev/redirect?url=vscode://ms-vscode-remote.remote-containers/cloneInVolume?url=https://github.com/azure-samples/azure-search-openai-demo) +[![Open in Dev Containers](https://img.shields.io/static/v1?style=for-the-badge&label=Dev%20Containers&message=Open&color=blue&logo=visualstudiocode)](https://vscode.dev/redirect?url=vscode://ms-vscode-remote.remote-containers/cloneInVolume?url=https://github.com/azure-samples/azure-search-openai-demo) -This sample demonstrates a few approaches for creating ChatGPT-like experiences over your own data using the Retrieval Augmented Generation pattern. It uses Azure OpenAI Service to access the ChatGPT model (gpt-35-turbo), and Azure Cognitive Search for data indexing and retrieval. +## Important Security Notice -The repo includes sample data so it's ready to try end to end. In this sample application we use a fictitious company called Contoso Electronics, and the experience allows its employees to ask questions about the benefits, internal policies, as well as job descriptions and roles. +This template, the application code and configuration it contains, has been built to showcase Microsoft Azure specific services and tools. We strongly advise our customers not to make this code part of their production environments without implementing or enabling additional security features. See our [productionizing guide](docs/productionizing.md) for tips, and consult the [Azure OpenAI Landing Zone reference architecture](https://techcommunity.microsoft.com/blog/azurearchitectureblog/azure-openai-landing-zone-reference-architecture/3882102) for more best practices. 
+ +## Table of Contents + +- [Features](#features) +- [Azure account requirements](#azure-account-requirements) + - [Cost estimation](#cost-estimation) +- [Getting Started](#getting-started) + - [GitHub Codespaces](#github-codespaces) + - [VS Code Dev Containers](#vs-code-dev-containers) + - [Local environment](#local-environment) +- [Deploying](#deploying) + - [Deploying again](#deploying-again) +- [Running the development server](#running-the-development-server) +- [Using the app](#using-the-app) +- [Clean up](#clean-up) +- [Guidance](#guidance) + - [Resources](#resources) + +![Chat screen](docs/images/chatscreen.png) -![RAG Architecture](docs/appcomponents.png) +[📺 Watch a video overview of the app.](https://youtu.be/3acB0OWmLvM) + +This sample demonstrates a few approaches for creating ChatGPT-like experiences over your own data using the Retrieval Augmented Generation pattern. It uses Azure OpenAI Service to access a GPT model (gpt-4o-mini), and Azure AI Search for data indexing and retrieval. + +The repo includes sample data so it's ready to try end to end. In this sample application we use a fictitious company called Contoso Electronics, and the experience allows its employees to ask questions about the benefits, internal policies, as well as job descriptions and roles. ## Features -* Chat and Q&A interfaces -* Explores various options to help users evaluate the trustworthiness of responses with citations, tracking of source content, etc. -* Shows possible approaches for data preparation, prompt construction, and orchestration of interaction between model (ChatGPT) and retriever (Cognitive Search) -* Settings directly in the UX to tweak the behavior and experiment with options +- Chat (multi-turn) and Q&A (single turn) interfaces +- Renders citations and thought process for each answer +- Includes settings directly in the UI to tweak the behavior and experiment with options +- Integrates Azure AI Search for indexing and retrieval of documents, with support for [many document formats](/docs/data_ingestion.md#supported-document-formats) as well as [integrated vectorization](/docs/data_ingestion.md#overview-of-integrated-vectorization) +- Optional usage of [GPT-4 with vision](/docs/gpt4v.md) to reason over image-heavy documents +- Optional addition of [speech input/output](/docs/deploy_features.md#enabling-speech-inputoutput) for accessibility +- Optional automation of [user login and data access](/docs/login_and_acl.md) via Microsoft Entra +- Performance tracing and monitoring with Application Insights -![Chat screen](docs/chatscreen.png) +### Architecture Diagram -## Getting Started +![RAG Architecture](docs/images/appcomponents.png) -> **IMPORTANT:** In order to deploy and run this example, you'll need an **Azure subscription with access enabled for the Azure OpenAI service**. You can request access [here](https://aka.ms/oaiapply). You can also visit [here](https://azure.microsoft.com/free/cognitive-search/) to get some free Azure credits to get you started. +## Azure account requirements -> **AZURE RESOURCE COSTS** by default this sample will create Azure App Service and Azure Cognitive Search resources that have a monthly cost, as well as Form Recognizer resource that has cost per document page. 
You can switch them to free versions of each of them if you want to avoid this cost by changing the parameters file under the infra folder (though there are some limits to consider; for example, you can have up to 1 free Cognitive Search resource per subscription, and the free Form Recognizer resource only analyzes the first 2 pages of each document.) +**IMPORTANT:** In order to deploy and run this example, you'll need: -### Prerequisites +- **Azure account**. If you're new to Azure, [get an Azure account for free](https://azure.microsoft.com/free/cognitive-search/) and you'll get some free Azure credits to get started. See [guide to deploying with the free trial](docs/deploy_freetrial.md). +- **Azure account permissions**: + - Your Azure account must have `Microsoft.Authorization/roleAssignments/write` permissions, such as [Role Based Access Control Administrator](https://learn.microsoft.com/azure/role-based-access-control/built-in-roles#role-based-access-control-administrator-preview), [User Access Administrator](https://learn.microsoft.com/azure/role-based-access-control/built-in-roles#user-access-administrator), or [Owner](https://learn.microsoft.com/azure/role-based-access-control/built-in-roles#owner). If you don't have subscription-level permissions, you must be granted [RBAC](https://learn.microsoft.com/azure/role-based-access-control/built-in-roles#role-based-access-control-administrator-preview) for an existing resource group and [deploy to that existing group](docs/deploy_existing.md#resource-group). + - Your Azure account also needs `Microsoft.Resources/deployments/write` permissions on the subscription level. -#### To Run Locally +### Cost estimation -* [Azure Developer CLI](https://aka.ms/azure-dev/install) -* [Python 3.9+](https://www.python.org/downloads/) - * **Important**: Python and the pip package manager must be in the path in Windows for the setup scripts to work. - * **Important**: Ensure you can run `python --version` from console. On Ubuntu, you might need to run `sudo apt install python-is-python3` to link `python` to `python3`. -* [Node.js 14+](https://nodejs.org/en/download/) -* [Git](https://git-scm.com/downloads) -* [Powershell 7+ (pwsh)](https://github.com/powershell/powershell) - For Windows users only. - * **Important**: Ensure you can run `pwsh.exe` from a PowerShell command. If this fails, you likely need to upgrade PowerShell. +Pricing varies per region and usage, so it isn't possible to predict exact costs for your usage. +However, you can try the [Azure pricing calculator](https://azure.com/e/e3490de2372a4f9b909b0d032560e41b) for the resources below. ->NOTE: Your Azure Account must have `Microsoft.Authorization/roleAssignments/write` permissions, such as [User Access Administrator](https://learn.microsoft.com/azure/role-based-access-control/built-in-roles#user-access-administrator) or [Owner](https://learn.microsoft.com/azure/role-based-access-control/built-in-roles#owner). +- Azure Container Apps: Default host for app deployment as of 10/28/2024. See more details in [the ACA deployment guide](docs/azure_container_apps.md). Consumption plan with 1 CPU core, 2 GB RAM, minimum of 0 replicas. Pricing with Pay-as-You-Go. [Pricing](https://azure.microsoft.com/pricing/details/container-apps/) +- Azure Container Registry: Basic tier. [Pricing](https://azure.microsoft.com/pricing/details/container-registry/) +- Azure App Service: Only provisioned if you deploy to Azure App Service following [the App Service deployment guide](docs/azure_app_service.md). 
Basic Tier with 1 CPU core, 1.75 GB RAM. Pricing per hour. [Pricing](https://azure.microsoft.com/pricing/details/app-service/linux/) +- Azure OpenAI: Standard tier, GPT and Ada models. Pricing per 1K tokens used, and at least 1K tokens are used per question. [Pricing](https://azure.microsoft.com/pricing/details/cognitive-services/openai-service/) +- Azure AI Document Intelligence: SO (Standard) tier using pre-built layout. Pricing per document page, sample documents have 261 pages total. [Pricing](https://azure.microsoft.com/pricing/details/form-recognizer/) +- Azure AI Search: Basic tier, 1 replica, free level of semantic search. Pricing per hour. [Pricing](https://azure.microsoft.com/pricing/details/search/) +- Azure Blob Storage: Standard tier with ZRS (Zone-redundant storage). Pricing per storage and read operations. [Pricing](https://azure.microsoft.com/pricing/details/storage/blobs/) +- Azure Cosmos DB: Only provisioned if you enabled [chat history with Cosmos DB](docs/deploy_features.md#enabling-persistent-chat-history-with-azure-cosmos-db). Serverless tier. Pricing per request unit and storage. [Pricing](https://azure.microsoft.com/pricing/details/cosmos-db/) +- Azure AI Vision: Only provisioned if you enabled [GPT-4 with vision](docs/gpt4v.md). Pricing per 1K transactions. [Pricing](https://azure.microsoft.com/pricing/details/cognitive-services/computer-vision/) +- Azure AI Content Understanding: Only provisioned if you enabled [media description](docs/deploy_features.md#enabling-media-description-with-azure-content-understanding). Pricing per 1K images. [Pricing](https://azure.microsoft.com/pricing/details/content-understanding/) +- Azure Monitor: Pay-as-you-go tier. Costs based on data ingested. [Pricing](https://azure.microsoft.com/pricing/details/monitor/) -#### To Run in GitHub Codespaces or VS Code Remote Containers +To reduce costs, you can switch to free SKUs for various services, but those SKUs have limitations. +See this guide on [deploying with minimal costs](docs/deploy_lowcost.md) for more details. -You can run this repo virtually by using GitHub Codespaces or VS Code Remote Containers. Click on one of the buttons below to open this repo in one of those options. +⚠️ To avoid unnecessary costs, remember to take down your app if it's no longer in use, +either by deleting the resource group in the Portal or running `azd down`. -[![Open in GitHub Codespaces](https://img.shields.io/static/v1?style=for-the-badge&label=GitHub+Codespaces&message=Open&color=brightgreen&logo=github)](https://github.com/codespaces/new?hide_repo_select=true&ref=main&repo=599293758&machine=standardLinux32gb&devcontainer_path=.devcontainer%2Fdevcontainer.json&location=WestUs2) -[![Open in Remote - Containers](https://img.shields.io/static/v1?style=for-the-badge&label=Remote%20-%20Containers&message=Open&color=blue&logo=visualstudiocode)](https://vscode.dev/redirect?url=vscode://ms-vscode-remote.remote-containers/cloneInVolume?url=https://github.com/azure-samples/azure-search-openai-demo) +## Getting Started -### Installation +You have a few options for setting up this project. +The easiest way to get started is GitHub Codespaces, since it will setup all the tools for you, +but you can also [set it up locally](#local-environment) if desired. -#### Project Initialization +### GitHub Codespaces -1. Create a new folder and switch to it in the terminal -1. Run `azd auth login` -1. 
Run `azd init -t azure-search-openai-demo` - * note that this command will initialize a git repository and you do not need to clone this repository +You can run this repo virtually by using GitHub Codespaces, which will open a web-based VS Code in your browser: -#### Starting from scratch +[![Open in GitHub Codespaces](https://img.shields.io/static/v1?style=for-the-badge&label=GitHub+Codespaces&message=Open&color=brightgreen&logo=github)](https://github.com/codespaces/new?hide_repo_select=true&ref=main&repo=599293758&machine=standardLinux32gb&devcontainer_path=.devcontainer%2Fdevcontainer.json&location=WestUs2) -Execute the following command, if you don't have any pre-existing Azure services and want to start from a fresh deployment. +Once the codespace opens (this may take several minutes), open a terminal window. -1. Run `azd up` - This will provision Azure resources and deploy this sample to those resources, including building the search index based on the files found in the `./data` folder. - * For the target location, the regions that currently support the models used in this sample are **East US**, **South Central US**, and **West Europe**. For an up-to-date list of regions and models, check [here](https://learn.microsoft.com/azure/cognitive-services/openai/concepts/models#model-summary-table-and-region-availability). -1. After the application has been successfully deployed you will see a URL printed to the console. Click that URL to interact with the application in your browser. +### VS Code Dev Containers -It will look like the following: +A related option is VS Code Dev Containers, which will open the project in your local VS Code using the [Dev Containers extension](https://marketplace.visualstudio.com/items?itemName=ms-vscode-remote.remote-containers): -!['Output from running azd up'](assets/endpoint.png) +1. Start Docker Desktop (install it if not already installed) +2. Open the project: + [![Open in Dev Containers](https://img.shields.io/static/v1?style=for-the-badge&label=Dev%20Containers&message=Open&color=blue&logo=visualstudiocode)](https://vscode.dev/redirect?url=vscode://ms-vscode-remote.remote-containers/cloneInVolume?url=https://github.com/azure-samples/azure-search-openai-demo) -> NOTE: It may take a minute for the application to be fully deployed. If you see a "Python Developer" welcome screen, then wait a minute and refresh the page. +3. In the VS Code window that opens, once the project files show up (this may take several minutes), open a terminal window. -#### Using existing resources +### Local environment -1. Run `azd env set AZURE_OPENAI_SERVICE {Name of existing OpenAI service}` -1. Run `azd env set AZURE_OPENAI_RESOURCE_GROUP {Name of existing resource group that OpenAI service is provisioned to}` -1. Run `azd env set AZURE_OPENAI_CHATGPT_DEPLOYMENT {Name of existing ChatGPT deployment}`. Only needed if your ChatGPT deployment is not the default 'chat'. -1. Run `azd env set AZURE_OPENAI_GPT_DEPLOYMENT {Name of existing GPT deployment}`. Only needed if your ChatGPT deployment is not the default 'davinci'. -1. Run `azd env set AZURE_OPENAI_EMB_DEPLOYMENT {Name of existing GPT embedding deployment}`. Only needed if your embeddings deployment is not the default 'embedding'. -1. Run `azd up` +1. Install the required tools: -> NOTE: You can also use existing Search and Storage Accounts. See `./infra/main.parameters.json` for list of environment variables to pass to `azd env set` to configure those existing resources. 
+ - [Azure Developer CLI](https://aka.ms/azure-dev/install) + - [Python 3.9, 3.10, or 3.11](https://www.python.org/downloads/) + - **Important**: Python and the pip package manager must be in the path in Windows for the setup scripts to work. + - **Important**: Ensure you can run `python --version` from console. On Ubuntu, you might need to run `sudo apt install python-is-python3` to link `python` to `python3`. + - [Node.js 20+](https://nodejs.org/download/) + - [Git](https://git-scm.com/downloads) + - [Powershell 7+ (pwsh)](https://github.com/powershell/powershell) - For Windows users only. + - **Important**: Ensure you can run `pwsh.exe` from a PowerShell terminal. If this fails, you likely need to upgrade PowerShell. -#### Deploying again +2. Create a new folder and switch to it in the terminal. +3. Run this command to download the project code: -If you've only changed the backend/frontend code in the `app` folder, then you don't need to re-provision the Azure resources. You can just run: + ```shell + azd init -t azure-search-openai-demo + ``` -```azd deploy``` + Note that this command will initialize a git repository, so you do not need to clone this repository. -If you've changed the infrastructure files (`infra` folder or `azure.yaml`), then you'll need to re-provision the Azure resources. You can do that by running: +## Deploying -```azd up``` +The steps below will provision Azure resources and deploy the application code to Azure Container Apps. To deploy to Azure App Service instead, follow [the app service deployment guide](docs/azure_app_service.md). -#### Running locally +1. Login to your Azure account: -1. Run `azd login` -2. Change dir to `app` -3. Run `./start.ps1` or `./start.sh` or run the "VS Code Task: Start App" to start the project locally. + ```shell + azd auth login + ``` -#### Sharing Environments + For GitHub Codespaces users, if the previous command fails, try: -To give someone else access to a completely deployed and existing environment, -either you or they can follow these steps: + ```shell + azd auth login --use-device-code + ``` -1. Install the [Azure CLI](https://learn.microsoft.com/cli/azure/install-azure-cli) -1. Run `azd init -t azure-search-openai-demo` or clone this repository. -1. Run `azd env refresh -e {environment name}` - They will need the azd environment name, subscription ID, and location to run this command. You can find those values in your `.azure/{env name}/.env` file. This will populate their azd environment's `.env` file with all the settings needed to run the app locally. -1. Set the environment variable `AZURE_PRINCIPAL_ID` either in that `.env` file or in the active shell to their Azure ID, which they can get with `az account show`. -1. Run `./scripts/roles.ps1` or `.scripts/roles.sh` to assign all of the necessary roles to the user. If they do not have the necessary permission to create roles in the subscription, then you may need to run this script for them. Once the script runs, they should be able to run the app locally. +1. Create a new azd environment: -### Quickstart + ```shell + azd env new + ``` -* In Azure: navigate to the Azure WebApp deployed by azd. The URL is printed out when azd completes (as "Endpoint"), or you can find it in the Azure portal. -* Running locally: navigate to 127.0.0.1:5000 + Enter a name that will be used for the resource group. + This will create a new folder in the `.azure` folder, and set it as the active environment for any calls to `azd` going forward. +1. 
(Optional) This is the point where you can customize the deployment by setting environment variables, in order to [use existing resources](docs/deploy_existing.md), [enable optional features (such as auth or vision)](docs/deploy_features.md), or [deploy low-cost options](docs/deploy_lowcost.md), or [deploy with the Azure free trial](docs/deploy_freetrial.md). +1. Run `azd up` - This will provision Azure resources and deploy this sample to those resources, including building the search index based on the files found in the `./data` folder. + - **Important**: Beware that the resources created by this command will incur immediate costs, primarily from the AI Search resource. These resources may accrue costs even if you interrupt the command before it is fully executed. You can run `azd down` or delete the resources manually to avoid unnecessary spending. + - You will be prompted to select two locations, one for the majority of resources and one for the OpenAI resource, which is currently a short list. That location list is based on the [OpenAI model availability table](https://learn.microsoft.com/azure/cognitive-services/openai/concepts/models#model-summary-table-and-region-availability) and may become outdated as availability changes. +1. After the application has been successfully deployed you will see a URL printed to the console. Click that URL to interact with the application in your browser. +It will look like the following: -Once in the web app: +!['Output from running azd up'](docs/images/endpoint.png) -* Try different topics in chat or Q&A context. For chat, try follow up questions, clarifications, ask to simplify or elaborate on answer, etc. -* Explore citations and sources -* Click on "settings" to try different options, tweak prompts, etc. +> NOTE: It may take 5-10 minutes after you see 'SUCCESS' for the application to be fully deployed. If you see a "Python Developer" welcome screen or an error page, then wait a bit and refresh the page. -## Resources +### Deploying again -* [Revolutionize your Enterprise Data with ChatGPT: Next-gen Apps w/ Azure OpenAI and Cognitive Search](https://aka.ms/entgptsearchblog) -* [Azure Cognitive Search](https://learn.microsoft.com/azure/search/search-what-is-azure-search) -* [Azure OpenAI Service](https://learn.microsoft.com/azure/cognitive-services/openai/overview) +If you've only changed the backend/frontend code in the `app` folder, then you don't need to re-provision the Azure resources. You can just run: -### Note +```shell +azd deploy +``` ->Note: The PDF documents used in this demo contain information generated using a language model (Azure OpenAI Service). The information contained in these documents is only for demonstration purposes and does not reflect the opinions or beliefs of Microsoft. Microsoft makes no representations or warranties of any kind, express or implied, about the completeness, accuracy, reliability, suitability or availability with respect to the information contained in this document. All rights reserved to Microsoft. +If you've changed the infrastructure files (`infra` folder or `azure.yaml`), then you'll need to re-provision the Azure resources. You can do that by running: + +```shell +azd up +``` -### FAQ +## Running the development server -
-Why do we need to break up the PDFs into chunks when Azure Cognitive Search supports searching large documents? +You can only run a development server locally **after** having successfully run the `azd up` command. If you haven't yet, follow the [deploying](#deploying) steps above. -Chunking allows us to limit the amount of information we send to OpenAI due to token limits. By breaking up the content, it allows us to easily find potential chunks of text that we can inject into OpenAI. The method of chunking we use leverages a sliding window of text such that sentences that end one chunk will start the next. This allows us to reduce the chance of losing the context of the text. -
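The sliding-window chunking described above can be illustrated with a short, standalone sketch: each chunk overlaps the previous one, so sentences near a boundary appear in both chunks and their surrounding context is preserved. This is only a simplified illustration of the idea (character-based limits, naive sentence splitting); it is not the repo's actual ingestion code, which is covered in [the data ingestion guide](docs/data_ingestion.md).

```python
# Simplified sliding-window chunking sketch; not the repo's actual prepdocs logic.
import re


def chunk_text(text: str, max_chars: int = 1000, overlap_sentences: int = 2) -> list[str]:
    # Naive sentence split on end-of-sentence punctuation.
    sentences = re.split(r"(?<=[.!?])\s+", text.strip())
    chunks: list[str] = []
    current: list[str] = []
    length = 0
    new_since_emit = False
    for sentence in sentences:
        current.append(sentence)
        new_since_emit = True
        length += len(sentence) + 1
        if length >= max_chars:
            chunks.append(" ".join(current))
            # Carry the last few sentences into the next chunk (the sliding-window overlap).
            current = current[-overlap_sentences:]
            length = sum(len(s) + 1 for s in current)
            new_since_emit = False
    if current and new_since_emit:
        chunks.append(" ".join(current))
    return chunks


if __name__ == "__main__":
    sample = "First sentence. Second sentence. Third sentence. " * 50
    for i, chunk in enumerate(chunk_text(sample, max_chars=300)):
        print(i, len(chunk))
```

The overlap trades a small amount of extra index storage for a lower chance that an answer-bearing passage is split across a chunk boundary, which is the "losing the context" risk mentioned above.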
+1. Run `azd auth login` if you have not logged in recently. +2. Start the server: -
-How can we upload additional PDFs without redeploying everything? + Windows: -To upload more PDFs, put them in the data/ folder and run `./scripts/prepdocs.sh` or `./scripts/prepdocs.ps1`. To avoid reuploading existing docs, move them out of the data folder. You could also implement checks to see whats been uploaded before; our code doesn't yet have such checks. -
+ ```shell + ./app/start.ps1 + ``` -### Troubleshooting + Linux/Mac: -Here are the most common failure scenarios and solutions: + ```shell + ./app/start.sh + ``` -1. The subscription (`AZURE_SUBSCRIPTION_ID`) doesn't have access to the Azure OpenAI service. Please ensure `AZURE_SUBSCRIPTION_ID` matches the ID specified in the [OpenAI access request process](https://aka.ms/oai/access). + VS Code: Run the "VS Code Task: Start App" task. -1. You're attempting to create resources in regions not enabled for Azure OpenAI (e.g. East US 2 instead of East US), or where the model you're trying to use isn't enabled. See [this matrix of model availability](https://aka.ms/oai/models). +It's also possible to enable hotloading or the VS Code debugger. +See more tips in [the local development guide](docs/localdev.md). -1. You've exceeded a quota, most often number of resources per region. See [this article on quotas and limits](https://aka.ms/oai/quotas). +## Using the app -1. You're getting "same resource name not allowed" conflicts. That's likely because you've run the sample multiple times and deleted the resources you've been creating each time, but are forgetting to purge them. Azure keeps resources for 48 hours unless you purge from soft delete. See [this article on purging resources](https://learn.microsoft.com/azure/cognitive-services/manage-resources?tabs=azure-portal#purge-a-deleted-resource). +- In Azure: navigate to the Azure WebApp deployed by azd. The URL is printed out when azd completes (as "Endpoint"), or you can find it in the Azure portal. +- Running locally: navigate to 127.0.0.1:50505 + +Once in the web app: -1. You see `CERTIFICATE_VERIFY_FAILED` when the `prepdocs.py` script runs. That's typically due to incorrect SSL certificates setup on your machine. Try the suggestions in this [StackOverflow answer](https://stackoverflow.com/questions/35569042/ssl-certificate-verify-failed-with-python3/43855394#43855394). +- Try different topics in chat or Q&A context. For chat, try follow up questions, clarifications, ask to simplify or elaborate on answer, etc. +- Explore citations and sources +- Click on "settings" to try different options, tweak prompts, etc. + +## Clean up + +To clean up all the resources created by this sample: + +1. Run `azd down` +2. When asked if you are sure you want to continue, enter `y` +3. When asked if you want to permanently delete the resources, enter `y` + +The resource group and all the resources will be deleted. 
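Beyond the browser UI described under "Using the app" above, you can also call the backend API directly while the development server is running. The sketch below posts a single question to the `/ask` route on the default local port (50505). It is a minimal illustration that assumes the default configuration in which authentication is not enforced; the exact response shape depends on which approach handles the request (see the `/ask`, `/chat`, and `/chat/stream` routes in `app/backend/app.py`).

```python
# Minimal sketch: send one question to the locally running backend's /ask route.
# Assumes ./app/start.sh (or start.ps1) is running on the default port 50505 and
# that authentication is not enforced in your environment.
import json
import urllib.request

payload = {
    "messages": [{"role": "user", "content": "What is included in my benefits plan?"}],
}
request = urllib.request.Request(
    "http://127.0.0.1:50505/ask",
    data=json.dumps(payload).encode("utf-8"),
    headers={"Content-Type": "application/json"},
    method="POST",
)
with urllib.request.urlopen(request) as response:
    # The JSON response contains the generated answer along with supporting context
    # (such as citations), depending on the configured approach.
    print(json.dumps(json.loads(response.read()), indent=2))
```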
+ +## Guidance + +You can find extensive documentation in the [docs](docs/README.md) folder: + +- Deploying: + - [Troubleshooting deployment](docs/deploy_troubleshooting.md) + - [Debugging the app on App Service](docs/appservice.md) + - [Deploying with azd: deep dive and CI/CD](docs/azd.md) + - [Deploying with existing Azure resources](docs/deploy_existing.md) + - [Deploying from a free account](docs/deploy_lowcost.md) + - [Enabling optional features](docs/deploy_features.md) + - [All features](docs/deploy_features.md) + - [Login and access control](docs/login_and_acl.md) + - [GPT-4 Turbo with Vision](docs/gpt4v.md) + - [Reasoning](docs/reasoning.md) + - [Private endpoints](docs/deploy_private.md) + - [Sharing deployment environments](docs/sharing_environments.md) +- [Local development](docs/localdev.md) +- [Customizing the app](docs/customization.md) +- [Data ingestion](docs/data_ingestion.md) +- [Evaluation](docs/evaluation.md) +- [Safety evaluation](docs/safety_evaluation.md) +- [Monitoring with Application Insights](docs/monitoring.md) +- [Productionizing](docs/productionizing.md) +- [Alternative RAG chat samples](docs/other_samples.md) + +### Resources + +- [📖 Docs: Get started using the chat with your data sample](https://learn.microsoft.com/azure/developer/python/get-started-app-chat-template?toc=%2Fazure%2Fdeveloper%2Fai%2Ftoc.json&bc=%2Fazure%2Fdeveloper%2Fai%2Fbreadcrumb%2Ftoc.json&tabs=github-codespaces) +- [📖 Blog: Revolutionize your Enterprise Data with ChatGPT: Next-gen Apps w/ Azure OpenAI and AI Search](https://techcommunity.microsoft.com/blog/azure-ai-services-blog/revolutionize-your-enterprise-data-with-chatgpt-next-gen-apps-w-azure-openai-and/3762087) +- [📖 Docs: Azure AI Search](https://learn.microsoft.com/azure/search/search-what-is-azure-search) +- [📖 Docs: Azure OpenAI Service](https://learn.microsoft.com/azure/cognitive-services/openai/overview) +- [📖 Docs: Comparing Azure OpenAI and OpenAI](https://learn.microsoft.com/azure/cognitive-services/openai/overview#comparing-azure-openai-and-openai/) +- [📖 Blog: Access Control in Generative AI applications with Azure AI Search](https://techcommunity.microsoft.com/blog/azure-ai-services-blog/access-control-in-generative-ai-applications-with-azure-ai-search/3956408) +- [📺 Talk: Quickly build and deploy OpenAI apps on Azure, infused with your own data](https://www.youtube.com/watch?v=j8i-OM5kwiY) +- [📺 Talks: AI Chat App Hack series](https://www.youtube.com/playlist?list=PL5lwDBUC0ag6_dGZst5m3G72ewfwXLcXV) + +### Getting help + +This is a sample built to demonstrate the capabilities of modern Generative AI apps and how they can be built in Azure. +For help with deploying this sample, please post in [GitHub Issues](/issues). If you're a Microsoft employee, you can also post in [our Teams channel](https://aka.ms/azai-python-help). + +This repository is supported by the maintainers, _not_ by Microsoft Support, +so please use the support mechanisms described above, and we will do our best to help you out. -1. After running `azd up` and visiting the website, you see a '404 Not Found' in the browser. Wait 10 minutes and try again, as it might be still starting up. Then try running `azd deploy` and wait again. If you still encounter errors with the deployed app, consult these [tips for debugging Flask app deployments](http://blog.pamelafox.org/2023/06/tips-for-debugging-flask-deployments-to.html) -and file an issue if the error logs don't help you resolve the issue. 
+### Note + +>Note: The PDF documents used in this demo contain information generated using a language model (Azure OpenAI Service). The information contained in these documents is only for demonstration purposes and does not reflect the opinions or beliefs of Microsoft. Microsoft makes no representations or warranties of any kind, express or implied, about the completeness, accuracy, reliability, suitability or availability with respect to the information contained in this document. All rights reserved to Microsoft. diff --git a/SECURITY.md b/SECURITY.md new file mode 100644 index 0000000000..388e9ad471 --- /dev/null +++ b/SECURITY.md @@ -0,0 +1,41 @@ + + +## Security + +Microsoft takes the security of our software products and services seriously, which includes all source code repositories managed through our GitHub organizations, which include [Microsoft](https://github.com/Microsoft), [Azure](https://github.com/Azure), [DotNet](https://github.com/dotnet), [AspNet](https://github.com/aspnet), [Xamarin](https://github.com/xamarin), and [our GitHub organizations](https://opensource.microsoft.com/). + +If you believe you have found a security vulnerability in any Microsoft-owned repository that meets [Microsoft's definition of a security vulnerability](), please report it to us as described below. + +## Reporting Security Issues + +**Please do not report security vulnerabilities through public GitHub issues.** + +Instead, please report them to the Microsoft Security Response Center (MSRC) at [https://msrc.microsoft.com/create-report](https://msrc.microsoft.com/create-report). + +If you prefer to submit without logging in, send email to [secure@microsoft.com](mailto:secure@microsoft.com). If possible, encrypt your message with our PGP key; please download it from the [Microsoft Security Response Center PGP Key page](https://www.microsoft.com/msrc/pgp-key-msrc). + +You should receive a response within 24 hours. If for some reason you do not, please follow up via email to ensure we received your original message. Additional information can be found at [microsoft.com/msrc](https://www.microsoft.com/msrc). + +Please include the requested information listed below (as much as you can provide) to help us better understand the nature and scope of the possible issue: + +- Type of issue (e.g. buffer overflow, SQL injection, cross-site scripting, etc.) +- Full paths of source file(s) related to the manifestation of the issue +- The location of the affected source code (tag/branch/commit or direct URL) +- Any special configuration required to reproduce the issue +- Step-by-step instructions to reproduce the issue +- Proof-of-concept or exploit code (if possible) +- Impact of the issue, including how an attacker might exploit the issue + +This information will help us triage your report more quickly. + +If you are reporting for a bug bounty, more complete reports can contribute to a higher bounty award. Please visit our [Microsoft Bug Bounty Program](https://microsoft.com/msrc/bounty) page for more details about our active programs. + +## Preferred Languages + +We prefer all communications to be in English. + +## Policy + +Microsoft follows the principle of [Coordinated Vulnerability Disclosure](https://www.microsoft.com/msrc/cvd). 
+ + diff --git a/app/backend/.dockerignore b/app/backend/.dockerignore new file mode 100644 index 0000000000..9008115fc8 --- /dev/null +++ b/app/backend/.dockerignore @@ -0,0 +1,7 @@ +.git +__pycache__ +*.pyc +*.pyo +*.pyd +.Python +env diff --git a/app/backend/Dockerfile b/app/backend/Dockerfile new file mode 100644 index 0000000000..a84bd6e0b7 --- /dev/null +++ b/app/backend/Dockerfile @@ -0,0 +1,11 @@ +FROM python:3.11-bullseye + +WORKDIR /app + +COPY ./ /app + +RUN python -m pip install -r requirements.txt + +RUN python -m pip install gunicorn + +CMD ["python3", "-m", "gunicorn", "-b", "0.0.0.0:8000", "main:app"] diff --git a/app/backend/app.py b/app/backend/app.py index 97dff7d737..1b4563bb98 100644 --- a/app/backend/app.py +++ b/app/backend/app.py @@ -1,195 +1,836 @@ +import dataclasses import io +import json import logging import mimetypes import os import time +from collections.abc import AsyncGenerator +from pathlib import Path +from typing import Any, Union, cast -import openai -from azure.identity import DefaultAzureCredential -from azure.search.documents import SearchClient -from azure.storage.blob import BlobServiceClient -from flask import ( +from azure.cognitiveservices.speech import ( + ResultReason, + SpeechConfig, + SpeechSynthesisOutputFormat, + SpeechSynthesisResult, + SpeechSynthesizer, +) +from azure.core.exceptions import ResourceNotFoundError +from azure.identity.aio import ( + AzureDeveloperCliCredential, + ManagedIdentityCredential, + get_bearer_token_provider, +) +from azure.monitor.opentelemetry import configure_azure_monitor +from azure.search.documents.agent.aio import KnowledgeAgentRetrievalClient +from azure.search.documents.aio import SearchClient +from azure.search.documents.indexes.aio import SearchIndexClient +from azure.storage.blob.aio import ContainerClient +from azure.storage.blob.aio import StorageStreamDownloader as BlobDownloader +from azure.storage.filedatalake.aio import FileSystemClient +from azure.storage.filedatalake.aio import StorageStreamDownloader as DatalakeDownloader +from openai import AsyncAzureOpenAI, AsyncOpenAI +from opentelemetry.instrumentation.aiohttp_client import AioHttpClientInstrumentor +from opentelemetry.instrumentation.asgi import OpenTelemetryMiddleware +from opentelemetry.instrumentation.httpx import ( + HTTPXClientInstrumentor, +) +from opentelemetry.instrumentation.openai import OpenAIInstrumentor +from quart import ( Blueprint, - Flask, + Quart, abort, current_app, jsonify, + make_response, request, send_file, send_from_directory, ) +from quart_cors import cors +from approaches.approach import Approach from approaches.chatreadretrieveread import ChatReadRetrieveReadApproach -from approaches.readdecomposeask import ReadDecomposeAsk -from approaches.readretrieveread import ReadRetrieveReadApproach +from approaches.chatreadretrievereadvision import ChatReadRetrieveReadVisionApproach +from approaches.promptmanager import PromptyManager from approaches.retrievethenread import RetrieveThenReadApproach +from approaches.retrievethenreadvision import RetrieveThenReadVisionApproach +from chat_history.cosmosdb import chat_history_cosmosdb_bp +from config import ( + CONFIG_AGENT_CLIENT, + CONFIG_AGENTIC_RETRIEVAL_ENABLED, + CONFIG_ASK_APPROACH, + CONFIG_ASK_VISION_APPROACH, + CONFIG_AUTH_CLIENT, + CONFIG_BLOB_CONTAINER_CLIENT, + CONFIG_CHAT_APPROACH, + CONFIG_CHAT_HISTORY_BROWSER_ENABLED, + CONFIG_CHAT_HISTORY_COSMOS_ENABLED, + CONFIG_CHAT_VISION_APPROACH, + CONFIG_CREDENTIAL, + CONFIG_DEFAULT_REASONING_EFFORT, + 
CONFIG_GPT4V_DEPLOYED, + CONFIG_INGESTER, + CONFIG_LANGUAGE_PICKER_ENABLED, + CONFIG_OPENAI_CLIENT, + CONFIG_QUERY_REWRITING_ENABLED, + CONFIG_REASONING_EFFORT_ENABLED, + CONFIG_SEARCH_CLIENT, + CONFIG_SEMANTIC_RANKER_DEPLOYED, + CONFIG_SPEECH_INPUT_ENABLED, + CONFIG_SPEECH_OUTPUT_AZURE_ENABLED, + CONFIG_SPEECH_OUTPUT_BROWSER_ENABLED, + CONFIG_SPEECH_SERVICE_ID, + CONFIG_SPEECH_SERVICE_LOCATION, + CONFIG_SPEECH_SERVICE_TOKEN, + CONFIG_SPEECH_SERVICE_VOICE, + CONFIG_STREAMING_ENABLED, + CONFIG_USER_BLOB_CONTAINER_CLIENT, + CONFIG_USER_UPLOAD_ENABLED, + CONFIG_VECTOR_SEARCH_ENABLED, +) +from core.authentication import AuthenticationHelper +from core.sessionhelper import create_session_id +from decorators import authenticated, authenticated_path +from error import error_dict, error_response +from prepdocs import ( + clean_key_if_exists, + setup_embeddings_service, + setup_file_processors, + setup_search_info, +) +from prepdocslib.filestrategy import UploadUserFileStrategy +from prepdocslib.listfilestrategy import File -# Replace these with your own values, either in environment variables or directly here -AZURE_STORAGE_ACCOUNT = os.getenv("AZURE_STORAGE_ACCOUNT", "mystorageaccount") -AZURE_STORAGE_CONTAINER = os.getenv("AZURE_STORAGE_CONTAINER", "content") -AZURE_SEARCH_SERVICE = os.getenv("AZURE_SEARCH_SERVICE", "gptkb") -AZURE_SEARCH_INDEX = os.getenv("AZURE_SEARCH_INDEX", "gptkbindex") -AZURE_OPENAI_SERVICE = os.getenv("AZURE_OPENAI_SERVICE", "myopenai") -AZURE_OPENAI_GPT_DEPLOYMENT = os.getenv("AZURE_OPENAI_GPT_DEPLOYMENT", "davinci") -AZURE_OPENAI_CHATGPT_DEPLOYMENT = os.getenv("AZURE_OPENAI_CHATGPT_DEPLOYMENT", "chat") -AZURE_OPENAI_CHATGPT_MODEL = os.getenv("AZURE_OPENAI_CHATGPT_MODEL", "gpt-35-turbo") -AZURE_OPENAI_EMB_DEPLOYMENT = os.getenv("AZURE_OPENAI_EMB_DEPLOYMENT", "embedding") +bp = Blueprint("routes", __name__, static_folder="static") +# Fix Windows registry issue with mimetypes +mimetypes.add_type("application/javascript", ".js") +mimetypes.add_type("text/css", ".css") -KB_FIELDS_CONTENT = os.getenv("KB_FIELDS_CONTENT", "content") -KB_FIELDS_CATEGORY = os.getenv("KB_FIELDS_CATEGORY", "category") -KB_FIELDS_SOURCEPAGE = os.getenv("KB_FIELDS_SOURCEPAGE", "sourcepage") -CONFIG_OPENAI_TOKEN = "openai_token" -CONFIG_CREDENTIAL = "azure_credential" -CONFIG_ASK_APPROACHES = "ask_approaches" -CONFIG_CHAT_APPROACHES = "chat_approaches" -CONFIG_BLOB_CLIENT = "blob_client" +@bp.route("/") +async def index(): + return await bp.send_static_file("index.html") -bp = Blueprint("routes", __name__, static_folder='static') +# Empty page is recommended for login redirect to work. +# See https://github.com/AzureAD/microsoft-authentication-library-for-js/blob/dev/lib/msal-browser/docs/initialization.md#redirecturi-considerations for more information +@bp.route("/redirect") +async def redirect(): + return "" -@bp.route("/") -def index(): - return bp.send_static_file("index.html") @bp.route("/favicon.ico") -def favicon(): - return bp.send_static_file("favicon.ico") +async def favicon(): + return await bp.send_static_file("favicon.ico") + @bp.route("/assets/") -def assets(path): - return send_from_directory("static/assets", path) +async def assets(path): + return await send_from_directory(Path(__file__).resolve().parent / "static" / "assets", path) + -# Serve content files from blob storage from within the app to keep the example self-contained. -# *** NOTE *** this assumes that the content files are public, or at least that all users of the app -# can access all the files. 
This is also slow and memory hungry. @bp.route("/content/") -def content_file(path): - blob_container = current_app.config[CONFIG_BLOB_CLIENT].get_container_client(AZURE_STORAGE_CONTAINER) - blob = blob_container.get_blob_client(path).download_blob() +@authenticated_path +async def content_file(path: str, auth_claims: dict[str, Any]): + """ + Serve content files from blob storage from within the app to keep the example self-contained. + *** NOTE *** if you are using app services authentication, this route will return unauthorized to all users that are not logged in + if AZURE_ENFORCE_ACCESS_CONTROL is not set or false, logged in users can access all files regardless of access control + if AZURE_ENFORCE_ACCESS_CONTROL is set to true, logged in users can only access files they have access to + This is also slow and memory hungry. + """ + # Remove page number from path, filename-1.txt -> filename.txt + # This shouldn't typically be necessary as browsers don't send hash fragments to servers + if path.find("#page=") > 0: + path_parts = path.rsplit("#page=", 1) + path = path_parts[0] + current_app.logger.info("Opening file %s", path) + blob_container_client: ContainerClient = current_app.config[CONFIG_BLOB_CONTAINER_CLIENT] + blob: Union[BlobDownloader, DatalakeDownloader] + try: + blob = await blob_container_client.get_blob_client(path).download_blob() + except ResourceNotFoundError: + current_app.logger.info("Path not found in general Blob container: %s", path) + if current_app.config[CONFIG_USER_UPLOAD_ENABLED]: + try: + user_oid = auth_claims["oid"] + user_blob_container_client = current_app.config[CONFIG_USER_BLOB_CONTAINER_CLIENT] + user_directory_client: FileSystemClient = user_blob_container_client.get_directory_client(user_oid) + file_client = user_directory_client.get_file_client(path) + blob = await file_client.download_file() + except ResourceNotFoundError: + current_app.logger.exception("Path not found in DataLake: %s", path) + abort(404) + else: + abort(404) if not blob.properties or not blob.properties.has_key("content_settings"): abort(404) mime_type = blob.properties["content_settings"]["content_type"] if mime_type == "application/octet-stream": mime_type = mimetypes.guess_type(path)[0] or "application/octet-stream" blob_file = io.BytesIO() - blob.readinto(blob_file) + await blob.readinto(blob_file) blob_file.seek(0) - return send_file(blob_file, mimetype=mime_type, as_attachment=False, download_name=path) + return await send_file(blob_file, mimetype=mime_type, as_attachment=False, attachment_filename=path) + @bp.route("/ask", methods=["POST"]) -def ask(): +@authenticated +async def ask(auth_claims: dict[str, Any]): if not request.is_json: return jsonify({"error": "request must be json"}), 415 - approach = request.json["approach"] + request_json = await request.get_json() + context = request_json.get("context", {}) + context["auth_claims"] = auth_claims try: - impl = current_app.config[CONFIG_ASK_APPROACHES].get(approach) - if not impl: - return jsonify({"error": "unknown approach"}), 400 - r = impl.run(request.json["question"], request.json.get("overrides") or {}) + use_gpt4v = context.get("overrides", {}).get("use_gpt4v", False) + approach: Approach + if use_gpt4v and CONFIG_ASK_VISION_APPROACH in current_app.config: + approach = cast(Approach, current_app.config[CONFIG_ASK_VISION_APPROACH]) + else: + approach = cast(Approach, current_app.config[CONFIG_ASK_APPROACH]) + r = await approach.run( + request_json["messages"], context=context, 
session_state=request_json.get("session_state") + ) return jsonify(r) - except Exception as e: - logging.exception("Exception in /ask") - return jsonify({"error": str(e)}), 500 + except Exception as error: + return error_response(error, "/ask") + + +class JSONEncoder(json.JSONEncoder): + def default(self, o): + if dataclasses.is_dataclass(o) and not isinstance(o, type): + return dataclasses.asdict(o) + return super().default(o) + + +async def format_as_ndjson(r: AsyncGenerator[dict, None]) -> AsyncGenerator[str, None]: + try: + async for event in r: + yield json.dumps(event, ensure_ascii=False, cls=JSONEncoder) + "\n" + except Exception as error: + logging.exception("Exception while generating response stream: %s", error) + yield json.dumps(error_dict(error)) + @bp.route("/chat", methods=["POST"]) -def chat(): +@authenticated +async def chat(auth_claims: dict[str, Any]): if not request.is_json: return jsonify({"error": "request must be json"}), 415 - approach = request.json["approach"] + request_json = await request.get_json() + context = request_json.get("context", {}) + context["auth_claims"] = auth_claims try: - impl = current_app.config[CONFIG_CHAT_APPROACHES].get(approach) - if not impl: - return jsonify({"error": "unknown approach"}), 400 - r = impl.run(request.json["history"], request.json.get("overrides") or {}) - return jsonify(r) + use_gpt4v = context.get("overrides", {}).get("use_gpt4v", False) + approach: Approach + if use_gpt4v and CONFIG_CHAT_VISION_APPROACH in current_app.config: + approach = cast(Approach, current_app.config[CONFIG_CHAT_VISION_APPROACH]) + else: + approach = cast(Approach, current_app.config[CONFIG_CHAT_APPROACH]) + + # If session state is provided, persists the session state, + # else creates a new session_id depending on the chat history options enabled. + session_state = request_json.get("session_state") + if session_state is None: + session_state = create_session_id( + current_app.config[CONFIG_CHAT_HISTORY_COSMOS_ENABLED], + current_app.config[CONFIG_CHAT_HISTORY_BROWSER_ENABLED], + ) + result = await approach.run( + request_json["messages"], + context=context, + session_state=session_state, + ) + return jsonify(result) + except Exception as error: + return error_response(error, "/chat") + + +@bp.route("/chat/stream", methods=["POST"]) +@authenticated +async def chat_stream(auth_claims: dict[str, Any]): + if not request.is_json: + return jsonify({"error": "request must be json"}), 415 + request_json = await request.get_json() + context = request_json.get("context", {}) + context["auth_claims"] = auth_claims + try: + use_gpt4v = context.get("overrides", {}).get("use_gpt4v", False) + approach: Approach + if use_gpt4v and CONFIG_CHAT_VISION_APPROACH in current_app.config: + approach = cast(Approach, current_app.config[CONFIG_CHAT_VISION_APPROACH]) + else: + approach = cast(Approach, current_app.config[CONFIG_CHAT_APPROACH]) + + # If session state is provided, persists the session state, + # else creates a new session_id depending on the chat history options enabled. 
+ session_state = request_json.get("session_state") + if session_state is None: + session_state = create_session_id( + current_app.config[CONFIG_CHAT_HISTORY_COSMOS_ENABLED], + current_app.config[CONFIG_CHAT_HISTORY_BROWSER_ENABLED], + ) + result = await approach.run_stream( + request_json["messages"], + context=context, + session_state=session_state, + ) + response = await make_response(format_as_ndjson(result)) + response.timeout = None # type: ignore + response.mimetype = "application/json-lines" + return response + except Exception as error: + return error_response(error, "/chat") + + +# Send MSAL.js settings to the client UI +@bp.route("/auth_setup", methods=["GET"]) +def auth_setup(): + auth_helper = current_app.config[CONFIG_AUTH_CLIENT] + return jsonify(auth_helper.get_auth_setup_for_client()) + + +@bp.route("/config", methods=["GET"]) +def config(): + return jsonify( + { + "showGPT4VOptions": current_app.config[CONFIG_GPT4V_DEPLOYED], + "showSemanticRankerOption": current_app.config[CONFIG_SEMANTIC_RANKER_DEPLOYED], + "showQueryRewritingOption": current_app.config[CONFIG_QUERY_REWRITING_ENABLED], + "showReasoningEffortOption": current_app.config[CONFIG_REASONING_EFFORT_ENABLED], + "streamingEnabled": current_app.config[CONFIG_STREAMING_ENABLED], + "defaultReasoningEffort": current_app.config[CONFIG_DEFAULT_REASONING_EFFORT], + "showVectorOption": current_app.config[CONFIG_VECTOR_SEARCH_ENABLED], + "showUserUpload": current_app.config[CONFIG_USER_UPLOAD_ENABLED], + "showLanguagePicker": current_app.config[CONFIG_LANGUAGE_PICKER_ENABLED], + "showSpeechInput": current_app.config[CONFIG_SPEECH_INPUT_ENABLED], + "showSpeechOutputBrowser": current_app.config[CONFIG_SPEECH_OUTPUT_BROWSER_ENABLED], + "showSpeechOutputAzure": current_app.config[CONFIG_SPEECH_OUTPUT_AZURE_ENABLED], + "showChatHistoryBrowser": current_app.config[CONFIG_CHAT_HISTORY_BROWSER_ENABLED], + "showChatHistoryCosmos": current_app.config[CONFIG_CHAT_HISTORY_COSMOS_ENABLED], + "showAgenticRetrievalOption": current_app.config[CONFIG_AGENTIC_RETRIEVAL_ENABLED], + } + ) + + +@bp.route("/speech", methods=["POST"]) +async def speech(): + if not request.is_json: + return jsonify({"error": "request must be json"}), 415 + + speech_token = current_app.config.get(CONFIG_SPEECH_SERVICE_TOKEN) + if speech_token is None or speech_token.expires_on < time.time() + 60: + speech_token = await current_app.config[CONFIG_CREDENTIAL].get_token( + "https://cognitiveservices.azure.com/.default" + ) + current_app.config[CONFIG_SPEECH_SERVICE_TOKEN] = speech_token + + request_json = await request.get_json() + text = request_json["text"] + try: + # Construct a token as described in documentation: + # https://learn.microsoft.com/azure/ai-services/speech-service/how-to-configure-azure-ad-auth?pivots=programming-language-python + auth_token = ( + "aad#" + + current_app.config[CONFIG_SPEECH_SERVICE_ID] + + "#" + + current_app.config[CONFIG_SPEECH_SERVICE_TOKEN].token + ) + speech_config = SpeechConfig(auth_token=auth_token, region=current_app.config[CONFIG_SPEECH_SERVICE_LOCATION]) + speech_config.speech_synthesis_voice_name = current_app.config[CONFIG_SPEECH_SERVICE_VOICE] + speech_config.speech_synthesis_output_format = SpeechSynthesisOutputFormat.Audio16Khz32KBitRateMonoMp3 + synthesizer = SpeechSynthesizer(speech_config=speech_config, audio_config=None) + result: SpeechSynthesisResult = synthesizer.speak_text_async(text).get() + if result.reason == ResultReason.SynthesizingAudioCompleted: + return result.audio_data, 200, {"Content-Type": 
"audio/mp3"} + elif result.reason == ResultReason.Canceled: + cancellation_details = result.cancellation_details + current_app.logger.error( + "Speech synthesis canceled: %s %s", cancellation_details.reason, cancellation_details.error_details + ) + raise Exception("Speech synthesis canceled. Check logs for details.") + else: + current_app.logger.error("Unexpected result reason: %s", result.reason) + raise Exception("Speech synthesis failed. Check logs for details.") except Exception as e: - logging.exception("Exception in /chat") + current_app.logger.exception("Exception in /speech") return jsonify({"error": str(e)}), 500 -@bp.before_request -def ensure_openai_token(): - openai_token = current_app.config[CONFIG_OPENAI_TOKEN] - if openai_token.expires_on < time.time() + 60: - openai_token = current_app.config[CONFIG_CREDENTIAL].get_token("https://cognitiveservices.azure.com/.default") - current_app.config[CONFIG_OPENAI_TOKEN] = openai_token - openai.api_key = openai_token.token +@bp.post("/upload") +@authenticated +async def upload(auth_claims: dict[str, Any]): + request_files = await request.files + if "file" not in request_files: + # If no files were included in the request, return an error response + return jsonify({"message": "No file part in the request", "status": "failed"}), 400 -def create_app(): - app = Flask(__name__) + user_oid = auth_claims["oid"] + file = request_files.getlist("file")[0] + user_blob_container_client: FileSystemClient = current_app.config[CONFIG_USER_BLOB_CONTAINER_CLIENT] + user_directory_client = user_blob_container_client.get_directory_client(user_oid) + try: + await user_directory_client.get_directory_properties() + except ResourceNotFoundError: + current_app.logger.info("Creating directory for user %s", user_oid) + await user_directory_client.create_directory() + await user_directory_client.set_access_control(owner=user_oid) + file_client = user_directory_client.get_file_client(file.filename) + file_io = file + file_io.name = file.filename + file_io = io.BufferedReader(file_io) + await file_client.upload_data(file_io, overwrite=True, metadata={"UploadedBy": user_oid}) + file_io.seek(0) + ingester: UploadUserFileStrategy = current_app.config[CONFIG_INGESTER] + await ingester.add_file(File(content=file_io, acls={"oids": [user_oid]}, url=file_client.url)) + return jsonify({"message": "File uploaded successfully"}), 200 - # Use the current user identity to authenticate with Azure OpenAI, Cognitive Search and Blob Storage (no secrets needed, - # just use 'az login' locally, and managed identity when deployed on Azure). If you need to use keys, use separate AzureKeyCredential instances with the - # keys for each service - # If you encounter a blocking error during a DefaultAzureCredntial resolution, you can exclude the problematic credential by using a parameter (ex. 
exclude_shared_token_cache_credential=True) - azure_credential = DefaultAzureCredential(exclude_shared_token_cache_credential = True) - # Set up clients for Cognitive Search and Storage +@bp.post("/delete_uploaded") +@authenticated +async def delete_uploaded(auth_claims: dict[str, Any]): + request_json = await request.get_json() + filename = request_json.get("filename") + user_oid = auth_claims["oid"] + user_blob_container_client: FileSystemClient = current_app.config[CONFIG_USER_BLOB_CONTAINER_CLIENT] + user_directory_client = user_blob_container_client.get_directory_client(user_oid) + file_client = user_directory_client.get_file_client(filename) + await file_client.delete_file() + ingester = current_app.config[CONFIG_INGESTER] + await ingester.remove_file(filename, user_oid) + return jsonify({"message": f"File {filename} deleted successfully"}), 200 + + +@bp.get("/list_uploaded") +@authenticated +async def list_uploaded(auth_claims: dict[str, Any]): + user_oid = auth_claims["oid"] + user_blob_container_client: FileSystemClient = current_app.config[CONFIG_USER_BLOB_CONTAINER_CLIENT] + files = [] + try: + all_paths = user_blob_container_client.get_paths(path=user_oid) + async for path in all_paths: + files.append(path.name.split("/", 1)[1]) + except ResourceNotFoundError as error: + if error.status_code != 404: + current_app.logger.exception("Error listing uploaded files", error) + return jsonify(files), 200 + + +@bp.before_app_serving +async def setup_clients(): + # Replace these with your own values, either in environment variables or directly here + AZURE_STORAGE_ACCOUNT = os.environ["AZURE_STORAGE_ACCOUNT"] + AZURE_STORAGE_CONTAINER = os.environ["AZURE_STORAGE_CONTAINER"] + AZURE_USERSTORAGE_ACCOUNT = os.environ.get("AZURE_USERSTORAGE_ACCOUNT") + AZURE_USERSTORAGE_CONTAINER = os.environ.get("AZURE_USERSTORAGE_CONTAINER") + AZURE_SEARCH_SERVICE = os.environ["AZURE_SEARCH_SERVICE"] + AZURE_SEARCH_ENDPOINT = f"https://{AZURE_SEARCH_SERVICE}.search.windows.net" + AZURE_SEARCH_INDEX = os.environ["AZURE_SEARCH_INDEX"] + AZURE_SEARCH_AGENT = os.getenv("AZURE_SEARCH_AGENT", "") + # Shared by all OpenAI deployments + OPENAI_HOST = os.getenv("OPENAI_HOST", "azure") + OPENAI_CHATGPT_MODEL = os.environ["AZURE_OPENAI_CHATGPT_MODEL"] + AZURE_OPENAI_SEARCHAGENT_MODEL = os.getenv("AZURE_OPENAI_SEARCHAGENT_MODEL") + AZURE_OPENAI_SEARCHAGENT_DEPLOYMENT = os.getenv("AZURE_OPENAI_SEARCHAGENT_DEPLOYMENT") + OPENAI_EMB_MODEL = os.getenv("AZURE_OPENAI_EMB_MODEL_NAME", "text-embedding-ada-002") + OPENAI_EMB_DIMENSIONS = int(os.getenv("AZURE_OPENAI_EMB_DIMENSIONS") or 1536) + OPENAI_REASONING_EFFORT = os.getenv("AZURE_OPENAI_REASONING_EFFORT") + # Used with Azure OpenAI deployments + AZURE_OPENAI_SERVICE = os.getenv("AZURE_OPENAI_SERVICE") + AZURE_OPENAI_GPT4V_DEPLOYMENT = os.environ.get("AZURE_OPENAI_GPT4V_DEPLOYMENT") + AZURE_OPENAI_GPT4V_MODEL = os.environ.get("AZURE_OPENAI_GPT4V_MODEL") + AZURE_OPENAI_CHATGPT_DEPLOYMENT = ( + os.getenv("AZURE_OPENAI_CHATGPT_DEPLOYMENT") if OPENAI_HOST.startswith("azure") else None + ) + AZURE_OPENAI_EMB_DEPLOYMENT = os.getenv("AZURE_OPENAI_EMB_DEPLOYMENT") if OPENAI_HOST.startswith("azure") else None + AZURE_OPENAI_CUSTOM_URL = os.getenv("AZURE_OPENAI_CUSTOM_URL") + # https://learn.microsoft.com/azure/ai-services/openai/api-version-deprecation#latest-ga-api-release + AZURE_OPENAI_API_VERSION = os.getenv("AZURE_OPENAI_API_VERSION") or "2024-10-21" + AZURE_VISION_ENDPOINT = os.getenv("AZURE_VISION_ENDPOINT", "") + # Used only with non-Azure OpenAI deployments + 
OPENAI_API_KEY = os.getenv("OPENAI_API_KEY") + OPENAI_ORGANIZATION = os.getenv("OPENAI_ORGANIZATION") + + AZURE_TENANT_ID = os.getenv("AZURE_TENANT_ID") + AZURE_USE_AUTHENTICATION = os.getenv("AZURE_USE_AUTHENTICATION", "").lower() == "true" + AZURE_ENFORCE_ACCESS_CONTROL = os.getenv("AZURE_ENFORCE_ACCESS_CONTROL", "").lower() == "true" + AZURE_ENABLE_GLOBAL_DOCUMENT_ACCESS = os.getenv("AZURE_ENABLE_GLOBAL_DOCUMENT_ACCESS", "").lower() == "true" + AZURE_ENABLE_UNAUTHENTICATED_ACCESS = os.getenv("AZURE_ENABLE_UNAUTHENTICATED_ACCESS", "").lower() == "true" + AZURE_SERVER_APP_ID = os.getenv("AZURE_SERVER_APP_ID") + AZURE_SERVER_APP_SECRET = os.getenv("AZURE_SERVER_APP_SECRET") + AZURE_CLIENT_APP_ID = os.getenv("AZURE_CLIENT_APP_ID") + AZURE_AUTH_TENANT_ID = os.getenv("AZURE_AUTH_TENANT_ID", AZURE_TENANT_ID) + + KB_FIELDS_CONTENT = os.getenv("KB_FIELDS_CONTENT", "content") + KB_FIELDS_SOURCEPAGE = os.getenv("KB_FIELDS_SOURCEPAGE", "sourcepage") + + AZURE_SEARCH_QUERY_LANGUAGE = os.getenv("AZURE_SEARCH_QUERY_LANGUAGE") or "en-us" + AZURE_SEARCH_QUERY_SPELLER = os.getenv("AZURE_SEARCH_QUERY_SPELLER") or "lexicon" + AZURE_SEARCH_SEMANTIC_RANKER = os.getenv("AZURE_SEARCH_SEMANTIC_RANKER", "free").lower() + AZURE_SEARCH_QUERY_REWRITING = os.getenv("AZURE_SEARCH_QUERY_REWRITING", "false").lower() + # This defaults to the previous field name "embedding", for backwards compatibility + AZURE_SEARCH_FIELD_NAME_EMBEDDING = os.getenv("AZURE_SEARCH_FIELD_NAME_EMBEDDING", "embedding") + + AZURE_SPEECH_SERVICE_ID = os.getenv("AZURE_SPEECH_SERVICE_ID") + AZURE_SPEECH_SERVICE_LOCATION = os.getenv("AZURE_SPEECH_SERVICE_LOCATION") + AZURE_SPEECH_SERVICE_VOICE = os.getenv("AZURE_SPEECH_SERVICE_VOICE") or "en-US-AndrewMultilingualNeural" + + USE_GPT4V = os.getenv("USE_GPT4V", "").lower() == "true" + USE_USER_UPLOAD = os.getenv("USE_USER_UPLOAD", "").lower() == "true" + ENABLE_LANGUAGE_PICKER = os.getenv("ENABLE_LANGUAGE_PICKER", "").lower() == "true" + USE_SPEECH_INPUT_BROWSER = os.getenv("USE_SPEECH_INPUT_BROWSER", "").lower() == "true" + USE_SPEECH_OUTPUT_BROWSER = os.getenv("USE_SPEECH_OUTPUT_BROWSER", "").lower() == "true" + USE_SPEECH_OUTPUT_AZURE = os.getenv("USE_SPEECH_OUTPUT_AZURE", "").lower() == "true" + USE_CHAT_HISTORY_BROWSER = os.getenv("USE_CHAT_HISTORY_BROWSER", "").lower() == "true" + USE_CHAT_HISTORY_COSMOS = os.getenv("USE_CHAT_HISTORY_COSMOS", "").lower() == "true" + USE_AGENTIC_RETRIEVAL = os.getenv("USE_AGENTIC_RETRIEVAL", "").lower() == "true" + + # WEBSITE_HOSTNAME is always set by App Service, RUNNING_IN_PRODUCTION is set in main.bicep + RUNNING_ON_AZURE = os.getenv("WEBSITE_HOSTNAME") is not None or os.getenv("RUNNING_IN_PRODUCTION") is not None + + # Use the current user identity for keyless authentication to Azure services. + # This assumes you use 'azd auth login' locally, and managed identity when deployed on Azure. + # The managed identity is setup in the infra/ folder. + azure_credential: Union[AzureDeveloperCliCredential, ManagedIdentityCredential] + if RUNNING_ON_AZURE: + current_app.logger.info("Setting up Azure credential using ManagedIdentityCredential") + if AZURE_CLIENT_ID := os.getenv("AZURE_CLIENT_ID"): + # ManagedIdentityCredential should use AZURE_CLIENT_ID if set in env, but its not working for some reason, + # so we explicitly pass it in as the client ID here. This is necessary for user-assigned managed identities. 
+ current_app.logger.info( + "Setting up Azure credential using ManagedIdentityCredential with client_id %s", AZURE_CLIENT_ID + ) + azure_credential = ManagedIdentityCredential(client_id=AZURE_CLIENT_ID) + else: + current_app.logger.info("Setting up Azure credential using ManagedIdentityCredential") + azure_credential = ManagedIdentityCredential() + elif AZURE_TENANT_ID: + current_app.logger.info( + "Setting up Azure credential using AzureDeveloperCliCredential with tenant_id %s", AZURE_TENANT_ID + ) + azure_credential = AzureDeveloperCliCredential(tenant_id=AZURE_TENANT_ID, process_timeout=60) + else: + current_app.logger.info("Setting up Azure credential using AzureDeveloperCliCredential for home tenant") + azure_credential = AzureDeveloperCliCredential(process_timeout=60) + + # Set the Azure credential in the app config for use in other parts of the app + current_app.config[CONFIG_CREDENTIAL] = azure_credential + + # Set up clients for AI Search and Storage search_client = SearchClient( - endpoint=f"https://{AZURE_SEARCH_SERVICE}.search.windows.net", + endpoint=AZURE_SEARCH_ENDPOINT, index_name=AZURE_SEARCH_INDEX, - credential=azure_credential) - blob_client = BlobServiceClient( - account_url=f"https://{AZURE_STORAGE_ACCOUNT}.blob.core.windows.net", - credential=azure_credential) + credential=azure_credential, + ) + agent_client = KnowledgeAgentRetrievalClient( + endpoint=AZURE_SEARCH_ENDPOINT, agent_name=AZURE_SEARCH_AGENT, credential=azure_credential + ) + + blob_container_client = ContainerClient( + f"https://{AZURE_STORAGE_ACCOUNT}.blob.core.windows.net", AZURE_STORAGE_CONTAINER, credential=azure_credential + ) + + # Set up authentication helper + search_index = None + if AZURE_USE_AUTHENTICATION: + current_app.logger.info("AZURE_USE_AUTHENTICATION is true, setting up search index client") + search_index_client = SearchIndexClient( + endpoint=AZURE_SEARCH_ENDPOINT, + credential=azure_credential, + ) + search_index = await search_index_client.get_index(AZURE_SEARCH_INDEX) + await search_index_client.close() + auth_helper = AuthenticationHelper( + search_index=search_index, + use_authentication=AZURE_USE_AUTHENTICATION, + server_app_id=AZURE_SERVER_APP_ID, + server_app_secret=AZURE_SERVER_APP_SECRET, + client_app_id=AZURE_CLIENT_APP_ID, + tenant_id=AZURE_AUTH_TENANT_ID, + require_access_control=AZURE_ENFORCE_ACCESS_CONTROL, + enable_global_documents=AZURE_ENABLE_GLOBAL_DOCUMENT_ACCESS, + enable_unauthenticated_access=AZURE_ENABLE_UNAUTHENTICATED_ACCESS, + ) + + if USE_USER_UPLOAD: + current_app.logger.info("USE_USER_UPLOAD is true, setting up user upload feature") + if not AZURE_USERSTORAGE_ACCOUNT or not AZURE_USERSTORAGE_CONTAINER: + raise ValueError( + "AZURE_USERSTORAGE_ACCOUNT and AZURE_USERSTORAGE_CONTAINER must be set when USE_USER_UPLOAD is true" + ) + user_blob_container_client = FileSystemClient( + f"https://{AZURE_USERSTORAGE_ACCOUNT}.dfs.core.windows.net", + AZURE_USERSTORAGE_CONTAINER, + credential=azure_credential, + ) + current_app.config[CONFIG_USER_BLOB_CONTAINER_CLIENT] = user_blob_container_client + + # Set up ingester + file_processors = setup_file_processors( + azure_credential=azure_credential, + document_intelligence_service=os.getenv("AZURE_DOCUMENTINTELLIGENCE_SERVICE"), + local_pdf_parser=os.getenv("USE_LOCAL_PDF_PARSER", "").lower() == "true", + local_html_parser=os.getenv("USE_LOCAL_HTML_PARSER", "").lower() == "true", + search_images=USE_GPT4V, + ) + search_info = await setup_search_info( + search_service=AZURE_SEARCH_SERVICE, 
index_name=AZURE_SEARCH_INDEX, azure_credential=azure_credential + ) + text_embeddings_service = setup_embeddings_service( + azure_credential=azure_credential, + openai_host=OPENAI_HOST, + openai_model_name=OPENAI_EMB_MODEL, + openai_service=AZURE_OPENAI_SERVICE, + openai_custom_url=AZURE_OPENAI_CUSTOM_URL, + openai_deployment=AZURE_OPENAI_EMB_DEPLOYMENT, + openai_dimensions=OPENAI_EMB_DIMENSIONS, + openai_api_version=AZURE_OPENAI_API_VERSION, + openai_key=clean_key_if_exists(OPENAI_API_KEY), + openai_org=OPENAI_ORGANIZATION, + disable_vectors=os.getenv("USE_VECTORS", "").lower() == "false", + ) + ingester = UploadUserFileStrategy( + search_info=search_info, + embeddings=text_embeddings_service, + file_processors=file_processors, + search_field_name_embedding=AZURE_SEARCH_FIELD_NAME_EMBEDDING, + ) + current_app.config[CONFIG_INGESTER] = ingester # Used by the OpenAI SDK - openai.api_type = "azure" - openai.api_base = f"https://{AZURE_OPENAI_SERVICE}.openai.azure.com" - openai.api_version = "2023-05-15" - - # Comment these two lines out if using keys, set your API key in the OPENAI_API_KEY environment variable instead - openai.api_type = "azure_ad" - openai_token = azure_credential.get_token( - "https://cognitiveservices.azure.com/.default" + openai_client: AsyncOpenAI + + if USE_SPEECH_OUTPUT_AZURE: + current_app.logger.info("USE_SPEECH_OUTPUT_AZURE is true, setting up Azure speech service") + if not AZURE_SPEECH_SERVICE_ID or AZURE_SPEECH_SERVICE_ID == "": + raise ValueError("Azure speech resource not configured correctly, missing AZURE_SPEECH_SERVICE_ID") + if not AZURE_SPEECH_SERVICE_LOCATION or AZURE_SPEECH_SERVICE_LOCATION == "": + raise ValueError("Azure speech resource not configured correctly, missing AZURE_SPEECH_SERVICE_LOCATION") + current_app.config[CONFIG_SPEECH_SERVICE_ID] = AZURE_SPEECH_SERVICE_ID + current_app.config[CONFIG_SPEECH_SERVICE_LOCATION] = AZURE_SPEECH_SERVICE_LOCATION + current_app.config[CONFIG_SPEECH_SERVICE_VOICE] = AZURE_SPEECH_SERVICE_VOICE + # Wait until token is needed to fetch for the first time + current_app.config[CONFIG_SPEECH_SERVICE_TOKEN] = None + + if OPENAI_HOST.startswith("azure"): + if OPENAI_HOST == "azure_custom": + current_app.logger.info("OPENAI_HOST is azure_custom, setting up Azure OpenAI custom client") + if not AZURE_OPENAI_CUSTOM_URL: + raise ValueError("AZURE_OPENAI_CUSTOM_URL must be set when OPENAI_HOST is azure_custom") + endpoint = AZURE_OPENAI_CUSTOM_URL + else: + current_app.logger.info("OPENAI_HOST is azure, setting up Azure OpenAI client") + if not AZURE_OPENAI_SERVICE: + raise ValueError("AZURE_OPENAI_SERVICE must be set when OPENAI_HOST is azure") + endpoint = f"https://{AZURE_OPENAI_SERVICE}.openai.azure.com" + if api_key := os.getenv("AZURE_OPENAI_API_KEY_OVERRIDE"): + current_app.logger.info("AZURE_OPENAI_API_KEY_OVERRIDE found, using as api_key for Azure OpenAI client") + openai_client = AsyncAzureOpenAI( + api_version=AZURE_OPENAI_API_VERSION, azure_endpoint=endpoint, api_key=api_key + ) + else: + current_app.logger.info("Using Azure credential (passwordless authentication) for Azure OpenAI client") + token_provider = get_bearer_token_provider(azure_credential, "https://cognitiveservices.azure.com/.default") + openai_client = AsyncAzureOpenAI( + api_version=AZURE_OPENAI_API_VERSION, + azure_endpoint=endpoint, + azure_ad_token_provider=token_provider, + ) + elif OPENAI_HOST == "local": + current_app.logger.info("OPENAI_HOST is local, setting up local OpenAI client for OPENAI_BASE_URL with no key") + openai_client = 
AsyncOpenAI( + base_url=os.environ["OPENAI_BASE_URL"], + api_key="no-key-required", + ) + else: + current_app.logger.info( + "OPENAI_HOST is not azure, setting up OpenAI client using OPENAI_API_KEY and OPENAI_ORGANIZATION environment variables" + ) + openai_client = AsyncOpenAI( + api_key=OPENAI_API_KEY, + organization=OPENAI_ORGANIZATION, + ) + + current_app.config[CONFIG_OPENAI_CLIENT] = openai_client + current_app.config[CONFIG_SEARCH_CLIENT] = search_client + current_app.config[CONFIG_AGENT_CLIENT] = agent_client + current_app.config[CONFIG_BLOB_CONTAINER_CLIENT] = blob_container_client + current_app.config[CONFIG_AUTH_CLIENT] = auth_helper + + current_app.config[CONFIG_GPT4V_DEPLOYED] = bool(USE_GPT4V) + current_app.config[CONFIG_SEMANTIC_RANKER_DEPLOYED] = AZURE_SEARCH_SEMANTIC_RANKER != "disabled" + current_app.config[CONFIG_QUERY_REWRITING_ENABLED] = ( + AZURE_SEARCH_QUERY_REWRITING == "true" and AZURE_SEARCH_SEMANTIC_RANKER != "disabled" + ) + current_app.config[CONFIG_DEFAULT_REASONING_EFFORT] = OPENAI_REASONING_EFFORT + current_app.config[CONFIG_REASONING_EFFORT_ENABLED] = OPENAI_CHATGPT_MODEL in Approach.GPT_REASONING_MODELS + current_app.config[CONFIG_STREAMING_ENABLED] = ( + bool(USE_GPT4V) + or OPENAI_CHATGPT_MODEL not in Approach.GPT_REASONING_MODELS + or Approach.GPT_REASONING_MODELS[OPENAI_CHATGPT_MODEL].streaming ) - openai.api_key = openai_token.token - - # Store on app.config for later use inside requests - app.config[CONFIG_OPENAI_TOKEN] = openai_token - app.config[CONFIG_CREDENTIAL] = azure_credential - app.config[CONFIG_BLOB_CLIENT] = blob_client - # Various approaches to integrate GPT and external knowledge, most applications will use a single one of these patterns - # or some derivative, here we include several for exploration purposes - app.config[CONFIG_ASK_APPROACHES] = { - "rtr": RetrieveThenReadApproach( - search_client, - AZURE_OPENAI_CHATGPT_DEPLOYMENT, - AZURE_OPENAI_CHATGPT_MODEL, - AZURE_OPENAI_EMB_DEPLOYMENT, - KB_FIELDS_SOURCEPAGE, - KB_FIELDS_CONTENT - ), - "rrr": ReadRetrieveReadApproach( - search_client, - AZURE_OPENAI_GPT_DEPLOYMENT, - AZURE_OPENAI_EMB_DEPLOYMENT, - KB_FIELDS_SOURCEPAGE, - KB_FIELDS_CONTENT - ), - "rda": ReadDecomposeAsk(search_client, - AZURE_OPENAI_GPT_DEPLOYMENT, - AZURE_OPENAI_EMB_DEPLOYMENT, - KB_FIELDS_SOURCEPAGE, - KB_FIELDS_CONTENT - ) - } - app.config[CONFIG_CHAT_APPROACHES] = { - "rrr": ChatReadRetrieveReadApproach( - search_client, - AZURE_OPENAI_CHATGPT_DEPLOYMENT, - AZURE_OPENAI_CHATGPT_MODEL, - AZURE_OPENAI_EMB_DEPLOYMENT, - KB_FIELDS_SOURCEPAGE, - KB_FIELDS_CONTENT, - ) - } + current_app.config[CONFIG_VECTOR_SEARCH_ENABLED] = os.getenv("USE_VECTORS", "").lower() != "false" + current_app.config[CONFIG_USER_UPLOAD_ENABLED] = bool(USE_USER_UPLOAD) + current_app.config[CONFIG_LANGUAGE_PICKER_ENABLED] = ENABLE_LANGUAGE_PICKER + current_app.config[CONFIG_SPEECH_INPUT_ENABLED] = USE_SPEECH_INPUT_BROWSER + current_app.config[CONFIG_SPEECH_OUTPUT_BROWSER_ENABLED] = USE_SPEECH_OUTPUT_BROWSER + current_app.config[CONFIG_SPEECH_OUTPUT_AZURE_ENABLED] = USE_SPEECH_OUTPUT_AZURE + current_app.config[CONFIG_CHAT_HISTORY_BROWSER_ENABLED] = USE_CHAT_HISTORY_BROWSER + current_app.config[CONFIG_CHAT_HISTORY_COSMOS_ENABLED] = USE_CHAT_HISTORY_COSMOS + current_app.config[CONFIG_AGENTIC_RETRIEVAL_ENABLED] = USE_AGENTIC_RETRIEVAL + + prompt_manager = PromptyManager() + + # Set up the two default RAG approaches for /ask and /chat + # RetrieveThenReadApproach is used by /ask for single-turn Q&A + current_app.config[CONFIG_ASK_APPROACH] = 
RetrieveThenReadApproach( + search_client=search_client, + search_index_name=AZURE_SEARCH_INDEX, + agent_model=AZURE_OPENAI_SEARCHAGENT_MODEL, + agent_deployment=AZURE_OPENAI_SEARCHAGENT_DEPLOYMENT, + agent_client=agent_client, + openai_client=openai_client, + auth_helper=auth_helper, + chatgpt_model=OPENAI_CHATGPT_MODEL, + chatgpt_deployment=AZURE_OPENAI_CHATGPT_DEPLOYMENT, + embedding_model=OPENAI_EMB_MODEL, + embedding_deployment=AZURE_OPENAI_EMB_DEPLOYMENT, + embedding_dimensions=OPENAI_EMB_DIMENSIONS, + embedding_field=AZURE_SEARCH_FIELD_NAME_EMBEDDING, + sourcepage_field=KB_FIELDS_SOURCEPAGE, + content_field=KB_FIELDS_CONTENT, + query_language=AZURE_SEARCH_QUERY_LANGUAGE, + query_speller=AZURE_SEARCH_QUERY_SPELLER, + prompt_manager=prompt_manager, + reasoning_effort=OPENAI_REASONING_EFFORT, + ) + + # ChatReadRetrieveReadApproach is used by /chat for multi-turn conversation + current_app.config[CONFIG_CHAT_APPROACH] = ChatReadRetrieveReadApproach( + search_client=search_client, + search_index_name=AZURE_SEARCH_INDEX, + agent_model=AZURE_OPENAI_SEARCHAGENT_MODEL, + agent_deployment=AZURE_OPENAI_SEARCHAGENT_DEPLOYMENT, + agent_client=agent_client, + openai_client=openai_client, + auth_helper=auth_helper, + chatgpt_model=OPENAI_CHATGPT_MODEL, + chatgpt_deployment=AZURE_OPENAI_CHATGPT_DEPLOYMENT, + embedding_model=OPENAI_EMB_MODEL, + embedding_deployment=AZURE_OPENAI_EMB_DEPLOYMENT, + embedding_dimensions=OPENAI_EMB_DIMENSIONS, + embedding_field=AZURE_SEARCH_FIELD_NAME_EMBEDDING, + sourcepage_field=KB_FIELDS_SOURCEPAGE, + content_field=KB_FIELDS_CONTENT, + query_language=AZURE_SEARCH_QUERY_LANGUAGE, + query_speller=AZURE_SEARCH_QUERY_SPELLER, + prompt_manager=prompt_manager, + reasoning_effort=OPENAI_REASONING_EFFORT, + ) + + if USE_GPT4V: + current_app.logger.info("USE_GPT4V is true, setting up GPT4V approach") + if not AZURE_OPENAI_GPT4V_MODEL: + raise ValueError("AZURE_OPENAI_GPT4V_MODEL must be set when USE_GPT4V is true") + if any( + model in Approach.GPT_REASONING_MODELS + for model in [ + OPENAI_CHATGPT_MODEL, + AZURE_OPENAI_GPT4V_MODEL, + AZURE_OPENAI_CHATGPT_DEPLOYMENT, + AZURE_OPENAI_GPT4V_DEPLOYMENT, + ] + ): + raise ValueError( + "AZURE_OPENAI_CHATGPT_MODEL and AZURE_OPENAI_GPT4V_MODEL must not be a reasoning model when USE_GPT4V is true" + ) + + token_provider = get_bearer_token_provider(azure_credential, "https://cognitiveservices.azure.com/.default") + current_app.config[CONFIG_ASK_VISION_APPROACH] = RetrieveThenReadVisionApproach( + search_client=search_client, + openai_client=openai_client, + blob_container_client=blob_container_client, + auth_helper=auth_helper, + vision_endpoint=AZURE_VISION_ENDPOINT, + vision_token_provider=token_provider, + gpt4v_deployment=AZURE_OPENAI_GPT4V_DEPLOYMENT, + gpt4v_model=AZURE_OPENAI_GPT4V_MODEL, + embedding_model=OPENAI_EMB_MODEL, + embedding_deployment=AZURE_OPENAI_EMB_DEPLOYMENT, + embedding_dimensions=OPENAI_EMB_DIMENSIONS, + embedding_field=AZURE_SEARCH_FIELD_NAME_EMBEDDING, + sourcepage_field=KB_FIELDS_SOURCEPAGE, + content_field=KB_FIELDS_CONTENT, + query_language=AZURE_SEARCH_QUERY_LANGUAGE, + query_speller=AZURE_SEARCH_QUERY_SPELLER, + prompt_manager=prompt_manager, + ) + + current_app.config[CONFIG_CHAT_VISION_APPROACH] = ChatReadRetrieveReadVisionApproach( + search_client=search_client, + openai_client=openai_client, + blob_container_client=blob_container_client, + auth_helper=auth_helper, + vision_endpoint=AZURE_VISION_ENDPOINT, + vision_token_provider=token_provider, + chatgpt_model=OPENAI_CHATGPT_MODEL, + 
chatgpt_deployment=AZURE_OPENAI_CHATGPT_DEPLOYMENT, + gpt4v_deployment=AZURE_OPENAI_GPT4V_DEPLOYMENT, + gpt4v_model=AZURE_OPENAI_GPT4V_MODEL, + embedding_model=OPENAI_EMB_MODEL, + embedding_deployment=AZURE_OPENAI_EMB_DEPLOYMENT, + embedding_dimensions=OPENAI_EMB_DIMENSIONS, + embedding_field=AZURE_SEARCH_FIELD_NAME_EMBEDDING, + sourcepage_field=KB_FIELDS_SOURCEPAGE, + content_field=KB_FIELDS_CONTENT, + query_language=AZURE_SEARCH_QUERY_LANGUAGE, + query_speller=AZURE_SEARCH_QUERY_SPELLER, + prompt_manager=prompt_manager, + ) + + +@bp.after_app_serving +async def close_clients(): + await current_app.config[CONFIG_SEARCH_CLIENT].close() + await current_app.config[CONFIG_BLOB_CONTAINER_CLIENT].close() + if current_app.config.get(CONFIG_USER_BLOB_CONTAINER_CLIENT): + await current_app.config[CONFIG_USER_BLOB_CONTAINER_CLIENT].close() + + +def create_app(): + app = Quart(__name__) app.register_blueprint(bp) + app.register_blueprint(chat_history_cosmosdb_bp) - return app + if os.getenv("APPLICATIONINSIGHTS_CONNECTION_STRING"): + app.logger.info("APPLICATIONINSIGHTS_CONNECTION_STRING is set, enabling Azure Monitor") + configure_azure_monitor() + # This tracks HTTP requests made by aiohttp: + AioHttpClientInstrumentor().instrument() + # This tracks HTTP requests made by httpx: + HTTPXClientInstrumentor().instrument() + # This tracks OpenAI SDK requests: + OpenAIInstrumentor().instrument() + # This middleware tracks app route requests: + app.asgi_app = OpenTelemetryMiddleware(app.asgi_app) # type: ignore[assignment] -if __name__ == "__main__": - app = create_app() - app.run() + # Log levels should be one of https://docs.python.org/3/library/logging.html#logging-levels + # Set root level to WARNING to avoid seeing overly verbose logs from SDKS + logging.basicConfig(level=logging.WARNING) + # Set our own logger levels to INFO by default + app_level = os.getenv("APP_LOG_LEVEL", "INFO") + app.logger.setLevel(os.getenv("APP_LOG_LEVEL", app_level)) + logging.getLogger("scripts").setLevel(app_level) + + if allowed_origin := os.getenv("ALLOWED_ORIGIN"): + allowed_origins = allowed_origin.split(";") + if len(allowed_origins) > 0: + app.logger.info("CORS enabled for %s", allowed_origins) + cors(app, allow_origin=allowed_origins, allow_methods=["GET", "POST"]) + + return app diff --git a/app/backend/approaches/__init__.py b/app/backend/approaches/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/app/backend/approaches/approach.py b/app/backend/approaches/approach.py index 612585d862..7dc81814bd 100644 --- a/app/backend/approaches/approach.py +++ b/app/backend/approaches/approach.py @@ -1,6 +1,493 @@ -from typing import Any +import os +from abc import ABC +from collections.abc import AsyncGenerator, Awaitable +from dataclasses import dataclass +from typing import Any, Callable, Optional, TypedDict, Union, cast +from urllib.parse import urljoin +import aiohttp +from azure.search.documents.agent.aio import KnowledgeAgentRetrievalClient +from azure.search.documents.agent.models import ( + KnowledgeAgentAzureSearchDocReference, + KnowledgeAgentIndexParams, + KnowledgeAgentMessage, + KnowledgeAgentMessageTextContent, + KnowledgeAgentRetrievalRequest, + KnowledgeAgentRetrievalResponse, + KnowledgeAgentSearchActivityRecord, +) +from azure.search.documents.aio import SearchClient +from azure.search.documents.models import ( + QueryCaptionResult, + QueryType, + VectorizedQuery, + VectorQuery, +) +from openai import AsyncOpenAI, AsyncStream +from openai.types import CompletionUsage +from 
openai.types.chat import ( + ChatCompletion, + ChatCompletionChunk, + ChatCompletionMessageParam, + ChatCompletionReasoningEffort, + ChatCompletionToolParam, +) -class Approach: - def run(self, q: str, overrides: dict[str, Any]) -> Any: +from approaches.promptmanager import PromptManager +from core.authentication import AuthenticationHelper + + +@dataclass +class Document: + id: Optional[str] = None + content: Optional[str] = None + category: Optional[str] = None + sourcepage: Optional[str] = None + sourcefile: Optional[str] = None + oids: Optional[list[str]] = None + groups: Optional[list[str]] = None + captions: Optional[list[QueryCaptionResult]] = None + score: Optional[float] = None + reranker_score: Optional[float] = None + search_agent_query: Optional[str] = None + + def serialize_for_results(self) -> dict[str, Any]: + result_dict = { + "id": self.id, + "content": self.content, + "category": self.category, + "sourcepage": self.sourcepage, + "sourcefile": self.sourcefile, + "oids": self.oids, + "groups": self.groups, + "captions": ( + [ + { + "additional_properties": caption.additional_properties, + "text": caption.text, + "highlights": caption.highlights, + } + for caption in self.captions + ] + if self.captions + else [] + ), + "score": self.score, + "reranker_score": self.reranker_score, + "search_agent_query": self.search_agent_query, + } + return result_dict + + +@dataclass +class ThoughtStep: + title: str + description: Optional[Any] + props: Optional[dict[str, Any]] = None + + def update_token_usage(self, usage: CompletionUsage) -> None: + if self.props: + self.props["token_usage"] = TokenUsageProps.from_completion_usage(usage) + + +@dataclass +class DataPoints: + text: Optional[list[str]] = None + images: Optional[list] = None + + +@dataclass +class ExtraInfo: + data_points: DataPoints + thoughts: Optional[list[ThoughtStep]] = None + followup_questions: Optional[list[Any]] = None + + +@dataclass +class TokenUsageProps: + prompt_tokens: int + completion_tokens: int + reasoning_tokens: Optional[int] + total_tokens: int + + @classmethod + def from_completion_usage(cls, usage: CompletionUsage) -> "TokenUsageProps": + return cls( + prompt_tokens=usage.prompt_tokens, + completion_tokens=usage.completion_tokens, + reasoning_tokens=( + usage.completion_tokens_details.reasoning_tokens if usage.completion_tokens_details else None + ), + total_tokens=usage.total_tokens, + ) + + +# GPT reasoning models don't support the same set of parameters as other models +# https://learn.microsoft.com/azure/ai-services/openai/how-to/reasoning +@dataclass +class GPTReasoningModelSupport: + streaming: bool + + +class Approach(ABC): + # List of GPT reasoning models support + GPT_REASONING_MODELS = { + "o1": GPTReasoningModelSupport(streaming=False), + "o3-mini": GPTReasoningModelSupport(streaming=True), + } + # Set a higher token limit for GPT reasoning models + RESPONSE_DEFAULT_TOKEN_LIMIT = 1024 + RESPONSE_REASONING_DEFAULT_TOKEN_LIMIT = 8192 + + def __init__( + self, + search_client: SearchClient, + openai_client: AsyncOpenAI, + auth_helper: AuthenticationHelper, + query_language: Optional[str], + query_speller: Optional[str], + embedding_deployment: Optional[str], # Not needed for non-Azure OpenAI or for retrieval_mode="text" + embedding_model: str, + embedding_dimensions: int, + embedding_field: str, + openai_host: str, + vision_endpoint: str, + vision_token_provider: Callable[[], Awaitable[str]], + prompt_manager: PromptManager, + reasoning_effort: Optional[str] = None, + ): + self.search_client = 
search_client + self.openai_client = openai_client + self.auth_helper = auth_helper + self.query_language = query_language + self.query_speller = query_speller + self.embedding_deployment = embedding_deployment + self.embedding_model = embedding_model + self.embedding_dimensions = embedding_dimensions + self.embedding_field = embedding_field + self.openai_host = openai_host + self.vision_endpoint = vision_endpoint + self.vision_token_provider = vision_token_provider + self.prompt_manager = prompt_manager + self.reasoning_effort = reasoning_effort + self.include_token_usage = True + + def build_filter(self, overrides: dict[str, Any], auth_claims: dict[str, Any]) -> Optional[str]: + include_category = overrides.get("include_category") + exclude_category = overrides.get("exclude_category") + security_filter = self.auth_helper.build_security_filters(overrides, auth_claims) + filters = [] + if include_category: + filters.append("category eq '{}'".format(include_category.replace("'", "''"))) + if exclude_category: + filters.append("category ne '{}'".format(exclude_category.replace("'", "''"))) + if security_filter: + filters.append(security_filter) + return None if len(filters) == 0 else " and ".join(filters) + + async def search( + self, + top: int, + query_text: Optional[str], + filter: Optional[str], + vectors: list[VectorQuery], + use_text_search: bool, + use_vector_search: bool, + use_semantic_ranker: bool, + use_semantic_captions: bool, + minimum_search_score: Optional[float] = None, + minimum_reranker_score: Optional[float] = None, + use_query_rewriting: Optional[bool] = None, + ) -> list[Document]: + search_text = query_text if use_text_search else "" + search_vectors = vectors if use_vector_search else [] + if use_semantic_ranker: + results = await self.search_client.search( + search_text=search_text, + filter=filter, + top=top, + query_caption="extractive|highlight-false" if use_semantic_captions else None, + query_rewrites="generative" if use_query_rewriting else None, + vector_queries=search_vectors, + query_type=QueryType.SEMANTIC, + query_language=self.query_language, + query_speller=self.query_speller, + semantic_configuration_name="default", + semantic_query=query_text, + ) + else: + results = await self.search_client.search( + search_text=search_text, + filter=filter, + top=top, + vector_queries=search_vectors, + ) + + documents = [] + async for page in results.by_page(): + async for document in page: + documents.append( + Document( + id=document.get("id"), + content=document.get("content"), + category=document.get("category"), + sourcepage=document.get("sourcepage"), + sourcefile=document.get("sourcefile"), + oids=document.get("oids"), + groups=document.get("groups"), + captions=cast(list[QueryCaptionResult], document.get("@search.captions")), + score=document.get("@search.score"), + reranker_score=document.get("@search.reranker_score"), + ) + ) + + qualified_documents = [ + doc + for doc in documents + if ( + (doc.score or 0) >= (minimum_search_score or 0) + and (doc.reranker_score or 0) >= (minimum_reranker_score or 0) + ) + ] + + return qualified_documents + + async def run_agentic_retrieval( + self, + messages: list[ChatCompletionMessageParam], + agent_client: KnowledgeAgentRetrievalClient, + search_index_name: str, + top: Optional[int] = None, + filter_add_on: Optional[str] = None, + minimum_reranker_score: Optional[float] = None, + max_docs_for_reranker: Optional[int] = None, + results_merge_strategy: Optional[str] = None, + ) -> tuple[KnowledgeAgentRetrievalResponse, 
list[Document]]: + # STEP 1: Invoke agentic retrieval + response = await agent_client.retrieve( + retrieval_request=KnowledgeAgentRetrievalRequest( + messages=[ + KnowledgeAgentMessage( + role=str(msg["role"]), content=[KnowledgeAgentMessageTextContent(text=str(msg["content"]))] + ) + for msg in messages + if msg["role"] != "system" + ], + target_index_params=[ + KnowledgeAgentIndexParams( + index_name=search_index_name, + reranker_threshold=minimum_reranker_score, + max_docs_for_reranker=max_docs_for_reranker, + filter_add_on=filter_add_on, + include_reference_source_data=True, + ) + ], + ) + ) + + # STEP 2: Generate a contextual and content specific answer using the search results and chat history + activities = response.activity + activity_mapping = ( + { + activity.id: activity.query.search if activity.query else "" + for activity in activities + if isinstance(activity, KnowledgeAgentSearchActivityRecord) + } + if activities + else {} + ) + + results = [] + if response and response.references: + if results_merge_strategy == "interleaved": + # Use interleaved reference order + references = sorted(response.references, key=lambda reference: int(reference.id)) + else: + # Default to descending strategy + references = response.references + for reference in references: + if isinstance(reference, KnowledgeAgentAzureSearchDocReference) and reference.source_data: + results.append( + Document( + id=reference.doc_key, + content=reference.source_data["content"], + sourcepage=reference.source_data["sourcepage"], + search_agent_query=activity_mapping[reference.activity_source], + ) + ) + if top and len(results) == top: + break + + return response, results + + def get_sources_content( + self, results: list[Document], use_semantic_captions: bool, use_image_citation: bool + ) -> list[str]: + + def nonewlines(s: str) -> str: + return s.replace("\n", " ").replace("\r", " ") + + if use_semantic_captions: + return [ + (self.get_citation((doc.sourcepage or ""), use_image_citation)) + + ": " + + nonewlines(" . 
".join([cast(str, c.text) for c in (doc.captions or [])])) + for doc in results + ] + else: + return [ + (self.get_citation((doc.sourcepage or ""), use_image_citation)) + ": " + nonewlines(doc.content or "") + for doc in results + ] + + def get_citation(self, sourcepage: str, use_image_citation: bool) -> str: + if use_image_citation: + return sourcepage + else: + path, ext = os.path.splitext(sourcepage) + if ext.lower() == ".png": + page_idx = path.rfind("-") + page_number = int(path[page_idx + 1 :]) + return f"{path[:page_idx]}.pdf#page={page_number}" + + return sourcepage + + async def compute_text_embedding(self, q: str): + SUPPORTED_DIMENSIONS_MODEL = { + "text-embedding-ada-002": False, + "text-embedding-3-small": True, + "text-embedding-3-large": True, + } + + class ExtraArgs(TypedDict, total=False): + dimensions: int + + dimensions_args: ExtraArgs = ( + {"dimensions": self.embedding_dimensions} if SUPPORTED_DIMENSIONS_MODEL[self.embedding_model] else {} + ) + embedding = await self.openai_client.embeddings.create( + # Azure OpenAI takes the deployment name as the model name + model=self.embedding_deployment if self.embedding_deployment else self.embedding_model, + input=q, + **dimensions_args, + ) + query_vector = embedding.data[0].embedding + # This performs an oversampling due to how the search index was setup, + # so we do not need to explicitly pass in an oversampling parameter here + return VectorizedQuery(vector=query_vector, k_nearest_neighbors=50, fields=self.embedding_field) + + async def compute_image_embedding(self, q: str): + endpoint = urljoin(self.vision_endpoint, "computervision/retrieval:vectorizeText") + headers = {"Content-Type": "application/json"} + params = {"api-version": "2024-02-01", "model-version": "2023-04-15"} + data = {"text": q} + + headers["Authorization"] = "Bearer " + await self.vision_token_provider() + + async with aiohttp.ClientSession() as session: + async with session.post( + url=endpoint, params=params, headers=headers, json=data, raise_for_status=True + ) as response: + json = await response.json() + image_query_vector = json["vector"] + return VectorizedQuery(vector=image_query_vector, k_nearest_neighbors=50, fields="imageEmbedding") + + def get_system_prompt_variables(self, override_prompt: Optional[str]) -> dict[str, str]: + # Allows client to replace the entire prompt, or to inject into the existing prompt using >>> + if override_prompt is None: + return {} + elif override_prompt.startswith(">>>"): + return {"injected_prompt": override_prompt[3:]} + else: + return {"override_prompt": override_prompt} + + def get_response_token_limit(self, model: str, default_limit: int) -> int: + if model in self.GPT_REASONING_MODELS: + return self.RESPONSE_REASONING_DEFAULT_TOKEN_LIMIT + + return default_limit + + def create_chat_completion( + self, + chatgpt_deployment: Optional[str], + chatgpt_model: str, + messages: list[ChatCompletionMessageParam], + overrides: dict[str, Any], + response_token_limit: int, + should_stream: bool = False, + tools: Optional[list[ChatCompletionToolParam]] = None, + temperature: Optional[float] = None, + n: Optional[int] = None, + reasoning_effort: Optional[ChatCompletionReasoningEffort] = None, + ) -> Union[Awaitable[ChatCompletion], Awaitable[AsyncStream[ChatCompletionChunk]]]: + if chatgpt_model in self.GPT_REASONING_MODELS: + params: dict[str, Any] = { + # max_tokens is not supported + "max_completion_tokens": response_token_limit + } + + # Adjust parameters for reasoning models + supported_features = 
self.GPT_REASONING_MODELS[chatgpt_model] + if supported_features.streaming and should_stream: + params["stream"] = True + params["stream_options"] = {"include_usage": True} + params["reasoning_effort"] = reasoning_effort or overrides.get("reasoning_effort") or self.reasoning_effort + + else: + # Include parameters that may not be supported for reasoning models + params = { + "max_tokens": response_token_limit, + "temperature": temperature or overrides.get("temperature", 0.3), + } + if should_stream: + params["stream"] = True + params["stream_options"] = {"include_usage": True} + + params["tools"] = tools + + # Azure OpenAI takes the deployment name as the model name + return self.openai_client.chat.completions.create( + model=chatgpt_deployment if chatgpt_deployment else chatgpt_model, + messages=messages, + seed=overrides.get("seed", None), + n=n or 1, + **params, + ) + + def format_thought_step_for_chatcompletion( + self, + title: str, + messages: list[ChatCompletionMessageParam], + overrides: dict[str, Any], + model: str, + deployment: Optional[str], + usage: Optional[CompletionUsage] = None, + reasoning_effort: Optional[ChatCompletionReasoningEffort] = None, + ) -> ThoughtStep: + properties: dict[str, Any] = {"model": model} + if deployment: + properties["deployment"] = deployment + # Only add reasoning_effort setting if the model supports it + if model in self.GPT_REASONING_MODELS: + properties["reasoning_effort"] = reasoning_effort or overrides.get( + "reasoning_effort", self.reasoning_effort + ) + if usage: + properties["token_usage"] = TokenUsageProps.from_completion_usage(usage) + return ThoughtStep(title, messages, properties) + + async def run( + self, + messages: list[ChatCompletionMessageParam], + session_state: Any = None, + context: dict[str, Any] = {}, + ) -> dict[str, Any]: + raise NotImplementedError + + async def run_stream( + self, + messages: list[ChatCompletionMessageParam], + session_state: Any = None, + context: dict[str, Any] = {}, + ) -> AsyncGenerator[dict[str, Any], None]: raise NotImplementedError diff --git a/app/backend/approaches/chatapproach.py b/app/backend/approaches/chatapproach.py new file mode 100644 index 0000000000..346c9f3b0a --- /dev/null +++ b/app/backend/approaches/chatapproach.py @@ -0,0 +1,151 @@ +import json +import re +from abc import ABC, abstractmethod +from collections.abc import AsyncGenerator, Awaitable +from typing import Any, Optional, Union, cast + +from openai import AsyncStream +from openai.types.chat import ( + ChatCompletion, + ChatCompletionChunk, + ChatCompletionMessageParam, +) + +from approaches.approach import ( + Approach, + ExtraInfo, +) + + +class ChatApproach(Approach, ABC): + + NO_RESPONSE = "0" + + @abstractmethod + async def run_until_final_call( + self, messages, overrides, auth_claims, should_stream + ) -> tuple[ExtraInfo, Union[Awaitable[ChatCompletion], Awaitable[AsyncStream[ChatCompletionChunk]]]]: + pass + + def get_search_query(self, chat_completion: ChatCompletion, user_query: str): + response_message = chat_completion.choices[0].message + + if response_message.tool_calls: + for tool in response_message.tool_calls: + if tool.type != "function": + continue + function = tool.function + if function.name == "search_sources": + arg = json.loads(function.arguments) + search_query = arg.get("search_query", self.NO_RESPONSE) + if search_query != self.NO_RESPONSE: + return search_query + elif query_text := response_message.content: + if query_text.strip() != self.NO_RESPONSE: + return query_text + return user_query + + 
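# Aside, for reference only (not part of the diff): get_search_query above expects the
# query-rewrite step to expose a single function tool named "search_sources" with a
# "search_query" argument. The exact contents of chat_query_rewrite_tools.json are not shown
# in this diff, so the schema below is a hedged sketch consistent with what the parser reads,
# not the file verbatim.
search_sources_tool = {
    "type": "function",
    "function": {
        "name": "search_sources",
        "description": "Retrieve sources from the Azure AI Search index",
        "parameters": {
            "type": "object",
            "properties": {
                "search_query": {
                    "type": "string",
                    "description": "Query string used to retrieve documents from the search index",
                }
            },
            "required": ["search_query"],
        },
    },
}
# If the model neither calls the tool nor returns a usable query (it answers NO_RESPONSE, "0"),
# get_search_query falls back to the original user question.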
def extract_followup_questions(self, content: Optional[str]): + if content is None: + return content, [] + return content.split("<<")[0], re.findall(r"<<([^>>]+)>>", content) + + async def run_without_streaming( + self, + messages: list[ChatCompletionMessageParam], + overrides: dict[str, Any], + auth_claims: dict[str, Any], + session_state: Any = None, + ) -> dict[str, Any]: + extra_info, chat_coroutine = await self.run_until_final_call( + messages, overrides, auth_claims, should_stream=False + ) + chat_completion_response: ChatCompletion = await cast(Awaitable[ChatCompletion], chat_coroutine) + content = chat_completion_response.choices[0].message.content + role = chat_completion_response.choices[0].message.role + if overrides.get("suggest_followup_questions"): + content, followup_questions = self.extract_followup_questions(content) + extra_info.followup_questions = followup_questions + # Assume last thought is for generating answer + if self.include_token_usage and extra_info.thoughts and chat_completion_response.usage: + extra_info.thoughts[-1].update_token_usage(chat_completion_response.usage) + chat_app_response = { + "message": {"content": content, "role": role}, + "context": extra_info, + "session_state": session_state, + } + return chat_app_response + + async def run_with_streaming( + self, + messages: list[ChatCompletionMessageParam], + overrides: dict[str, Any], + auth_claims: dict[str, Any], + session_state: Any = None, + ) -> AsyncGenerator[dict, None]: + extra_info, chat_coroutine = await self.run_until_final_call( + messages, overrides, auth_claims, should_stream=True + ) + chat_coroutine = cast(Awaitable[AsyncStream[ChatCompletionChunk]], chat_coroutine) + yield {"delta": {"role": "assistant"}, "context": extra_info, "session_state": session_state} + + followup_questions_started = False + followup_content = "" + async for event_chunk in await chat_coroutine: + # "2023-07-01-preview" API version has a bug where first response has empty choices + event = event_chunk.model_dump() # Convert pydantic model to dict + if event["choices"]: + # No usage during streaming + completion = { + "delta": { + "content": event["choices"][0]["delta"].get("content"), + "role": event["choices"][0]["delta"]["role"], + } + } + # if event contains << and not >>, it is start of follow-up question, truncate + content = completion["delta"].get("content") + content = content or "" # content may either not exist in delta, or explicitly be None + if overrides.get("suggest_followup_questions") and "<<" in content: + followup_questions_started = True + earlier_content = content[: content.index("<<")] + if earlier_content: + completion["delta"]["content"] = earlier_content + yield completion + followup_content += content[content.index("<<") :] + elif followup_questions_started: + followup_content += content + else: + yield completion + else: + # Final chunk at end of streaming should contain usage + # https://cookbook.openai.com/examples/how_to_stream_completions#4-how-to-get-token-usage-data-for-streamed-chat-completion-response + if event_chunk.usage and extra_info.thoughts and self.include_token_usage: + extra_info.thoughts[-1].update_token_usage(event_chunk.usage) + yield {"delta": {"role": "assistant"}, "context": extra_info, "session_state": session_state} + + if followup_content: + _, followup_questions = self.extract_followup_questions(followup_content) + yield { + "delta": {"role": "assistant"}, + "context": {"context": extra_info, "followup_questions": followup_questions}, + } + + async def run( + 
self, + messages: list[ChatCompletionMessageParam], + session_state: Any = None, + context: dict[str, Any] = {}, + ) -> dict[str, Any]: + overrides = context.get("overrides", {}) + auth_claims = context.get("auth_claims", {}) + return await self.run_without_streaming(messages, overrides, auth_claims, session_state) + + async def run_stream( + self, + messages: list[ChatCompletionMessageParam], + session_state: Any = None, + context: dict[str, Any] = {}, + ) -> AsyncGenerator[dict[str, Any], None]: + overrides = context.get("overrides", {}) + auth_claims = context.get("auth_claims", {}) + return self.run_with_streaming(messages, overrides, auth_claims, session_state) diff --git a/app/backend/approaches/chatreadretrieveread.py b/app/backend/approaches/chatreadretrieveread.py index 5c5d733b78..ed87976e3b 100644 --- a/app/backend/approaches/chatreadretrieveread.py +++ b/app/backend/approaches/chatreadretrieveread.py @@ -1,184 +1,281 @@ -from typing import Any, Sequence +from collections.abc import Awaitable +from typing import Any, Optional, Union, cast -import openai -from azure.search.documents import SearchClient -from azure.search.documents.models import QueryType +from azure.search.documents.agent.aio import KnowledgeAgentRetrievalClient +from azure.search.documents.aio import SearchClient +from azure.search.documents.models import VectorQuery +from openai import AsyncOpenAI, AsyncStream +from openai.types.chat import ( + ChatCompletion, + ChatCompletionChunk, + ChatCompletionMessageParam, + ChatCompletionToolParam, +) -from approaches.approach import Approach -from core.messagebuilder import MessageBuilder -from core.modelhelper import get_token_limit -from text import nonewlines +from approaches.approach import DataPoints, ExtraInfo, ThoughtStep +from approaches.chatapproach import ChatApproach +from approaches.promptmanager import PromptManager +from core.authentication import AuthenticationHelper -class ChatReadRetrieveReadApproach(Approach): - # Chat roles - SYSTEM = "system" - USER = "user" - ASSISTANT = "assistant" - +class ChatReadRetrieveReadApproach(ChatApproach): """ - Simple retrieve-then-read implementation, using the Cognitive Search and OpenAI APIs directly. It first retrieves - top documents from search, then constructs a prompt with them, and then uses OpenAI to generate an completion - (answer) with that prompt. + A multi-step approach that first uses OpenAI to turn the user's question into a search query, + then uses Azure AI Search to retrieve relevant documents, and then sends the conversation history, + original user question, and search results to OpenAI to generate a response. """ - system_message_chat_conversation = """Assistant helps the company employees with their healthcare plan questions, and questions about the employee handbook. Be brief in your answers. -Answer ONLY with the facts listed in the list of sources below. If there isn't enough information below, say you don't know. Do not generate answers that don't use the sources below. If asking a clarifying question to the user would help, ask the question. -For tabular information return it as an html table. Do not return markdown format. If the question is not in English, answer in the language used in the question. -Each source has a name followed by colon and the actual information, always include the source name for each fact you use in the response. Use square brackets to reference the source, e.g. [info1.txt]. Don't combine sources, list each source separately, e.g. [info1.txt][info2.pdf]. 
-{follow_up_questions_prompt} -{injected_prompt} -""" - follow_up_questions_prompt_content = """Generate three very brief follow-up questions that the user would likely ask next about their healthcare plan and employee handbook. -Use double angle brackets to reference the questions, e.g. <>. -Try not to repeat questions that have already been asked. -Only generate questions and do not generate any text before or after the questions, such as 'Next Questions'""" - - query_prompt_template = """Below is a history of the conversation so far, and a new question asked by the user that needs to be answered by searching in a knowledge base about employee healthcare plans and the employee handbook. -Generate a search query based on the conversation and the new question. -Do not include cited source filenames and document names e.g info.txt or doc.pdf in the search query terms. -Do not include any text inside [] or <<>> in the search query terms. -Do not include any special characters like '+'. -If the question is not in English, translate the question to English before generating the search query. -If you cannot generate a search query, return just the number 0. -""" - query_prompt_few_shots = [ - {'role' : USER, 'content' : 'What are my health plans?' }, - {'role' : ASSISTANT, 'content' : 'Show available health plans' }, - {'role' : USER, 'content' : 'does my plan cover cardio?' }, - {'role' : ASSISTANT, 'content' : 'Health plan cardio coverage' } - ] - - def __init__(self, search_client: SearchClient, chatgpt_deployment: str, chatgpt_model: str, embedding_deployment: str, sourcepage_field: str, content_field: str): + + def __init__( + self, + *, + search_client: SearchClient, + search_index_name: str, + agent_model: Optional[str], + agent_deployment: Optional[str], + agent_client: KnowledgeAgentRetrievalClient, + auth_helper: AuthenticationHelper, + openai_client: AsyncOpenAI, + chatgpt_model: str, + chatgpt_deployment: Optional[str], # Not needed for non-Azure OpenAI + embedding_deployment: Optional[str], # Not needed for non-Azure OpenAI or for retrieval_mode="text" + embedding_model: str, + embedding_dimensions: int, + embedding_field: str, + sourcepage_field: str, + content_field: str, + query_language: str, + query_speller: str, + prompt_manager: PromptManager, + reasoning_effort: Optional[str] = None, + ): self.search_client = search_client - self.chatgpt_deployment = chatgpt_deployment + self.search_index_name = search_index_name + self.agent_model = agent_model + self.agent_deployment = agent_deployment + self.agent_client = agent_client + self.openai_client = openai_client + self.auth_helper = auth_helper self.chatgpt_model = chatgpt_model + self.chatgpt_deployment = chatgpt_deployment self.embedding_deployment = embedding_deployment + self.embedding_model = embedding_model + self.embedding_dimensions = embedding_dimensions + self.embedding_field = embedding_field self.sourcepage_field = sourcepage_field self.content_field = content_field - self.chatgpt_token_limit = get_token_limit(chatgpt_model) - - def run(self, history: Sequence[dict[str, str]], overrides: dict[str, Any]) -> Any: - has_text = overrides.get("retrieval_mode") in ["text", "hybrid", None] - has_vector = overrides.get("retrieval_mode") in ["vectors", "hybrid", None] - use_semantic_captions = True if overrides.get("semantic_captions") and has_text else False - top = overrides.get("top") or 3 - exclude_category = overrides.get("exclude_category") or None - filter = "category ne '{}'".format(exclude_category.replace("'", 
"''")) if exclude_category else None + self.query_language = query_language + self.query_speller = query_speller + self.prompt_manager = prompt_manager + self.query_rewrite_prompt = self.prompt_manager.load_prompt("chat_query_rewrite.prompty") + self.query_rewrite_tools = self.prompt_manager.load_tools("chat_query_rewrite_tools.json") + self.answer_prompt = self.prompt_manager.load_prompt("chat_answer_question.prompty") + self.reasoning_effort = reasoning_effort + self.include_token_usage = True - user_q = 'Generate search query for: ' + history[-1]["user"] + async def run_until_final_call( + self, + messages: list[ChatCompletionMessageParam], + overrides: dict[str, Any], + auth_claims: dict[str, Any], + should_stream: bool = False, + ) -> tuple[ExtraInfo, Union[Awaitable[ChatCompletion], Awaitable[AsyncStream[ChatCompletionChunk]]]]: + use_agentic_retrieval = True if overrides.get("use_agentic_retrieval") else False + original_user_query = messages[-1]["content"] - # STEP 1: Generate an optimized keyword search query based on the chat history and the last question - messages = self.get_messages_from_history( - self.query_prompt_template, - self.chatgpt_model, - history, - user_q, - self.query_prompt_few_shots, - self.chatgpt_token_limit - len(user_q) + reasoning_model_support = self.GPT_REASONING_MODELS.get(self.chatgpt_model) + if reasoning_model_support and (not reasoning_model_support.streaming and should_stream): + raise Exception( + f"{self.chatgpt_model} does not support streaming. Please use a different model or disable streaming." ) + if use_agentic_retrieval: + extra_info = await self.run_agentic_retrieval_approach(messages, overrides, auth_claims) + else: + extra_info = await self.run_search_approach(messages, overrides, auth_claims) - chat_completion = openai.ChatCompletion.create( - deployment_id=self.chatgpt_deployment, - model=self.chatgpt_model, - messages=messages, - temperature=0.0, - max_tokens=32, - n=1) + messages = self.prompt_manager.render_prompt( + self.answer_prompt, + self.get_system_prompt_variables(overrides.get("prompt_template")) + | { + "include_follow_up_questions": bool(overrides.get("suggest_followup_questions")), + "past_messages": messages[:-1], + "user_query": original_user_query, + "text_sources": extra_info.data_points.text, + }, + ) - query_text = chat_completion.choices[0].message.content - if query_text.strip() == "0": - query_text = history[-1]["user"] # Use the last user input if we failed to generate a better query + chat_coroutine = cast( + Union[Awaitable[ChatCompletion], Awaitable[AsyncStream[ChatCompletionChunk]]], + self.create_chat_completion( + self.chatgpt_deployment, + self.chatgpt_model, + messages, + overrides, + self.get_response_token_limit(self.chatgpt_model, 1024), + should_stream, + ), + ) + extra_info.thoughts.append( + self.format_thought_step_for_chatcompletion( + title="Prompt to generate answer", + messages=messages, + overrides=overrides, + model=self.chatgpt_model, + deployment=self.chatgpt_deployment, + usage=None, + ) + ) + return (extra_info, chat_coroutine) - # STEP 2: Retrieve relevant documents from the search index with the GPT optimized query + async def run_search_approach( + self, messages: list[ChatCompletionMessageParam], overrides: dict[str, Any], auth_claims: dict[str, Any] + ): + use_text_search = overrides.get("retrieval_mode") in ["text", "hybrid", None] + use_vector_search = overrides.get("retrieval_mode") in ["vectors", "hybrid", None] + use_semantic_ranker = True if overrides.get("semantic_ranker") 
else False + use_semantic_captions = True if overrides.get("semantic_captions") else False + use_query_rewriting = True if overrides.get("query_rewriting") else False + top = overrides.get("top", 3) + minimum_search_score = overrides.get("minimum_search_score", 0.0) + minimum_reranker_score = overrides.get("minimum_reranker_score", 0.0) + search_index_filter = self.build_filter(overrides, auth_claims) - # If retrieval mode includes vectors, compute an embedding for the query - if has_vector: - query_vector = openai.Embedding.create(engine=self.embedding_deployment, input=query_text)["data"][0]["embedding"] - else: - query_vector = None - - # Only keep the text query if the retrieval mode uses text, otherwise drop it - if not has_text: - query_text = None - - # Use semantic L2 reranker if requested and if retrieval mode is text or hybrid (vectors + text) - if overrides.get("semantic_ranker") and has_text: - r = self.search_client.search(query_text, - filter=filter, - query_type=QueryType.SEMANTIC, - query_language="en-us", - query_speller="lexicon", - semantic_configuration_name="default", - top=top, - query_caption="extractive|highlight-false" if use_semantic_captions else None, - vector=query_vector, - top_k=50 if query_vector else None, - vector_fields="embedding" if query_vector else None) - else: - r = self.search_client.search(query_text, - filter=filter, - top=top, - vector=query_vector, - top_k=50 if query_vector else None, - vector_fields="embedding" if query_vector else None) - if use_semantic_captions: - results = [doc[self.sourcepage_field] + ": " + nonewlines(" . ".join([c.text for c in doc['@search.captions']])) for doc in r] - else: - results = [doc[self.sourcepage_field] + ": " + nonewlines(doc[self.content_field]) for doc in r] - content = "\n".join(results) + original_user_query = messages[-1]["content"] + if not isinstance(original_user_query, str): + raise ValueError("The most recent message content must be a string.") - follow_up_questions_prompt = self.follow_up_questions_prompt_content if overrides.get("suggest_followup_questions") else "" + query_messages = self.prompt_manager.render_prompt( + self.query_rewrite_prompt, {"user_query": original_user_query, "past_messages": messages[:-1]} + ) + tools: list[ChatCompletionToolParam] = self.query_rewrite_tools - # STEP 3: Generate a contextual and content specific answer using the search results and chat history + # STEP 1: Generate an optimized keyword search query based on the chat history and the last question - # Allow client to replace the entire prompt, or to inject into the exiting prompt using >>> - prompt_override = overrides.get("prompt_override") - if prompt_override is None: - system_message = self.system_message_chat_conversation.format(injected_prompt="", follow_up_questions_prompt=follow_up_questions_prompt) - elif prompt_override.startswith(">>>"): - system_message = self.system_message_chat_conversation.format(injected_prompt=prompt_override[3:] + "\n", follow_up_questions_prompt=follow_up_questions_prompt) - else: - system_message = prompt_override.format(follow_up_questions_prompt=follow_up_questions_prompt) - - messages = self.get_messages_from_history( - system_message + "\n\nSources:\n" + content, - self.chatgpt_model, - history, - history[-1]["user"], - max_tokens=self.chatgpt_token_limit) - - chat_completion = openai.ChatCompletion.create( - deployment_id=self.chatgpt_deployment, - model=self.chatgpt_model, - messages=messages, - temperature=overrides.get("temperature") or 0.7, - max_tokens=1024, - 
n=1) + chat_completion = cast( + ChatCompletion, + await self.create_chat_completion( + self.chatgpt_deployment, + self.chatgpt_model, + messages=query_messages, + overrides=overrides, + response_token_limit=self.get_response_token_limit( + self.chatgpt_model, 100 + ), # Setting too low risks malformed JSON, setting too high may affect performance + temperature=0.0, # Minimize creativity for search query generation + tools=tools, + reasoning_effort="low", # Minimize reasoning for search query generation + ), + ) - chat_content = chat_completion.choices[0].message.content + query_text = self.get_search_query(chat_completion, original_user_query) - msg_to_display = '\n\n'.join([str(message) for message in messages]) + # STEP 2: Retrieve relevant documents from the search index with the GPT optimized query - return {"data_points": results, "answer": chat_content, "thoughts": f"Searched for:
<br>{query_text}<br><br>Conversations:<br>" + msg_to_display.replace('\n', '<br>
')} + # If retrieval mode includes vectors, compute an embedding for the query + vectors: list[VectorQuery] = [] + if use_vector_search: + vectors.append(await self.compute_text_embedding(query_text)) - def get_messages_from_history(self, system_prompt: str, model_id: str, history: Sequence[dict[str, str]], user_conv: str, few_shots = [], max_tokens: int = 4096) -> []: - message_builder = MessageBuilder(system_prompt, model_id) + results = await self.search( + top, + query_text, + search_index_filter, + vectors, + use_text_search, + use_vector_search, + use_semantic_ranker, + use_semantic_captions, + minimum_search_score, + minimum_reranker_score, + use_query_rewriting, + ) - # Add examples to show the chat what responses we want. It will try to mimic any responses and make sure they match the rules laid out in the system message. - for shot in few_shots: - message_builder.append_message(shot.get('role'), shot.get('content')) + # STEP 3: Generate a contextual and content specific answer using the search results and chat history + text_sources = self.get_sources_content(results, use_semantic_captions, use_image_citation=False) - user_content = user_conv - append_index = len(few_shots) + 1 + extra_info = ExtraInfo( + DataPoints(text=text_sources), + thoughts=[ + self.format_thought_step_for_chatcompletion( + title="Prompt to generate search query", + messages=query_messages, + overrides=overrides, + model=self.chatgpt_model, + deployment=self.chatgpt_deployment, + usage=chat_completion.usage, + reasoning_effort="low", + ), + ThoughtStep( + "Search using generated search query", + query_text, + { + "use_semantic_captions": use_semantic_captions, + "use_semantic_ranker": use_semantic_ranker, + "use_query_rewriting": use_query_rewriting, + "top": top, + "filter": search_index_filter, + "use_vector_search": use_vector_search, + "use_text_search": use_text_search, + }, + ), + ThoughtStep( + "Search results", + [result.serialize_for_results() for result in results], + ), + ], + ) + return extra_info - message_builder.append_message(self.USER, user_content, index=append_index) + async def run_agentic_retrieval_approach( + self, + messages: list[ChatCompletionMessageParam], + overrides: dict[str, Any], + auth_claims: dict[str, Any], + ): + minimum_reranker_score = overrides.get("minimum_reranker_score", 0) + search_index_filter = self.build_filter(overrides, auth_claims) + top = overrides.get("top", 3) + max_subqueries = overrides.get("max_subqueries", 10) + results_merge_strategy = overrides.get("results_merge_strategy", "interleaved") + # 50 is the amount of documents that the reranker can process per query + max_docs_for_reranker = max_subqueries * 50 + + response, results = await self.run_agentic_retrieval( + messages=messages, + agent_client=self.agent_client, + search_index_name=self.search_index_name, + top=top, + filter_add_on=search_index_filter, + minimum_reranker_score=minimum_reranker_score, + max_docs_for_reranker=max_docs_for_reranker, + results_merge_strategy=results_merge_strategy, + ) - for h in reversed(history[:-1]): - if h.get("bot"): - message_builder.append_message(self.ASSISTANT, h.get('bot'), index=append_index) - message_builder.append_message(self.USER, h.get('user'), index=append_index) - if message_builder.token_length > max_tokens: - break + text_sources = self.get_sources_content(results, use_semantic_captions=False, use_image_citation=False) - messages = message_builder.messages - return messages + extra_info = ExtraInfo( + DataPoints(text=text_sources), + thoughts=[ + 
ThoughtStep( + "Use agentic retrieval", + messages, + { + "reranker_threshold": minimum_reranker_score, + "max_docs_for_reranker": max_docs_for_reranker, + "results_merge_strategy": results_merge_strategy, + "filter": search_index_filter, + }, + ), + ThoughtStep( + f"Agentic retrieval results (top {top})", + [result.serialize_for_results() for result in results], + { + "query_plan": ( + [activity.as_dict() for activity in response.activity] if response.activity else None + ), + "model": self.agent_model, + "deployment": self.agent_deployment, + }, + ), + ], + ) + return extra_info diff --git a/app/backend/approaches/chatreadretrievereadvision.py b/app/backend/approaches/chatreadretrievereadvision.py new file mode 100644 index 0000000000..f8aaf3c37d --- /dev/null +++ b/app/backend/approaches/chatreadretrievereadvision.py @@ -0,0 +1,222 @@ +from collections.abc import Awaitable +from typing import Any, Callable, Optional, Union, cast + +from azure.search.documents.aio import SearchClient +from azure.storage.blob.aio import ContainerClient +from openai import AsyncOpenAI, AsyncStream +from openai.types.chat import ( + ChatCompletion, + ChatCompletionChunk, + ChatCompletionMessageParam, + ChatCompletionToolParam, +) + +from approaches.approach import DataPoints, ExtraInfo, ThoughtStep +from approaches.chatapproach import ChatApproach +from approaches.promptmanager import PromptManager +from core.authentication import AuthenticationHelper +from core.imageshelper import fetch_image + + +class ChatReadRetrieveReadVisionApproach(ChatApproach): + """ + A multi-step approach that first uses OpenAI to turn the user's question into a search query, + then uses Azure AI Search to retrieve relevant documents, and then sends the conversation history, + original user question, and search results to OpenAI to generate a response. 
+ """ + + def __init__( + self, + *, + search_client: SearchClient, + blob_container_client: ContainerClient, + openai_client: AsyncOpenAI, + auth_helper: AuthenticationHelper, + chatgpt_model: str, + chatgpt_deployment: Optional[str], # Not needed for non-Azure OpenAI + gpt4v_deployment: Optional[str], # Not needed for non-Azure OpenAI + gpt4v_model: str, + embedding_deployment: Optional[str], # Not needed for non-Azure OpenAI or for retrieval_mode="text" + embedding_model: str, + embedding_dimensions: int, + embedding_field: str, + sourcepage_field: str, + content_field: str, + query_language: str, + query_speller: str, + vision_endpoint: str, + vision_token_provider: Callable[[], Awaitable[str]], + prompt_manager: PromptManager, + ): + self.search_client = search_client + self.blob_container_client = blob_container_client + self.openai_client = openai_client + self.auth_helper = auth_helper + self.chatgpt_model = chatgpt_model + self.chatgpt_deployment = chatgpt_deployment + self.gpt4v_deployment = gpt4v_deployment + self.gpt4v_model = gpt4v_model + self.embedding_deployment = embedding_deployment + self.embedding_model = embedding_model + self.embedding_dimensions = embedding_dimensions + self.embedding_field = embedding_field + self.sourcepage_field = sourcepage_field + self.content_field = content_field + self.query_language = query_language + self.query_speller = query_speller + self.vision_endpoint = vision_endpoint + self.vision_token_provider = vision_token_provider + self.prompt_manager = prompt_manager + self.query_rewrite_prompt = self.prompt_manager.load_prompt("chat_query_rewrite.prompty") + self.query_rewrite_tools = self.prompt_manager.load_tools("chat_query_rewrite_tools.json") + self.answer_prompt = self.prompt_manager.load_prompt("chat_answer_question_vision.prompty") + # Currently disabled due to issues with rendering token usage in the UI + self.include_token_usage = False + + async def run_until_final_call( + self, + messages: list[ChatCompletionMessageParam], + overrides: dict[str, Any], + auth_claims: dict[str, Any], + should_stream: bool = False, + ) -> tuple[ExtraInfo, Union[Awaitable[ChatCompletion], Awaitable[AsyncStream[ChatCompletionChunk]]]]: + seed = overrides.get("seed", None) + use_text_search = overrides.get("retrieval_mode") in ["text", "hybrid", None] + use_vector_search = overrides.get("retrieval_mode") in ["vectors", "hybrid", None] + use_semantic_ranker = True if overrides.get("semantic_ranker") else False + use_query_rewriting = True if overrides.get("query_rewriting") else False + use_semantic_captions = True if overrides.get("semantic_captions") else False + top = overrides.get("top", 3) + minimum_search_score = overrides.get("minimum_search_score", 0.0) + minimum_reranker_score = overrides.get("minimum_reranker_score", 0.0) + filter = self.build_filter(overrides, auth_claims) + + vector_fields = overrides.get("vector_fields", "textAndImageEmbeddings") + send_text_to_gptvision = overrides.get("gpt4v_input") in ["textAndImages", "texts", None] + send_images_to_gptvision = overrides.get("gpt4v_input") in ["textAndImages", "images", None] + + original_user_query = messages[-1]["content"] + if not isinstance(original_user_query, str): + raise ValueError("The most recent message content must be a string.") + + # Use prompty to prepare the query prompt + query_messages = self.prompt_manager.render_prompt( + self.query_rewrite_prompt, {"user_query": original_user_query, "past_messages": messages[:-1]} + ) + tools: list[ChatCompletionToolParam] = 
self.query_rewrite_tools + + # STEP 1: Generate an optimized keyword search query based on the chat history and the last question + chat_completion: ChatCompletion = await self.openai_client.chat.completions.create( + messages=query_messages, + # Azure OpenAI takes the deployment name as the model name + model=self.chatgpt_deployment if self.chatgpt_deployment else self.chatgpt_model, + temperature=0.0, # Minimize creativity for search query generation + max_tokens=100, + n=1, + tools=tools, + seed=seed, + ) + + query_text = self.get_search_query(chat_completion, original_user_query) + + # STEP 2: Retrieve relevant documents from the search index with the GPT optimized query + + # If retrieval mode includes vectors, compute an embedding for the query + vectors = [] + if use_vector_search: + if vector_fields == "textEmbeddingOnly" or vector_fields == "textAndImageEmbeddings": + vectors.append(await self.compute_text_embedding(query_text)) + if vector_fields == "imageEmbeddingOnly" or vector_fields == "textAndImageEmbeddings": + vectors.append(await self.compute_image_embedding(query_text)) + + results = await self.search( + top, + query_text, + filter, + vectors, + use_text_search, + use_vector_search, + use_semantic_ranker, + use_semantic_captions, + minimum_search_score, + minimum_reranker_score, + use_query_rewriting, + ) + + # STEP 3: Generate a contextual and content specific answer using the search results and chat history + text_sources = [] + image_sources = [] + if send_text_to_gptvision: + text_sources = self.get_sources_content(results, use_semantic_captions, use_image_citation=True) + if send_images_to_gptvision: + for result in results: + url = await fetch_image(self.blob_container_client, result) + if url: + image_sources.append(url) + + messages = self.prompt_manager.render_prompt( + self.answer_prompt, + self.get_system_prompt_variables(overrides.get("prompt_template")) + | { + "include_follow_up_questions": bool(overrides.get("suggest_followup_questions")), + "past_messages": messages[:-1], + "user_query": original_user_query, + "text_sources": text_sources, + "image_sources": image_sources, + }, + ) + + extra_info = ExtraInfo( + DataPoints(text=text_sources, images=image_sources), + [ + ThoughtStep( + "Prompt to generate search query", + query_messages, + ( + {"model": self.chatgpt_model, "deployment": self.chatgpt_deployment} + if self.chatgpt_deployment + else {"model": self.chatgpt_model} + ), + ), + ThoughtStep( + "Search using generated search query", + query_text, + { + "use_semantic_captions": use_semantic_captions, + "use_semantic_ranker": use_semantic_ranker, + "use_query_rewriting": use_query_rewriting, + "top": top, + "filter": filter, + "vector_fields": vector_fields, + "use_text_search": use_text_search, + }, + ), + ThoughtStep( + "Search results", + [result.serialize_for_results() for result in results], + ), + ThoughtStep( + "Prompt to generate answer", + messages, + ( + {"model": self.gpt4v_model, "deployment": self.gpt4v_deployment} + if self.gpt4v_deployment + else {"model": self.gpt4v_model} + ), + ), + ], + ) + + chat_coroutine = cast( + Union[Awaitable[ChatCompletion], Awaitable[AsyncStream[ChatCompletionChunk]]], + self.openai_client.chat.completions.create( + model=self.gpt4v_deployment if self.gpt4v_deployment else self.gpt4v_model, + messages=messages, + temperature=overrides.get("temperature", 0.3), + max_tokens=1024, + n=1, + stream=should_stream, + seed=seed, + ), + ) + return (extra_info, chat_coroutine) diff --git 
a/app/backend/approaches/promptmanager.py b/app/backend/approaches/promptmanager.py new file mode 100644 index 0000000000..82941b4176 --- /dev/null +++ b/app/backend/approaches/promptmanager.py @@ -0,0 +1,31 @@ +import json +import pathlib + +import prompty +from openai.types.chat import ChatCompletionMessageParam + + +class PromptManager: + + def load_prompt(self, path: str): + raise NotImplementedError + + def load_tools(self, path: str): + raise NotImplementedError + + def render_prompt(self, prompt, data) -> list[ChatCompletionMessageParam]: + raise NotImplementedError + + +class PromptyManager(PromptManager): + + PROMPTS_DIRECTORY = pathlib.Path(__file__).parent / "prompts" + + def load_prompt(self, path: str): + return prompty.load(self.PROMPTS_DIRECTORY / path) + + def load_tools(self, path: str): + return json.loads(open(self.PROMPTS_DIRECTORY / path).read()) + + def render_prompt(self, prompt, data) -> list[ChatCompletionMessageParam]: + return prompty.prepare(prompt, data) diff --git a/app/backend/approaches/prompts/ask_answer_question.prompty b/app/backend/approaches/prompts/ask_answer_question.prompty new file mode 100644 index 0000000000..7ff73d232f --- /dev/null +++ b/app/backend/approaches/prompts/ask_answer_question.prompty @@ -0,0 +1,42 @@ +--- +name: Ask +description: Answer a single question (with no chat history) using solely text sources. +model: + api: chat +sample: + user_query: What does a product manager do? + text_sources: + - "role_library.pdf#page=29: The Manager of Product Management will collaborate with internal teams, such as engineering, sales, marketing, and finance, as well as external partners, suppliers, and customers to ensure successful product execution. Responsibilities: · Lead the product management team and provide guidance on product strategy, design, development, and launch. · Develop and implement product life-cycle management processes. · Monitor and analyze industry trends to identify opportunities for new products. · Develop product marketing plans and go-to-market strategies. · Research customer needs and develop customer-centric product roadmaps. · Collaborate with internal teams to ensure product execution and successful launch. · Develop pricing strategies and cost models. · Oversee product portfolio and performance metrics. · Manage product development budget. · Analyze product performance and customer feedback to identify areas for improvement. Qualifications: · Bachelor's degree in business, engineering, or a related field. · At least 5 years of experience in product management. · Proven track record of successful product launches." + - "role_library.pdf#page=23: Company: Contoso Electronics Location: Anywhere Job Type: Full-Time Salary: Competitive, commensurate with experience Job Summary: The Senior Manager of Product Management will be responsible for leading the product management team at Contoso Electronics. This role includes developing strategies, plans and objectives for the product management team and managing the day-to-day operations. The Senior Manager of Product Management will be responsible for the successful launch of new products and the optimization of existing products. Responsibilities: · Develop and implement product management strategies, plans and objectives to maximize team performance. · Analyze competitive landscape and market trends to develop product strategies. · Lead the product management team in the development of product plans, roadmaps and launch plans. 
· Monitor the performance of product management team, analyze results and implement corrective action as needed. · Manage the product lifecycle, including product development, launch, and end of life. · Ensure product features and benefits meet customer requirements. · Establish and maintain relationships with key customers, partners, and vendors." + - "role_library.pdf#page=28: · 7+ years of experience in research and development in the electronics sector. · Proven track record of successfully designing, testing, and optimizing products. · Experience leading a team of researchers and engineers. · Excellent problem-solving and analytical skills. · Ability to work in a fast-paced environment and meet tight deadlines.· Knowledge of industry trends, technologies, and regulations. · Excellent communication and presentation skills. Manager of Product Management Job Title: Manager of Product Management, Contoso Electronics Job Summary: The Manager of Product Management is responsible for overseeing the product management team, driving product development and marketing strategy for Contoso Electronics. This individual will be accountable for the successful launch of new products and the implementation of product life-cycle management processes. The Manager of Product Management will collaborate with internal teams, such as engineering, sales, marketing, and finance, as well as external partners, suppliers, and customers to ensure successful product execution." +--- +system: +{% if override_prompt %} +{{ override_prompt }} +{% else %} +You are an intelligent assistant helping Contoso Inc employees with their healthcare plan questions and employee handbook questions. +Use 'you' to refer to the individual asking the questions even if they ask with 'I'. +Answer the following question using only the data provided in the sources below. +Each source has a name followed by colon and the actual information, always include the source name for each fact you use in the response. +If you cannot answer using the sources below, say you don't know. Use below example to answer +{{ injected_prompt }} +{% endif %} + +user: +What is the deductible for the employee plan for a visit to Overlake in Bellevue? + +Sources: +info1.txt: deductibles depend on whether you are in-network or out-of-network. In-network deductibles are $500 for employee and $1000 for family. Out-of-network deductibles are $1000 for employee and $2000 for family. +info2.pdf: Overlake is in-network for the employee plan. +info3.pdf: Overlake is the name of the area that includes a park and ride near Bellevue. +info4.pdf: In-network institutions include Overlake, Swedish and others in the region. + +assistant: +In-network deductibles are $500 for employee and $1000 for family [info1.txt] and Overlake is in-network for the employee plan [info2.pdf][info4.pdf]. + +user: +{{ user_query }} +Sources: +{% for text_source in text_sources %} +{{ text_source }} +{% endfor %} diff --git a/app/backend/approaches/prompts/ask_answer_question_vision.prompty b/app/backend/approaches/prompts/ask_answer_question_vision.prompty new file mode 100644 index 0000000000..25ab9656a7 --- /dev/null +++ b/app/backend/approaches/prompts/ask_answer_question_vision.prompty @@ -0,0 +1,31 @@ +--- +name: Ask with vision +description: Answer a single question (with no chat history) using both text and image sources. 
+model: + api: chat +--- +system: +{% if override_prompt %} +{{ override_prompt }} +{% else %} +You are an intelligent assistant helping analyze the Annual Financial Report of Contoso Ltd., The documents contain text, graphs, tables and images. +Each image source has the file name in the top left corner of the image with coordinates (10,10) pixels and is in the format SourceFileName:. +Each text source starts in a new line and has the file name followed by colon and the actual information. +Always include the source name from the image or text for each fact you use in the response in the format: [filename]. +Answer the following question using only the data provided in the sources below. +The text and image source can be the same file name, don't use the image title when citing the image source, only use the file name as mentioned. +If you cannot answer using the sources below, say you don't know. Return just the answer without any input texts. +{{ injected_prompt }} +{% endif %} + +user: +{{ user_query }} +{% for image_source in image_sources %} +![Image]({{image_source}}) +{% endfor %} +{% if text_sources is defined %} +Sources: +{% for text_source in text_sources %} +{{ text_source }} +{% endfor %} +{% endif %} diff --git a/app/backend/approaches/prompts/chat_answer_question.prompty b/app/backend/approaches/prompts/chat_answer_question.prompty new file mode 100644 index 0000000000..3dcb05ae21 --- /dev/null +++ b/app/backend/approaches/prompts/chat_answer_question.prompty @@ -0,0 +1,51 @@ +--- +name: Chat +description: Answer a question (with chat history) using solely text sources. +model: + api: chat +sample: + user_query: What does a product manager do that a CEO doesn't? + include_follow_up_questions: true + past_messages: + - role: user + content: "What does a CEO do?" + - role: assistant + content: "A CEO, or Chief Executive Officer, is responsible for providing strategic direction and oversight to a company to ensure its long-term success and profitability. They develop and implement strategies and objectives for financial success and growth, provide guidance to the executive team, manage day-to-day operations, ensure compliance with laws and regulations, develop and maintain relationships with stakeholders, monitor industry trends, and represent the company in public events 12. [role_library.pdf#page=1][role_library.pdf#page=3]" + text_sources: + - "role_library.pdf#page=29: The Manager of Product Management will collaborate with internal teams, such as engineering, sales, marketing, and finance, as well as external partners, suppliers, and customers to ensure successful product execution. Responsibilities: · Lead the product management team and provide guidance on product strategy, design, development, and launch. · Develop and implement product life-cycle management processes. · Monitor and analyze industry trends to identify opportunities for new products. · Develop product marketing plans and go-to-market strategies. · Research customer needs and develop customer-centric product roadmaps. · Collaborate with internal teams to ensure product execution and successful launch. · Develop pricing strategies and cost models. · Oversee product portfolio and performance metrics. · Manage product development budget. · Analyze product performance and customer feedback to identify areas for improvement. Qualifications: · Bachelor's degree in business, engineering, or a related field. · At least 5 years of experience in product management. 
· Proven track record of successful product launches." + - "role_library.pdf#page=23: Company: Contoso Electronics Location: Anywhere Job Type: Full-Time Salary: Competitive, commensurate with experience Job Summary: The Senior Manager of Product Management will be responsible for leading the product management team at Contoso Electronics. This role includes developing strategies, plans and objectives for the product management team and managing the day-to-day operations. The Senior Manager of Product Management will be responsible for the successful launch of new products and the optimization of existing products. Responsibilities: · Develop and implement product management strategies, plans and objectives to maximize team performance. · Analyze competitive landscape and market trends to develop product strategies. · Lead the product management team in the development of product plans, roadmaps and launch plans. · Monitor the performance of product management team, analyze results and implement corrective action as needed. · Manage the product lifecycle, including product development, launch, and end of life. · Ensure product features and benefits meet customer requirements. · Establish and maintain relationships with key customers, partners, and vendors." + - "role_library.pdf#page=28: · 7+ years of experience in research and development in the electronics sector. · Proven track record of successfully designing, testing, and optimizing products. · Experience leading a team of researchers and engineers. · Excellent problem-solving and analytical skills. · Ability to work in a fast-paced environment and meet tight deadlines.· Knowledge of industry trends, technologies, and regulations. · Excellent communication and presentation skills. Manager of Product Management Job Title: Manager of Product Management, Contoso Electronics Job Summary: The Manager of Product Management is responsible for overseeing the product management team, driving product development and marketing strategy for Contoso Electronics. This individual will be accountable for the successful launch of new products and the implementation of product life-cycle management processes. The Manager of Product Management will collaborate with internal teams, such as engineering, sales, marketing, and finance, as well as external partners, suppliers, and customers to ensure successful product execution." +--- +system: +{% if override_prompt %} +{{ override_prompt }} +{% else %} +Assistant helps the company employees with their healthcare plan questions, and questions about the employee handbook. Be brief in your answers. +Answer ONLY with the facts listed in the list of sources below. If there isn't enough information below, say you don't know. Do not generate answers that don't use the sources below. If asking a clarifying question to the user would help, ask the question. +If the question is not in English, answer in the language used in the question. +Each source has a name followed by colon and the actual information, always include the source name for each fact you use in the response. Use square brackets to reference the source, for example [info1.txt]. Don't combine sources, list each source separately, for example [info1.txt][info2.pdf]. +{{ injected_prompt }} +{% endif %} + +{% if include_follow_up_questions %} +Generate 3 very brief follow-up questions that the user would likely ask next. +Enclose the follow-up questions in double angle brackets. Example: +<> +<> +<> +Do not repeat questions that have already been asked. 
+Make sure the last question ends with ">>". +{% endif %} + +{% for message in past_messages %} +{{ message["role"] }}: +{{ message["content"] }} +{% endfor %} + +user: +{{ user_query }} + +Sources: +{% for text_source in text_sources %} +{{ text_source }} +{% endfor %} diff --git a/app/backend/approaches/prompts/chat_answer_question_vision.prompty b/app/backend/approaches/prompts/chat_answer_question_vision.prompty new file mode 100644 index 0000000000..58b3624121 --- /dev/null +++ b/app/backend/approaches/prompts/chat_answer_question_vision.prompty @@ -0,0 +1,48 @@ +--- +name: Chat with vision +description: Answer a question (with chat history) using both text and image sources. +model: + api: chat +--- +system: +{% if override_prompt %} +{{ override_prompt }} +{% else %} +You are an intelligent assistant helping analyze the Annual Financial Report of Contoso Ltd., The documents contain text, graphs, tables and images. +Each image source has the file name in the top left corner of the image with coordinates (10,10) pixels and is in the format SourceFileName: +Each text source starts in a new line and has the file name followed by colon and the actual information +Always include the source name from the image or text for each fact you use in the response in the format: [filename] +Answer the following question using only the data provided in the sources below. +If asking a clarifying question to the user would help, ask the question. +Be brief in your answers. +The text and image source can be the same file name, don't use the image title when citing the image source, only use the file name as mentioned +If you cannot answer using the sources below, say you don't know. Return just the answer without any input texts. +{{injected_prompt}} +{% endif %} + +{% if include_follow_up_questions %} +Generate 3 very brief follow-up questions that the user would likely ask next. +Enclose the follow-up questions in double angle brackets. Example: +<> +<> +<> +Do not repeat questions that have already been asked. +Make sure the last question ends with ">>". +{% endif %} + +{% for message in past_messages %} +{{ message["role"] }}: +{{ message["content"] }} +{% endfor %} + +user: +{{ user_query }} +{% for image_source in image_sources %} +![Image]({{image_source}}) +{% endfor %} +{% if text_sources is defined %} +Sources: +{% for text_source in text_sources %} +{{ text_source }} +{% endfor %} +{% endif %} diff --git a/app/backend/approaches/prompts/chat_query_rewrite.prompty b/app/backend/approaches/prompts/chat_query_rewrite.prompty new file mode 100644 index 0000000000..545b3f5b8c --- /dev/null +++ b/app/backend/approaches/prompts/chat_query_rewrite.prompty @@ -0,0 +1,44 @@ +--- +name: Rewrite RAG query +description: Suggest the optimal search query based on the user's query, examples, and chat history. +model: + api: chat + parameters: + tools: ${file:chat_query_rewrite_tools.json} +sample: + user_query: Does it include hearing? + past_messages: + - role: user + content: "What is included in my Northwind Health Plus plan that is not in standard?" + - role: assistant + content: "The Northwind Health Plus plan includes coverage for emergency services, mental health and substance abuse coverage, and out-of-network services, which are not included in the Northwind Standard plan. [Benefit_Options.pdf#page=3]" +--- +system: +Below is a history of the conversation so far, and a new question asked by the user that needs to be answered by searching in a knowledge base. 
+You have access to Azure AI Search index with 100's of documents. +Generate a search query based on the conversation and the new question. +Do not include cited source filenames and document names e.g. info.txt or doc.pdf in the search query terms. +Do not include any text inside [] or <<>> in the search query terms. +Do not include any special characters like '+'. +If the question is not in English, translate the question to English before generating the search query. +If you cannot generate a search query, return just the number 0. + +user: +How did crypto do last year? + +assistant: +Summarize Cryptocurrency Market Dynamics from last year + +user: +What are my health plans? + +assistant: +Show available health plans + +{% for message in past_messages %} +{{ message["role"] }}: +{{ message["content"] }} +{% endfor %} + +user: +Generate search query for: {{ user_query }} diff --git a/app/backend/approaches/prompts/chat_query_rewrite_tools.json b/app/backend/approaches/prompts/chat_query_rewrite_tools.json new file mode 100644 index 0000000000..cf1743483c --- /dev/null +++ b/app/backend/approaches/prompts/chat_query_rewrite_tools.json @@ -0,0 +1,17 @@ +[{ + "type": "function", + "function": { + "name": "search_sources", + "description": "Retrieve sources from the Azure AI Search index", + "parameters": { + "type": "object", + "properties": { + "search_query": { + "type": "string", + "description": "Query string to retrieve documents from azure search eg: 'Health care plan'" + } + }, + "required": ["search_query"] + } + } +}] diff --git a/app/backend/approaches/readdecomposeask.py b/app/backend/approaches/readdecomposeask.py deleted file mode 100644 index 4c22cef2ec..0000000000 --- a/app/backend/approaches/readdecomposeask.py +++ /dev/null @@ -1,234 +0,0 @@ -import re -from typing import Any, List, Optional - -import openai -from azure.search.documents import SearchClient -from azure.search.documents.models import QueryType -from langchain.agents import AgentExecutor, Tool -from langchain.agents.react.base import ReActDocstoreAgent -from langchain.callbacks.manager import CallbackManager -from langchain.llms.openai import AzureOpenAI -from langchain.prompts import BasePromptTemplate, PromptTemplate - -from approaches.approach import Approach -from langchainadapters import HtmlCallbackHandler -from text import nonewlines - - -class ReadDecomposeAsk(Approach): - def __init__(self, search_client: SearchClient, openai_deployment: str, embedding_deployment: str, sourcepage_field: str, content_field: str): - self.search_client = search_client - self.openai_deployment = openai_deployment - self.embedding_deployment = embedding_deployment - self.sourcepage_field = sourcepage_field - self.content_field = content_field - - def search(self, query_text: str, overrides: dict[str, Any]) -> str: - has_text = overrides.get("retrieval_mode") in ["text", "hybrid", None] - has_vector = overrides.get("retrieval_mode") in ["vectors", "hybrid", None] - use_semantic_captions = True if overrides.get("semantic_captions") and has_text else False - top = overrides.get("top") or 3 - exclude_category = overrides.get("exclude_category") or None - filter = "category ne '{}'".format(exclude_category.replace("'", "''")) if exclude_category else None - - # If retrieval mode includes vectors, compute an embedding for the query - if has_vector: - query_vector = openai.Embedding.create(engine=self.embedding_deployment, input=query_text)["data"][0]["embedding"] - else: - query_vector = None - - # Only keep the text query if 
the retrieval mode uses text, otherwise drop it - if not has_text: - query_text = None - - if overrides.get("semantic_ranker") and has_text: - r = self.search_client.search(query_text, - filter=filter, - query_type=QueryType.SEMANTIC, - query_language="en-us", - query_speller="lexicon", - semantic_configuration_name="default", - top=top, - query_caption="extractive|highlight-false" if use_semantic_captions else None, - vector=query_vector, - top_k=50 if query_vector else None, - vector_fields="embedding" if query_vector else None) - else: - r = self.search_client.search(query_text, - filter=filter, - top=top, - vector=query_vector, - top_k=50 if query_vector else None, - vector_fields="embedding" if query_vector else None) - if use_semantic_captions: - self.results = [doc[self.sourcepage_field] + ":" + nonewlines(" . ".join([c.text for c in doc['@search.captions'] ])) for doc in r] - else: - self.results = [doc[self.sourcepage_field] + ":" + nonewlines(doc[self.content_field][:500]) for doc in r] - return "\n".join(self.results) - - def lookup(self, q: str) -> Optional[str]: - r = self.search_client.search(q, - top = 1, - include_total_count=True, - query_type=QueryType.SEMANTIC, - query_language="en-us", - query_speller="lexicon", - semantic_configuration_name="default", - query_answer="extractive|count-1", - query_caption="extractive|highlight-false") - - answers = r.get_answers() - if answers and len(answers) > 0: - return answers[0].text - if r.get_count() > 0: - return "\n".join(d['content'] for d in r) - return None - - def run(self, q: str, overrides: dict[str, Any]) -> Any: - # Not great to keep this as instance state, won't work with interleaving (e.g. if using async), but keeps the example simple - self.results = None - - # Use to capture thought process during iterations - cb_handler = HtmlCallbackHandler() - cb_manager = CallbackManager(handlers=[cb_handler]) - - llm = AzureOpenAI(deployment_name=self.openai_deployment, temperature=overrides.get("temperature") or 0.3, openai_api_key=openai.api_key) - tools = [ - Tool(name="Search", func=lambda q: self.search(q, overrides), description="useful for when you need to ask with search", callbacks=cb_manager), - Tool(name="Lookup", func=self.lookup, description="useful for when you need to ask with lookup", callbacks=cb_manager) - ] - - # Like results above, not great to keep this as a global, will interfere with interleaving - global prompt - prompt_prefix = overrides.get("prompt_template") - prompt = PromptTemplate.from_examples( - EXAMPLES, SUFFIX, ["input", "agent_scratchpad"], prompt_prefix + "\n\n" + PREFIX if prompt_prefix else PREFIX) - - agent = ReAct.from_llm_and_tools(llm, tools) - chain = AgentExecutor.from_agent_and_tools(agent, tools, verbose=True, callback_manager=cb_manager) - result = chain.run(q) - - # Replace substrings of the form with [file.ext] so that the frontend can render them as links, match them with a regex to avoid - # generalizing too much and disrupt HTML snippets if present - result = re.sub(r"<([a-zA-Z0-9_ \-\.]+)>", r"[\1]", result) - - return {"data_points": self.results or [], "answer": result, "thoughts": cb_handler.get_and_reset_log()} - -class ReAct(ReActDocstoreAgent): - @classmethod - def create_prompt(cls, tools: List[Tool]) -> BasePromptTemplate: - return prompt - -# Modified version of langchain's ReAct prompt that includes instructions and examples for how to cite information sources -EXAMPLES = [ - """Question: What is the elevation range for the area that the eastern sector of the 
-Colorado orogeny extends into? -Thought: I need to search Colorado orogeny, find the area that the eastern sector -of the Colorado orogeny extends into, then find the elevation range of the -area. -Action: Search[Colorado orogeny] -Observation: The Colorado orogeny was an episode of mountain building (an orogeny) in -Colorado and surrounding areas. -Thought: It does not mention the eastern sector. So I need to look up eastern -sector. -Action: Lookup[eastern sector] -Observation: (Result 1 / 1) The eastern sector extends into the High Plains and is called -the Central Plains orogeny. -Thought: The eastern sector of Colorado orogeny extends into the High Plains. So I -need to search High Plains and find its elevation range. -Action: Search[High Plains] -Observation: High Plains refers to one of two distinct land regions -Thought: I need to instead search High Plains (United States). -Action: Search[High Plains (United States)] -Observation: The High Plains are a subregion of the Great Plains. From east to west, the -High Plains rise in elevation from around 1,800 to 7,000 ft (550 to 2,130 -m). -Thought: High Plains rise in elevation from around 1,800 to 7,000 ft, so the answer -is 1,800 to 7,000 ft. -Action: Finish[1,800 to 7,000 ft ]""", - """Question: Musician and satirist Allie Goertz wrote a song about the "The Simpsons" -character Milhouse, who Matt Groening named after who? -Thought: The question simplifies to "The Simpsons" character Milhouse is named after -who. I only need to search Milhouse and find who it is named after. -Action: Search[Milhouse] -Observation: Milhouse Mussolini Van Houten is a recurring character in the Fox animated -television series The Simpsons voiced by Pamela Hayden and created by Matt -Groening. -Thought: The paragraph does not tell who Milhouse is named after, maybe I can look up -"named after". -Action: Lookup[named after] -Observation: (Result 1 / 1) Milhouse was named after U.S. president Richard Nixon, whose -middle name was Milhous. -Thought: Milhouse was named after U.S. president Richard Nixon, so the answer is -Richard Nixon. -Action: Finish[Richard Nixon ]""", - """Question: Which documentary is about Finnish rock groups, Adam Clayton Powell or The -Saimaa Gesture? -Thought: I need to search Adam Clayton Powell and The Saimaa Gesture, and find which -documentary is about Finnish rock groups. -Action: Search[Adam Clayton Powell] -Observation: Could not find [Adam Clayton Powell]. Similar: ['Adam Clayton Powell -III', 'Seventh Avenue (Manhattan)', 'Adam Clayton Powell Jr. State Office -Building', 'Isabel Washington Powell', 'Adam Powell', vAdam Clayton Powell -(film)', 'Giancarlo Esposito']. -Thought: To find the documentary, I can search Adam Clayton Powell (film). -Action: Search[Adam Clayton Powell (film)] -Observation: Adam Clayton Powell is a 1989 American documentary film directed by -Richard Kilberg. The film is about the rise and fall of influential -African-American politician Adam Clayton Powell Jr.[3][4] It was later aired -as part of the PBS series The American Experience. -Thought: Adam Clayton Powell (film) is a documentary about an African-American -politician, not Finnish rock groups. So the documentary about Finnish rock -groups must instead be The Saimaa Gesture. -Action: Finish[The Saimaa Gesture ]""", - """Question: What profession does Nicholas Ray and Elia Kazan have in common? -Thought: I need to search Nicholas Ray and Elia Kazan, find their professions, then -find the profession they have in common. 
-Action: Search[Nicholas Ray] -Observation: Nicholas Ray (born Raymond Nicholas Kienzle Jr., August 7, 1911 - June 16, -1979) was an American film director, screenwriter, and actor best known for -the 1955 film Rebel Without a Cause. -Thought: Professions of Nicholas Ray are director, screenwriter, and actor. I need -to search Elia Kazan next and find his professions. -Action: Search[Elia Kazan] -Observation: Elia Kazan was an American film and theatre director, producer, screenwriter -and actor. -Thought: Professions of Elia Kazan are director, producer, screenwriter, and actor. -So profession Nicholas Ray and Elia Kazan have in common is director, -screenwriter, and actor. -Action: Finish[director, screenwriter, actor ]""", - """Question: Which magazine was started first Arthur's Magazine or First for Women? -Thought: I need to search Arthur's Magazine and First for Women, and find which was -started first. -Action: Search[Arthur's Magazine] -Observation: Arthur's Magazine (1844-1846) was an American literary periodical published -in Philadelphia in the 19th century. -Thought: Arthur's Magazine was started in 1844. I need to search First for Women -next. -Action: Search[First for Women] -Observation: First for Women is a woman's magazine published by Bauer Media Group in the -USA.[1] The magazine was started in 1989. -Thought: First for Women was started in 1989. 1844 (Arthur's Magazine) < 1989 (First -for Women), so Arthur's Magazine was started first. -Action: Finish[Arthur's Magazine ]""", - """Question: Were Pavel Urysohn and Leonid Levin known for the same type of work? -Thought: I need to search Pavel Urysohn and Leonid Levin, find their types of work, -then find if they are the same. -Action: Search[Pavel Urysohn] -Observation: Pavel Samuilovich Urysohn (February 3, 1898 - August 17, 1924) was a Soviet -mathematician who is best known for his contributions in dimension theory. -Thought: Pavel Urysohn is a mathematician. I need to search Leonid Levin next and -find its type of work. -Action: Search[Leonid Levin] -Observation: Leonid Anatolievich Levin is a Soviet-American mathematician and computer -scientist. -Thought: Leonid Levin is a mathematician and computer scientist. So Pavel Urysohn -and Leonid Levin have the same type of work. -Action: Finish[yes ]""", -] -SUFFIX = """\nQuestion: {input} -{agent_scratchpad}""" -PREFIX = "Answer questions as shown in the following examples, by splitting the question into individual search or lookup actions to find facts until you can answer the question. " \ -"Observations are prefixed by their source name in angled brackets, source names MUST be included with the actions in the answers." \ -"All questions must be answered from the results from search or look up actions, only facts resulting from those can be used in an answer. " -"Answer questions as truthfully as possible, and ONLY answer the questions using the information from observations, do not speculate or your own knowledge." 
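Not part of the patch itself: a minimal, self-contained sketch of the citation clean-up that the removed run() method above performs with re.sub, included only to illustrate the angle-bracket to square-bracket rewrite; the sample answer string is hypothetical.

import re

# The removed ReAct approach rewrote source markers such as <role_library.pdf>
# into the [role_library.pdf] form that the frontend renders as citation links.
# The character class (letters, digits, spaces, '_', '-', '.') keeps the match
# from swallowing HTML snippets that carry attributes.
def normalize_citations(answer: str) -> str:
    return re.sub(r"<([a-zA-Z0-9_ \-\.]+)>", r"[\1]", answer)

print(normalize_citations("In-network deductibles are $500 <info1.txt>"))
# -> In-network deductibles are $500 [info1.txt]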
diff --git a/app/backend/approaches/readretrieveread.py b/app/backend/approaches/readretrieveread.py deleted file mode 100644 index de453e062e..0000000000 --- a/app/backend/approaches/readretrieveread.py +++ /dev/null @@ -1,149 +0,0 @@ -from typing import Any - -import openai -from azure.search.documents import SearchClient -from azure.search.documents.models import QueryType -from langchain.agents import AgentExecutor, Tool, ZeroShotAgent -from langchain.callbacks.manager import CallbackManager, Callbacks -from langchain.chains import LLMChain -from langchain.llms.openai import AzureOpenAI - -from approaches.approach import Approach -from langchainadapters import HtmlCallbackHandler -from lookuptool import CsvLookupTool -from text import nonewlines - - -class ReadRetrieveReadApproach(Approach): - """ - Attempt to answer questions by iteratively evaluating the question to see what information is missing, and once all information - is present then formulate an answer. Each iteration consists of two parts: - 1. use GPT to see if we need more information - 2. if more data is needed, use the requested "tool" to retrieve it. - The last call to GPT answers the actual question. - This is inspired by the MKRL paper[1] and applied here using the implementation in Langchain. - - [1] E. Karpas, et al. arXiv:2205.00445 - """ - - template_prefix = \ -"You are an intelligent assistant helping Contoso Inc employees with their healthcare plan questions and employee handbook questions. " \ -"Answer the question using only the data provided in the information sources below. " \ -"For tabular information return it as an html table. Do not return markdown format. " \ -"Each source has a name followed by colon and the actual data, quote the source name for each piece of data you use in the response. " \ -"For example, if the question is \"What color is the sky?\" and one of the information sources says \"info123: the sky is blue whenever it's not cloudy\", then answer with \"The sky is blue [info123]\" " \ -"It's important to strictly follow the format where the name of the source is in square brackets at the end of the sentence, and only up to the prefix before the colon (\":\"). " \ -"If there are multiple sources, cite each one in their own square brackets. For example, use \"[info343][ref-76]\" and not \"[info343,ref-76]\". " \ -"Never quote tool names as sources." \ -"If you cannot answer using the sources below, say that you don't know. " \ -"\n\nYou can access to the following tools:" - - template_suffix = """ -Begin! - -Question: {input} - -Thought: {agent_scratchpad}""" - - CognitiveSearchToolDescription = "useful for searching the Microsoft employee benefits information such as healthcare plans, retirement plans, etc." 
- - def __init__(self, search_client: SearchClient, openai_deployment: str, embedding_deployment: str, sourcepage_field: str, content_field: str): - self.search_client = search_client - self.openai_deployment = openai_deployment - self.embedding_deployment = embedding_deployment - self.sourcepage_field = sourcepage_field - self.content_field = content_field - - def retrieve(self, query_text: str, overrides: dict[str, Any]) -> Any: - has_text = overrides.get("retrieval_mode") in ["text", "hybrid", None] - has_vector = overrides.get("retrieval_mode") in ["vectors", "hybrid", None] - use_semantic_captions = True if overrides.get("semantic_captions") and has_text else False - top = overrides.get("top") or 3 - exclude_category = overrides.get("exclude_category") or None - filter = "category ne '{}'".format(exclude_category.replace("'", "''")) if exclude_category else None - - # If retrieval mode includes vectors, compute an embedding for the query - if has_vector: - query_vector = openai.Embedding.create(engine=self.embedding_deployment, input=query_text)["data"][0]["embedding"] - else: - query_vector = None - - # Only keep the text query if the retrieval mode uses text, otherwise drop it - if not has_text: - query_text = None - - # Use semantic ranker if requested and if retrieval mode is text or hybrid (vectors + text) - if overrides.get("semantic_ranker") and has_text: - r = self.search_client.search(query_text, - filter=filter, - query_type=QueryType.SEMANTIC, - query_language="en-us", - query_speller="lexicon", - semantic_configuration_name="default", - top = top, - query_caption="extractive|highlight-false" if use_semantic_captions else None, - vector=query_vector, - top_k=50 if query_vector else None, - vector_fields="embedding" if query_vector else None) - else: - r = self.search_client.search(query_text, - filter=filter, - top=top, - vector=query_vector, - top_k=50 if query_vector else None, - vector_fields="embedding" if query_vector else None) - if use_semantic_captions: - self.results = [doc[self.sourcepage_field] + ":" + nonewlines(" -.- ".join([c.text for c in doc['@search.captions']])) for doc in r] - else: - self.results = [doc[self.sourcepage_field] + ":" + nonewlines(doc[self.content_field][:250]) for doc in r] - content = "\n".join(self.results) - return content - - def run(self, q: str, overrides: dict[str, Any]) -> Any: - # Not great to keep this as instance state, won't work with interleaving (e.g. 
if using async), but keeps the example simple - self.results = None - - # Use to capture thought process during iterations - cb_handler = HtmlCallbackHandler() - cb_manager = CallbackManager(handlers=[cb_handler]) - - acs_tool = Tool(name="CognitiveSearch", - func=lambda q: self.retrieve(q, overrides), - description=self.CognitiveSearchToolDescription, - callbacks=cb_manager) - employee_tool = EmployeeInfoTool("Employee1", callbacks=cb_manager) - tools = [acs_tool, employee_tool] - - prompt = ZeroShotAgent.create_prompt( - tools=tools, - prefix=overrides.get("prompt_template_prefix") or self.template_prefix, - suffix=overrides.get("prompt_template_suffix") or self.template_suffix, - input_variables = ["input", "agent_scratchpad"]) - llm = AzureOpenAI(deployment_name=self.openai_deployment, temperature=overrides.get("temperature") or 0.3, openai_api_key=openai.api_key) - chain = LLMChain(llm = llm, prompt = prompt) - agent_exec = AgentExecutor.from_agent_and_tools( - agent = ZeroShotAgent(llm_chain = chain, tools = tools), - tools = tools, - verbose = True, - callback_manager = cb_manager) - result = agent_exec.run(q) - - # Remove references to tool names that might be confused with a citation - result = result.replace("[CognitiveSearch]", "").replace("[Employee]", "") - - return {"data_points": self.results or [], "answer": result, "thoughts": cb_handler.get_and_reset_log()} - -class EmployeeInfoTool(CsvLookupTool): - employee_name: str = "" - - def __init__(self, employee_name: str, callbacks: Callbacks = None): - super().__init__(filename="data/employeeinfo.csv", - key_field="name", - name="Employee", - description="useful for answering questions about the employee, their benefits and other personal information", - callbacks=callbacks) - self.func = self.employee_info - self.employee_name = employee_name - - def employee_info(self, name: str) -> str: - return self.lookup(name) diff --git a/app/backend/approaches/retrievethenread.py b/app/backend/approaches/retrievethenread.py index 2ce52ee9ec..d59f903b0e 100644 --- a/app/backend/approaches/retrievethenread.py +++ b/app/backend/approaches/retrievethenread.py @@ -1,109 +1,232 @@ -from typing import Any +from typing import Any, Optional, cast -import openai -from azure.search.documents import SearchClient -from azure.search.documents.models import QueryType +from azure.search.documents.agent.aio import KnowledgeAgentRetrievalClient +from azure.search.documents.aio import SearchClient +from azure.search.documents.models import VectorQuery +from openai import AsyncOpenAI +from openai.types.chat import ChatCompletion, ChatCompletionMessageParam -from approaches.approach import Approach -from core.messagebuilder import MessageBuilder -from text import nonewlines +from approaches.approach import Approach, DataPoints, ExtraInfo, ThoughtStep +from approaches.promptmanager import PromptManager +from core.authentication import AuthenticationHelper class RetrieveThenReadApproach(Approach): """ - Simple retrieve-then-read implementation, using the Cognitive Search and OpenAI APIs directly. It first retrieves + Simple retrieve-then-read implementation, using the AI Search and OpenAI APIs directly. It first retrieves top documents from search, then constructs a prompt with them, and then uses OpenAI to generate an completion (answer) with that prompt. """ - system_chat_template = \ -"You are an intelligent assistant helping Contoso Inc employees with their healthcare plan questions and employee handbook questions. 
" + \ -"Use 'you' to refer to the individual asking the questions even if they ask with 'I'. " + \ -"Answer the following question using only the data provided in the sources below. " + \ -"For tabular information return it as an html table. Do not return markdown format. " + \ -"Each source has a name followed by colon and the actual information, always include the source name for each fact you use in the response. " + \ -"If you cannot answer using the sources below, say you don't know. Use below example to answer" - - #shots/sample conversation - question = """ -'What is the deductible for the employee plan for a visit to Overlake in Bellevue?' - -Sources: -info1.txt: deductibles depend on whether you are in-network or out-of-network. In-network deductibles are $500 for employee and $1000 for family. Out-of-network deductibles are $1000 for employee and $2000 for family. -info2.pdf: Overlake is in-network for the employee plan. -info3.pdf: Overlake is the name of the area that includes a park and ride near Bellevue. -info4.pdf: In-network institutions include Overlake, Swedish and others in the region -""" - answer = "In-network deductibles are $500 for employee and $1000 for family [info1.txt] and Overlake is in-network for the employee plan [info2.pdf][info4.pdf]." - - def __init__(self, search_client: SearchClient, openai_deployment: str, chatgpt_model: str, embedding_deployment: str, sourcepage_field: str, content_field: str): + def __init__( + self, + *, + search_client: SearchClient, + search_index_name: str, + agent_model: Optional[str], + agent_deployment: Optional[str], + agent_client: KnowledgeAgentRetrievalClient, + auth_helper: AuthenticationHelper, + openai_client: AsyncOpenAI, + chatgpt_model: str, + chatgpt_deployment: Optional[str], # Not needed for non-Azure OpenAI + embedding_model: str, + embedding_deployment: Optional[str], # Not needed for non-Azure OpenAI or for retrieval_mode="text" + embedding_dimensions: int, + embedding_field: str, + sourcepage_field: str, + content_field: str, + query_language: str, + query_speller: str, + prompt_manager: PromptManager, + reasoning_effort: Optional[str] = None, + ): self.search_client = search_client - self.openai_deployment = openai_deployment + self.search_index_name = search_index_name + self.agent_model = agent_model + self.agent_deployment = agent_deployment + self.agent_client = agent_client + self.chatgpt_deployment = chatgpt_deployment + self.openai_client = openai_client + self.auth_helper = auth_helper self.chatgpt_model = chatgpt_model + self.embedding_model = embedding_model + self.embedding_dimensions = embedding_dimensions + self.chatgpt_deployment = chatgpt_deployment self.embedding_deployment = embedding_deployment + self.embedding_field = embedding_field self.sourcepage_field = sourcepage_field self.content_field = content_field - - def run(self, q: str, overrides: dict[str, Any]) -> Any: - has_text = overrides.get("retrieval_mode") in ["text", "hybrid", None] - has_vector = overrides.get("retrieval_mode") in ["vectors", "hybrid", None] - use_semantic_captions = True if overrides.get("semantic_captions") and has_text else False - top = overrides.get("top") or 3 - exclude_category = overrides.get("exclude_category") or None - filter = "category ne '{}'".format(exclude_category.replace("'", "''")) if exclude_category else None - - # If retrieval mode includes vectors, compute an embedding for the query - if has_vector: - query_vector = openai.Embedding.create(engine=self.embedding_deployment, 
input=q)["data"][0]["embedding"] - else: - query_vector = None - - # Only keep the text query if the retrieval mode uses text, otherwise drop it - query_text = q if has_text else None - - # Use semantic ranker if requested and if retrieval mode is text or hybrid (vectors + text) - if overrides.get("semantic_ranker") and has_text: - r = self.search_client.search(query_text, - filter=filter, - query_type=QueryType.SEMANTIC, - query_language="en-us", - query_speller="lexicon", - semantic_configuration_name="default", - top=top, - query_caption="extractive|highlight-false" if use_semantic_captions else None, - vector=query_vector, - top_k=50 if query_vector else None, - vector_fields="embedding" if query_vector else None) - else: - r = self.search_client.search(query_text, - filter=filter, - top=top, - vector=query_vector, - top_k=50 if query_vector else None, - vector_fields="embedding" if query_vector else None) - if use_semantic_captions: - results = [doc[self.sourcepage_field] + ": " + nonewlines(" . ".join([c.text for c in doc['@search.captions']])) for doc in r] + self.query_language = query_language + self.query_speller = query_speller + self.prompt_manager = prompt_manager + self.answer_prompt = self.prompt_manager.load_prompt("ask_answer_question.prompty") + self.reasoning_effort = reasoning_effort + self.include_token_usage = True + + async def run( + self, + messages: list[ChatCompletionMessageParam], + session_state: Any = None, + context: dict[str, Any] = {}, + ) -> dict[str, Any]: + overrides = context.get("overrides", {}) + auth_claims = context.get("auth_claims", {}) + use_agentic_retrieval = True if overrides.get("use_agentic_retrieval") else False + q = messages[-1]["content"] + if not isinstance(q, str): + raise ValueError("The most recent message content must be a string.") + + if use_agentic_retrieval: + extra_info = await self.run_agentic_retrieval_approach(messages, overrides, auth_claims) else: - results = [doc[self.sourcepage_field] + ": " + nonewlines(doc[self.content_field]) for doc in r] - content = "\n".join(results) + extra_info = await self.run_search_approach(messages, overrides, auth_claims) + + # Process results + messages = self.prompt_manager.render_prompt( + self.answer_prompt, + self.get_system_prompt_variables(overrides.get("prompt_template")) + | {"user_query": q, "text_sources": extra_info.data_points.text}, + ) + + chat_completion = cast( + ChatCompletion, + await self.create_chat_completion( + self.chatgpt_deployment, + self.chatgpt_model, + messages=messages, + overrides=overrides, + response_token_limit=self.get_response_token_limit(self.chatgpt_model, 1024), + ), + ) + extra_info.thoughts.append( + self.format_thought_step_for_chatcompletion( + title="Prompt to generate answer", + messages=messages, + overrides=overrides, + model=self.chatgpt_model, + deployment=self.chatgpt_deployment, + usage=chat_completion.usage, + ) + ) + return { + "message": { + "content": chat_completion.choices[0].message.content, + "role": chat_completion.choices[0].message.role, + }, + "context": extra_info, + "session_state": session_state, + } + + async def run_search_approach( + self, messages: list[ChatCompletionMessageParam], overrides: dict[str, Any], auth_claims: dict[str, Any] + ): + use_text_search = overrides.get("retrieval_mode") in ["text", "hybrid", None] + use_vector_search = overrides.get("retrieval_mode") in ["vectors", "hybrid", None] + use_semantic_ranker = True if overrides.get("semantic_ranker") else False + use_query_rewriting = True if 
overrides.get("query_rewriting") else False + use_semantic_captions = True if overrides.get("semantic_captions") else False + top = overrides.get("top", 3) + minimum_search_score = overrides.get("minimum_search_score", 0.0) + minimum_reranker_score = overrides.get("minimum_reranker_score", 0.0) + filter = self.build_filter(overrides, auth_claims) + q = str(messages[-1]["content"]) - message_builder = MessageBuilder(overrides.get("prompt_template") or self.system_chat_template, self.chatgpt_model) - - # add user question - user_content = q + "\n" + f"Sources:\n {content}" - message_builder.append_message('user', user_content) - - # Add shots/samples. This helps model to mimic response and make sure they match rules laid out in system message. - message_builder.append_message('assistant', self.answer) - message_builder.append_message('user', self.question) - - messages = message_builder.messages - chat_completion = openai.ChatCompletion.create( - deployment_id=self.openai_deployment, - model=self.chatgpt_model, - messages=messages, - temperature=overrides.get("temperature") or 0.3, - max_tokens=1024, - n=1) - - return {"data_points": results, "answer": chat_completion.choices[0].message.content, "thoughts": f"Question:
<br>{query_text}<br><br>Prompt:<br>
" + '\n\n'.join([str(message) for message in messages])} + # If retrieval mode includes vectors, compute an embedding for the query + vectors: list[VectorQuery] = [] + if use_vector_search: + vectors.append(await self.compute_text_embedding(q)) + + results = await self.search( + top, + q, + filter, + vectors, + use_text_search, + use_vector_search, + use_semantic_ranker, + use_semantic_captions, + minimum_search_score, + minimum_reranker_score, + use_query_rewriting, + ) + + text_sources = self.get_sources_content(results, use_semantic_captions, use_image_citation=False) + + return ExtraInfo( + DataPoints(text=text_sources), + thoughts=[ + ThoughtStep( + "Search using user query", + q, + { + "use_semantic_captions": use_semantic_captions, + "use_semantic_ranker": use_semantic_ranker, + "use_query_rewriting": use_query_rewriting, + "top": top, + "filter": filter, + "use_vector_search": use_vector_search, + "use_text_search": use_text_search, + }, + ), + ThoughtStep( + "Search results", + [result.serialize_for_results() for result in results], + ), + ], + ) + + async def run_agentic_retrieval_approach( + self, + messages: list[ChatCompletionMessageParam], + overrides: dict[str, Any], + auth_claims: dict[str, Any], + ): + minimum_reranker_score = overrides.get("minimum_reranker_score", 0) + search_index_filter = self.build_filter(overrides, auth_claims) + top = overrides.get("top", 3) + max_subqueries = overrides.get("max_subqueries", 10) + results_merge_strategy = overrides.get("results_merge_strategy", "interleaved") + # 50 is the amount of documents that the reranker can process per query + max_docs_for_reranker = max_subqueries * 50 + + response, results = await self.run_agentic_retrieval( + messages, + self.agent_client, + search_index_name=self.search_index_name, + top=top, + filter_add_on=search_index_filter, + minimum_reranker_score=minimum_reranker_score, + max_docs_for_reranker=max_docs_for_reranker, + results_merge_strategy=results_merge_strategy, + ) + + text_sources = self.get_sources_content(results, use_semantic_captions=False, use_image_citation=False) + + extra_info = ExtraInfo( + DataPoints(text=text_sources), + thoughts=[ + ThoughtStep( + "Use agentic retrieval", + messages, + { + "reranker_threshold": minimum_reranker_score, + "max_docs_for_reranker": max_docs_for_reranker, + "results_merge_strategy": results_merge_strategy, + "filter": search_index_filter, + }, + ), + ThoughtStep( + f"Agentic retrieval results (top {top})", + [result.serialize_for_results() for result in results], + { + "query_plan": ( + [activity.as_dict() for activity in response.activity] if response.activity else None + ), + "model": self.agent_model, + "deployment": self.agent_deployment, + }, + ), + ], + ) + return extra_info diff --git a/app/backend/approaches/retrievethenreadvision.py b/app/backend/approaches/retrievethenreadvision.py new file mode 100644 index 0000000000..a021537c52 --- /dev/null +++ b/app/backend/approaches/retrievethenreadvision.py @@ -0,0 +1,181 @@ +from collections.abc import Awaitable +from typing import Any, Callable, Optional + +from azure.search.documents.aio import SearchClient +from azure.storage.blob.aio import ContainerClient +from openai import AsyncOpenAI +from openai.types.chat import ( + ChatCompletionMessageParam, +) + +from approaches.approach import Approach, DataPoints, ExtraInfo, ThoughtStep +from approaches.promptmanager import PromptManager +from core.authentication import AuthenticationHelper +from core.imageshelper import fetch_image + + +class 
RetrieveThenReadVisionApproach(Approach): + """ + Simple retrieve-then-read implementation, using the AI Search and OpenAI APIs directly. It first retrieves + top documents including images from search, then constructs a prompt with them, and then uses OpenAI to generate an completion + (answer) with that prompt. + """ + + def __init__( + self, + *, + search_client: SearchClient, + blob_container_client: ContainerClient, + openai_client: AsyncOpenAI, + auth_helper: AuthenticationHelper, + gpt4v_deployment: Optional[str], + gpt4v_model: str, + embedding_deployment: Optional[str], # Not needed for non-Azure OpenAI or for retrieval_mode="text" + embedding_model: str, + embedding_dimensions: int, + embedding_field: str, + sourcepage_field: str, + content_field: str, + query_language: str, + query_speller: str, + vision_endpoint: str, + vision_token_provider: Callable[[], Awaitable[str]], + prompt_manager: PromptManager, + ): + self.search_client = search_client + self.blob_container_client = blob_container_client + self.openai_client = openai_client + self.auth_helper = auth_helper + self.embedding_model = embedding_model + self.embedding_deployment = embedding_deployment + self.embedding_dimensions = embedding_dimensions + self.embedding_field = embedding_field + self.sourcepage_field = sourcepage_field + self.content_field = content_field + self.gpt4v_deployment = gpt4v_deployment + self.gpt4v_model = gpt4v_model + self.query_language = query_language + self.query_speller = query_speller + self.vision_endpoint = vision_endpoint + self.vision_token_provider = vision_token_provider + self.prompt_manager = prompt_manager + self.answer_prompt = self.prompt_manager.load_prompt("ask_answer_question_vision.prompty") + # Currently disabled due to issues with rendering token usage in the UI + self.include_token_usage = False + + async def run( + self, + messages: list[ChatCompletionMessageParam], + session_state: Any = None, + context: dict[str, Any] = {}, + ) -> dict[str, Any]: + q = messages[-1]["content"] + if not isinstance(q, str): + raise ValueError("The most recent message content must be a string.") + + overrides = context.get("overrides", {}) + seed = overrides.get("seed", None) + auth_claims = context.get("auth_claims", {}) + use_text_search = overrides.get("retrieval_mode") in ["text", "hybrid", None] + use_vector_search = overrides.get("retrieval_mode") in ["vectors", "hybrid", None] + use_semantic_ranker = True if overrides.get("semantic_ranker") else False + use_query_rewriting = True if overrides.get("query_rewriting") else False + use_semantic_captions = True if overrides.get("semantic_captions") else False + top = overrides.get("top", 3) + minimum_search_score = overrides.get("minimum_search_score", 0.0) + minimum_reranker_score = overrides.get("minimum_reranker_score", 0.0) + filter = self.build_filter(overrides, auth_claims) + + vector_fields = overrides.get("vector_fields", "textAndImageEmbeddings") + send_text_to_gptvision = overrides.get("gpt4v_input") in ["textAndImages", "texts", None] + send_images_to_gptvision = overrides.get("gpt4v_input") in ["textAndImages", "images", None] + + # If retrieval mode includes vectors, compute an embedding for the query + vectors = [] + if use_vector_search: + if vector_fields == "textEmbeddingOnly" or vector_fields == "textAndImageEmbeddings": + vectors.append(await self.compute_text_embedding(q)) + if vector_fields == "imageEmbeddingOnly" or vector_fields == "textAndImageEmbeddings": + vectors.append(await self.compute_image_embedding(q)) + 
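+        # "vector_fields" selects which embedding fields are queried (text, image,
+        # or both), while "gpt4v_input" independently controls which modalities
+        # (text sources and/or image URLs) are sent to the vision model below.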
+ results = await self.search( + top, + q, + filter, + vectors, + use_text_search, + use_vector_search, + use_semantic_ranker, + use_semantic_captions, + minimum_search_score, + minimum_reranker_score, + use_query_rewriting, + ) + + # Process results + text_sources = [] + image_sources = [] + if send_text_to_gptvision: + text_sources = self.get_sources_content(results, use_semantic_captions, use_image_citation=True) + if send_images_to_gptvision: + for result in results: + url = await fetch_image(self.blob_container_client, result) + if url: + image_sources.append(url) + + messages = self.prompt_manager.render_prompt( + self.answer_prompt, + self.get_system_prompt_variables(overrides.get("prompt_template")) + | {"user_query": q, "text_sources": text_sources, "image_sources": image_sources}, + ) + + chat_completion = await self.openai_client.chat.completions.create( + model=self.gpt4v_deployment if self.gpt4v_deployment else self.gpt4v_model, + messages=messages, + temperature=overrides.get("temperature", 0.3), + max_tokens=1024, + n=1, + seed=seed, + ) + + extra_info = ExtraInfo( + DataPoints(text=text_sources, images=image_sources), + [ + ThoughtStep( + "Search using user query", + q, + { + "use_semantic_captions": use_semantic_captions, + "use_semantic_ranker": use_semantic_ranker, + "use_query_rewriting": use_query_rewriting, + "top": top, + "filter": filter, + "vector_fields": vector_fields, + "use_vector_search": use_vector_search, + "use_text_search": use_text_search, + }, + ), + ThoughtStep( + "Search results", + [result.serialize_for_results() for result in results], + ), + ThoughtStep( + "Prompt to generate answer", + messages, + ( + {"model": self.gpt4v_model, "deployment": self.gpt4v_deployment} + if self.gpt4v_deployment + else {"model": self.gpt4v_model} + ), + ), + ], + ) + + return { + "message": { + "content": chat_completion.choices[0].message.content, + "role": chat_completion.choices[0].message.role, + }, + "context": extra_info, + "session_state": session_state, + } diff --git a/app/backend/chat_history/__init__.py b/app/backend/chat_history/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/app/backend/chat_history/cosmosdb.py b/app/backend/chat_history/cosmosdb.py new file mode 100644 index 0000000000..764278bc76 --- /dev/null +++ b/app/backend/chat_history/cosmosdb.py @@ -0,0 +1,239 @@ +import os +import time +from typing import Any, Union + +from azure.cosmos.aio import ContainerProxy, CosmosClient +from azure.identity.aio import AzureDeveloperCliCredential, ManagedIdentityCredential +from quart import Blueprint, current_app, jsonify, make_response, request + +from config import ( + CONFIG_CHAT_HISTORY_COSMOS_ENABLED, + CONFIG_COSMOS_HISTORY_CLIENT, + CONFIG_COSMOS_HISTORY_CONTAINER, + CONFIG_COSMOS_HISTORY_VERSION, + CONFIG_CREDENTIAL, +) +from decorators import authenticated +from error import error_response + +chat_history_cosmosdb_bp = Blueprint("chat_history_cosmos", __name__, static_folder="static") + + +@chat_history_cosmosdb_bp.post("/chat_history") +@authenticated +async def post_chat_history(auth_claims: dict[str, Any]): + if not current_app.config[CONFIG_CHAT_HISTORY_COSMOS_ENABLED]: + return jsonify({"error": "Chat history not enabled"}), 400 + + container: ContainerProxy = current_app.config[CONFIG_COSMOS_HISTORY_CONTAINER] + if not container: + return jsonify({"error": "Chat history not enabled"}), 400 + + entra_oid = auth_claims.get("oid") + if not entra_oid: + return jsonify({"error": "User OID not found"}), 401 + + try: + 
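For orientation (an illustrative aside; the handler body continues right after this), the JSON body this endpoint parses below pairs each user question with the generated response. A hypothetical client call, with a placeholder bearer token and an assumed client-generated session id, might look like:

```python
# Hypothetical payload for POST /chat_history, shaped to match the parsing code below:
# "id" is the session id, "answers" is a list of [question, response] pairs.
import aiohttp


async def save_chat(base_url: str, token: str) -> None:
    payload = {
        "id": "5e1610cd-3f0c-4a0a-9e3f-8a2e3d1b1c2f",  # assumed client-generated session id
        "answers": [
            ["What does a Product Manager do?", "A Product Manager is responsible for ..."],
        ],
    }
    async with aiohttp.ClientSession(headers={"Authorization": f"Bearer {token}"}) as session:
        async with session.post(f"{base_url}/chat_history", json=payload) as resp:
            resp.raise_for_status()  # the handler returns 201 on success
```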
request_json = await request.get_json() + session_id = request_json.get("id") + message_pairs = request_json.get("answers") + first_question = message_pairs[0][0] + title = first_question + "..." if len(first_question) > 50 else first_question + timestamp = int(time.time() * 1000) + + # Insert the session item: + session_item = { + "id": session_id, + "version": current_app.config[CONFIG_COSMOS_HISTORY_VERSION], + "session_id": session_id, + "entra_oid": entra_oid, + "type": "session", + "title": title, + "timestamp": timestamp, + } + + message_pair_items = [] + # Now insert a message item for each question/response pair: + for ind, message_pair in enumerate(message_pairs): + message_pair_items.append( + { + "id": f"{session_id}-{ind}", + "version": current_app.config[CONFIG_COSMOS_HISTORY_VERSION], + "session_id": session_id, + "entra_oid": entra_oid, + "type": "message_pair", + "question": message_pair[0], + "response": message_pair[1], + } + ) + + batch_operations = [("upsert", (session_item,))] + [ + ("upsert", (message_pair_item,)) for message_pair_item in message_pair_items + ] + await container.execute_item_batch(batch_operations=batch_operations, partition_key=[entra_oid, session_id]) + return jsonify({}), 201 + except Exception as error: + return error_response(error, "/chat_history") + + +@chat_history_cosmosdb_bp.get("/chat_history/sessions") +@authenticated +async def get_chat_history_sessions(auth_claims: dict[str, Any]): + if not current_app.config[CONFIG_CHAT_HISTORY_COSMOS_ENABLED]: + return jsonify({"error": "Chat history not enabled"}), 400 + + container: ContainerProxy = current_app.config[CONFIG_COSMOS_HISTORY_CONTAINER] + if not container: + return jsonify({"error": "Chat history not enabled"}), 400 + + entra_oid = auth_claims.get("oid") + if not entra_oid: + return jsonify({"error": "User OID not found"}), 401 + + try: + count = int(request.args.get("count", 10)) + continuation_token = request.args.get("continuation_token") + + res = container.query_items( + query="SELECT c.id, c.entra_oid, c.title, c.timestamp FROM c WHERE c.entra_oid = @entra_oid AND c.type = @type ORDER BY c.timestamp DESC", + parameters=[dict(name="@entra_oid", value=entra_oid), dict(name="@type", value="session")], + partition_key=[entra_oid], + max_item_count=count, + ) + + pager = res.by_page(continuation_token) + + # Get the first page, and the continuation token + sessions = [] + try: + page = await pager.__anext__() + continuation_token = pager.continuation_token # type: ignore + + async for item in page: + sessions.append( + { + "id": item.get("id"), + "entra_oid": item.get("entra_oid"), + "title": item.get("title", "untitled"), + "timestamp": item.get("timestamp"), + } + ) + + # If there are no more pages, StopAsyncIteration is raised + except StopAsyncIteration: + continuation_token = None + + return jsonify({"sessions": sessions, "continuation_token": continuation_token}), 200 + + except Exception as error: + return error_response(error, "/chat_history/sessions") + + +@chat_history_cosmosdb_bp.get("/chat_history/sessions/") +@authenticated +async def get_chat_history_session(auth_claims: dict[str, Any], session_id: str): + if not current_app.config[CONFIG_CHAT_HISTORY_COSMOS_ENABLED]: + return jsonify({"error": "Chat history not enabled"}), 400 + + container: ContainerProxy = current_app.config[CONFIG_COSMOS_HISTORY_CONTAINER] + if not container: + return jsonify({"error": "Chat history not enabled"}), 400 + + entra_oid = auth_claims.get("oid") + if not entra_oid: + return 
jsonify({"error": "User OID not found"}), 401 + + try: + res = container.query_items( + query="SELECT * FROM c WHERE c.session_id = @session_id AND c.type = @type", + parameters=[dict(name="@session_id", value=session_id), dict(name="@type", value="message_pair")], + partition_key=[entra_oid, session_id], + ) + + message_pairs = [] + async for page in res.by_page(): + async for item in page: + message_pairs.append([item["question"], item["response"]]) + + return ( + jsonify( + { + "id": session_id, + "entra_oid": entra_oid, + "answers": message_pairs, + } + ), + 200, + ) + except Exception as error: + return error_response(error, f"/chat_history/sessions/{session_id}") + + +@chat_history_cosmosdb_bp.delete("/chat_history/sessions/") +@authenticated +async def delete_chat_history_session(auth_claims: dict[str, Any], session_id: str): + if not current_app.config[CONFIG_CHAT_HISTORY_COSMOS_ENABLED]: + return jsonify({"error": "Chat history not enabled"}), 400 + + container: ContainerProxy = current_app.config[CONFIG_COSMOS_HISTORY_CONTAINER] + if not container: + return jsonify({"error": "Chat history not enabled"}), 400 + + entra_oid = auth_claims.get("oid") + if not entra_oid: + return jsonify({"error": "User OID not found"}), 401 + + try: + res = container.query_items( + query="SELECT c.id FROM c WHERE c.session_id = @session_id", + parameters=[dict(name="@session_id", value=session_id)], + partition_key=[entra_oid, session_id], + ) + + ids_to_delete = [] + async for page in res.by_page(): + async for item in page: + ids_to_delete.append(item["id"]) + + batch_operations = [("delete", (id,)) for id in ids_to_delete] + await container.execute_item_batch(batch_operations=batch_operations, partition_key=[entra_oid, session_id]) + return await make_response("", 204) + except Exception as error: + return error_response(error, f"/chat_history/sessions/{session_id}") + + +@chat_history_cosmosdb_bp.before_app_serving +async def setup_clients(): + USE_CHAT_HISTORY_COSMOS = os.getenv("USE_CHAT_HISTORY_COSMOS", "").lower() == "true" + AZURE_COSMOSDB_ACCOUNT = os.getenv("AZURE_COSMOSDB_ACCOUNT") + AZURE_CHAT_HISTORY_DATABASE = os.getenv("AZURE_CHAT_HISTORY_DATABASE") + AZURE_CHAT_HISTORY_CONTAINER = os.getenv("AZURE_CHAT_HISTORY_CONTAINER") + + azure_credential: Union[AzureDeveloperCliCredential, ManagedIdentityCredential] = current_app.config[ + CONFIG_CREDENTIAL + ] + + if USE_CHAT_HISTORY_COSMOS: + current_app.logger.info("USE_CHAT_HISTORY_COSMOS is true, setting up CosmosDB client") + if not AZURE_COSMOSDB_ACCOUNT: + raise ValueError("AZURE_COSMOSDB_ACCOUNT must be set when USE_CHAT_HISTORY_COSMOS is true") + if not AZURE_CHAT_HISTORY_DATABASE: + raise ValueError("AZURE_CHAT_HISTORY_DATABASE must be set when USE_CHAT_HISTORY_COSMOS is true") + if not AZURE_CHAT_HISTORY_CONTAINER: + raise ValueError("AZURE_CHAT_HISTORY_CONTAINER must be set when USE_CHAT_HISTORY_COSMOS is true") + cosmos_client = CosmosClient( + url=f"https://{AZURE_COSMOSDB_ACCOUNT}.documents.azure.com:443/", credential=azure_credential + ) + cosmos_db = cosmos_client.get_database_client(AZURE_CHAT_HISTORY_DATABASE) + cosmos_container = cosmos_db.get_container_client(AZURE_CHAT_HISTORY_CONTAINER) + + current_app.config[CONFIG_COSMOS_HISTORY_CLIENT] = cosmos_client + current_app.config[CONFIG_COSMOS_HISTORY_CONTAINER] = cosmos_container + current_app.config[CONFIG_COSMOS_HISTORY_VERSION] = os.environ["AZURE_CHAT_HISTORY_VERSION"] + + +@chat_history_cosmosdb_bp.after_app_serving +async def close_clients(): + if 
current_app.config.get(CONFIG_COSMOS_HISTORY_CLIENT): + cosmos_client: CosmosClient = current_app.config[CONFIG_COSMOS_HISTORY_CLIENT] + await cosmos_client.close() diff --git a/app/backend/config.py b/app/backend/config.py new file mode 100644 index 0000000000..443c0171fa --- /dev/null +++ b/app/backend/config.py @@ -0,0 +1,36 @@ +CONFIG_OPENAI_TOKEN = "openai_token" +CONFIG_CREDENTIAL = "azure_credential" +CONFIG_ASK_APPROACH = "ask_approach" +CONFIG_ASK_VISION_APPROACH = "ask_vision_approach" +CONFIG_CHAT_VISION_APPROACH = "chat_vision_approach" +CONFIG_CHAT_APPROACH = "chat_approach" +CONFIG_BLOB_CONTAINER_CLIENT = "blob_container_client" +CONFIG_USER_UPLOAD_ENABLED = "user_upload_enabled" +CONFIG_USER_BLOB_CONTAINER_CLIENT = "user_blob_container_client" +CONFIG_AUTH_CLIENT = "auth_client" +CONFIG_GPT4V_DEPLOYED = "gpt4v_deployed" +CONFIG_SEMANTIC_RANKER_DEPLOYED = "semantic_ranker_deployed" +CONFIG_QUERY_REWRITING_ENABLED = "query_rewriting_enabled" +CONFIG_REASONING_EFFORT_ENABLED = "reasoning_effort_enabled" +CONFIG_VISION_REASONING_EFFORT_ENABLED = "vision_reasoning_effort_enabled" +CONFIG_DEFAULT_REASONING_EFFORT = "default_reasoning_effort" +CONFIG_VECTOR_SEARCH_ENABLED = "vector_search_enabled" +CONFIG_SEARCH_CLIENT = "search_client" +CONFIG_OPENAI_CLIENT = "openai_client" +CONFIG_AGENT_CLIENT = "agent_client" +CONFIG_INGESTER = "ingester" +CONFIG_LANGUAGE_PICKER_ENABLED = "language_picker_enabled" +CONFIG_SPEECH_INPUT_ENABLED = "speech_input_enabled" +CONFIG_SPEECH_OUTPUT_BROWSER_ENABLED = "speech_output_browser_enabled" +CONFIG_SPEECH_OUTPUT_AZURE_ENABLED = "speech_output_azure_enabled" +CONFIG_SPEECH_SERVICE_ID = "speech_service_id" +CONFIG_SPEECH_SERVICE_LOCATION = "speech_service_location" +CONFIG_SPEECH_SERVICE_TOKEN = "speech_service_token" +CONFIG_SPEECH_SERVICE_VOICE = "speech_service_voice" +CONFIG_STREAMING_ENABLED = "streaming_enabled" +CONFIG_CHAT_HISTORY_BROWSER_ENABLED = "chat_history_browser_enabled" +CONFIG_CHAT_HISTORY_COSMOS_ENABLED = "chat_history_cosmos_enabled" +CONFIG_AGENTIC_RETRIEVAL_ENABLED = "agentic_retrieval" +CONFIG_COSMOS_HISTORY_CLIENT = "cosmos_history_client" +CONFIG_COSMOS_HISTORY_CONTAINER = "cosmos_history_container" +CONFIG_COSMOS_HISTORY_VERSION = "cosmos_history_version" diff --git a/app/backend/core/authentication.py b/app/backend/core/authentication.py new file mode 100644 index 0000000000..2c9aaf87d4 --- /dev/null +++ b/app/backend/core/authentication.py @@ -0,0 +1,361 @@ +# Refactored from https://github.com/Azure-Samples/ms-identity-python-on-behalf-of + +import base64 +import json +import logging +from typing import Any, Optional + +import aiohttp +import jwt +from azure.search.documents.aio import SearchClient +from azure.search.documents.indexes.models import SearchIndex +from cryptography.hazmat.primitives import serialization +from cryptography.hazmat.primitives.asymmetric import rsa +from msal import ConfidentialClientApplication +from msal.token_cache import TokenCache +from tenacity import ( + AsyncRetrying, + retry_if_exception_type, + stop_after_attempt, + wait_random_exponential, +) + + +# AuthError is raised when the authentication token sent by the client UI cannot be parsed or there is an authentication error accessing the graph API +class AuthError(Exception): + def __init__(self, error, status_code): + self.error = error + self.status_code = status_code + + def __str__(self) -> str: + return self.error or "" + + +class AuthenticationHelper: + scope: str = "https://graph.microsoft.com/.default" + + def __init__( + self, 
+ search_index: Optional[SearchIndex], + use_authentication: bool, + server_app_id: Optional[str], + server_app_secret: Optional[str], + client_app_id: Optional[str], + tenant_id: Optional[str], + require_access_control: bool = False, + enable_global_documents: bool = False, + enable_unauthenticated_access: bool = False, + ): + self.use_authentication = use_authentication + self.server_app_id = server_app_id + self.server_app_secret = server_app_secret + self.client_app_id = client_app_id + self.tenant_id = tenant_id + self.authority = f"https://login.microsoftonline.com/{tenant_id}" + # Depending on if requestedAccessTokenVersion is 1 or 2, the issuer and audience of the token may be different + # See https://learn.microsoft.com/graph/api/resources/apiapplication + self.valid_issuers = [ + f"https://sts.windows.net/{tenant_id}/", + f"https://login.microsoftonline.com/{tenant_id}/v2.0", + ] + self.valid_audiences = [f"api://{server_app_id}", str(server_app_id)] + # See https://learn.microsoft.com/entra/identity-platform/access-tokens#validate-the-issuer for more information on token validation + self.key_url = f"{self.authority}/discovery/v2.0/keys" + + if self.use_authentication: + field_names = [field.name for field in search_index.fields] if search_index else [] + self.has_auth_fields = "oids" in field_names and "groups" in field_names + self.require_access_control = require_access_control + self.enable_global_documents = enable_global_documents + self.enable_unauthenticated_access = enable_unauthenticated_access + self.confidential_client = ConfidentialClientApplication( + server_app_id, authority=self.authority, client_credential=server_app_secret, token_cache=TokenCache() + ) + else: + self.has_auth_fields = False + self.require_access_control = False + self.enable_global_documents = True + self.enable_unauthenticated_access = True + + def get_auth_setup_for_client(self) -> dict[str, Any]: + # returns MSAL.js settings used by the client app + return { + "useLogin": self.use_authentication, # Whether or not login elements are enabled on the UI + "requireAccessControl": self.require_access_control, # Whether or not access control is required to access documents with access control lists + "enableUnauthenticatedAccess": self.enable_unauthenticated_access, # Whether or not the user can access the app without login + "msalConfig": { + "auth": { + "clientId": self.client_app_id, # Client app id used for login + "authority": self.authority, # Directory to use for login https://learn.microsoft.com/entra/identity-platform/msal-client-application-configuration#authority + "redirectUri": "/redirect", # Points to window.location.origin. You must register this URI on Azure Portal/App Registration. + "postLogoutRedirectUri": "/", # Indicates the page to navigate after logout. + "navigateToLoginRequestUrl": False, # If "true", will navigate back to the original request location before processing the auth code response. + }, + "cache": { + # Configures cache location. "sessionStorage" is more secure, but "localStorage" gives you SSO between tabs. + "cacheLocation": "localStorage", + # Set this to "true" if you are having issues on IE11 or Edge + "storeAuthStateInCookie": False, + }, + }, + "loginRequest": { + # Scopes you add here will be prompted for user consent during sign-in. + # By default, MSAL.js will add OIDC scopes (openid, profile, email) to any login request. 
+ # For more information about OIDC scopes, visit: + # https://learn.microsoft.com/entra/identity-platform/permissions-consent-overview#openid-connect-scopes + "scopes": [".default"], + # Uncomment the following line to cause a consent dialog to appear on every login + # For more information, please visit https://learn.microsoft.com/entra/identity-platform/v2-oauth2-auth-code-flow#request-an-authorization-code + # "prompt": "consent" + }, + "tokenRequest": { + "scopes": [f"api://{self.server_app_id}/access_as_user"], + }, + } + + @staticmethod + def get_token_auth_header(headers: dict) -> str: + # Obtains the Access Token from the Authorization Header + auth = headers.get("Authorization") + if auth: + parts = auth.split() + + if parts[0].lower() != "bearer": + raise AuthError(error="Authorization header must start with Bearer", status_code=401) + elif len(parts) == 1: + raise AuthError(error="Token not found", status_code=401) + elif len(parts) > 2: + raise AuthError(error="Authorization header must be Bearer token", status_code=401) + + token = parts[1] + return token + + # App services built-in authentication passes the access token directly as a header + # To learn more, please visit https://learn.microsoft.com/azure/app-service/configure-authentication-oauth-tokens + token = headers.get("x-ms-token-aad-access-token") + if token: + return token + + raise AuthError(error="Authorization header is expected", status_code=401) + + def build_security_filters(self, overrides: dict[str, Any], auth_claims: dict[str, Any]): + # Build different permutations of the oid or groups security filter using OData filters + # https://learn.microsoft.com/azure/search/search-security-trimming-for-azure-search + # https://learn.microsoft.com/azure/search/search-query-odata-filter + use_oid_security_filter = self.require_access_control or overrides.get("use_oid_security_filter") + use_groups_security_filter = self.require_access_control or overrides.get("use_groups_security_filter") + + if (use_oid_security_filter or use_groups_security_filter) and not self.has_auth_fields: + raise AuthError( + error="oids and groups must be defined in the search index to use authentication", status_code=400 + ) + + oid_security_filter = ( + "oids/any(g:search.in(g, '{}'))".format(auth_claims.get("oid", "")) if use_oid_security_filter else None + ) + groups_security_filter = ( + "groups/any(g:search.in(g, '{}'))".format(", ".join(auth_claims.get("groups", []))) + if use_groups_security_filter + else None + ) + + # If only one security filter is specified, use that filter + # If both security filters are specified, combine them with "or" so only 1 security filter needs to pass + # If no security filters are specified, don't return any filter + security_filter = None + if oid_security_filter and not groups_security_filter: + security_filter = f"{oid_security_filter}" + elif groups_security_filter and not oid_security_filter: + security_filter = f"{groups_security_filter}" + elif oid_security_filter and groups_security_filter: + security_filter = f"({oid_security_filter} or {groups_security_filter})" + + # If global documents are allowed, append the public global filter + if self.enable_global_documents: + global_documents_filter = "(not oids/any() and not groups/any())" + if security_filter: + security_filter = f"({security_filter} or {global_documents_filter})" + + return security_filter + + @staticmethod + async def list_groups(graph_resource_access_token: dict) -> list[str]: + headers = {"Authorization": "Bearer " + 
graph_resource_access_token["access_token"]} + groups = [] + async with aiohttp.ClientSession(headers=headers) as session: + resp_json = None + resp_status = None + async with session.get(url="https://graph.microsoft.com/v1.0/me/transitiveMemberOf?$select=id") as resp: + resp_json = await resp.json() + resp_status = resp.status + if resp_status != 200: + raise AuthError(error=json.dumps(resp_json), status_code=resp_status) + + while resp_status == 200: + value = resp_json["value"] + for group in value: + groups.append(group["id"]) + next_link = resp_json.get("@odata.nextLink") + if next_link: + async with session.get(url=next_link) as resp: + resp_json = await resp.json() + resp_status = resp.status + else: + break + if resp_status != 200: + raise AuthError(error=json.dumps(resp_json), status_code=resp_status) + + return groups + + async def get_auth_claims_if_enabled(self, headers: dict) -> dict[str, Any]: + if not self.use_authentication: + return {} + try: + # Read the authentication token from the authorization header and exchange it using the On Behalf Of Flow + # The scope is set to the Microsoft Graph API, which may need to be called for more authorization information + # https://learn.microsoft.com/entra/identity-platform/v2-oauth2-on-behalf-of-flow + auth_token = AuthenticationHelper.get_token_auth_header(headers) + # Validate the token before use + await self.validate_access_token(auth_token) + + # Use the on-behalf-of-flow to acquire another token for use with Microsoft Graph + # See https://learn.microsoft.com/entra/identity-platform/v2-oauth2-on-behalf-of-flow for more information + graph_resource_access_token = self.confidential_client.acquire_token_on_behalf_of( + user_assertion=auth_token, scopes=["https://graph.microsoft.com/.default"] + ) + if "error" in graph_resource_access_token: + raise AuthError(error=str(graph_resource_access_token), status_code=401) + + # Read the claims from the response. The oid and groups claims are used for security filtering + # https://learn.microsoft.com/entra/identity-platform/id-token-claims-reference + id_token_claims = graph_resource_access_token["id_token_claims"] + auth_claims = {"oid": id_token_claims["oid"], "groups": id_token_claims.get("groups", [])} + + # A groups claim may have been omitted either because it was not added in the application manifest for the API application, + # or a groups overage claim may have been emitted. 
+ # https://learn.microsoft.com/entra/identity-platform/id-token-claims-reference#groups-overage-claim + missing_groups_claim = "groups" not in id_token_claims + has_group_overage_claim = ( + missing_groups_claim + and "_claim_names" in id_token_claims + and "groups" in id_token_claims["_claim_names"] + ) + if missing_groups_claim or has_group_overage_claim: + # Read the user's groups from Microsoft Graph + auth_claims["groups"] = await AuthenticationHelper.list_groups(graph_resource_access_token) + return auth_claims + except AuthError as e: + logging.exception("Exception getting authorization information - " + json.dumps(e.error)) + if self.require_access_control and not self.enable_unauthenticated_access: + raise + return {} + except Exception: + logging.exception("Exception getting authorization information") + if self.require_access_control and not self.enable_unauthenticated_access: + raise + return {} + + async def check_path_auth(self, path: str, auth_claims: dict[str, Any], search_client: SearchClient) -> bool: + # Start with the standard security filter for all queries + security_filter = self.build_security_filters(overrides={}, auth_claims=auth_claims) + # If there was no security filter or no path, then the path is allowed + if not security_filter or len(path) == 0: + return True + + # Remove any fragment string from the path before checking + fragment_index = path.find("#") + if fragment_index != -1: + path = path[:fragment_index] + + # Filter down to only chunks that are from the specific source file + # Sourcepage is used for GPT-4V + # Replace ' with '' to escape the single quote for the filter + # https://learn.microsoft.com/azure/search/query-odata-filter-orderby-syntax#escaping-special-characters-in-string-constants + path_for_filter = path.replace("'", "''") + filter = f"{security_filter} and ((sourcefile eq '{path_for_filter}') or (sourcepage eq '{path_for_filter}'))" + + # If the filter returns any results, the user is allowed to access the document + # Otherwise, access is denied + results = await search_client.search(search_text="*", top=1, filter=filter) + allowed = False + async for _ in results: + allowed = True + break + + return allowed + + async def create_pem_format(self, jwks, token): + unverified_header = jwt.get_unverified_header(token) + for key in jwks["keys"]: + if key["kid"] == unverified_header["kid"]: + # Construct the RSA public key + public_numbers = rsa.RSAPublicNumbers( + e=int.from_bytes(base64.urlsafe_b64decode(key["e"] + "=="), byteorder="big"), + n=int.from_bytes(base64.urlsafe_b64decode(key["n"] + "=="), byteorder="big"), + ) + public_key = public_numbers.public_key() + + # Convert to PEM format + pem_key = public_key.public_bytes( + encoding=serialization.Encoding.PEM, format=serialization.PublicFormat.SubjectPublicKeyInfo + ) + rsa_key = pem_key + return rsa_key + + # See https://github.com/Azure-Samples/ms-identity-python-on-behalf-of/blob/939be02b11f1604814532fdacc2c2eccd198b755/FlaskAPI/helpers/authorization.py#L44 + async def validate_access_token(self, token: str): + """ + Validate an access token is issued by Entra + """ + jwks = None + async for attempt in AsyncRetrying( + retry=retry_if_exception_type(AuthError), + wait=wait_random_exponential(min=15, max=60), + stop=stop_after_attempt(5), + ): + with attempt: + async with aiohttp.ClientSession() as session: + async with session.get(url=self.key_url) as resp: + resp_status = resp.status + if resp_status in [500, 502, 503, 504]: + raise AuthError( + error=f"Failed to get keys info: 
{await resp.text()}", status_code=resp_status + ) + jwks = await resp.json() + + if not jwks or "keys" not in jwks: + raise AuthError("Unable to get keys to validate auth token.", 401) + + rsa_key = None + issuer = None + audience = None + try: + unverified_claims = jwt.decode(token, options={"verify_signature": False}) + issuer = unverified_claims.get("iss") + audience = unverified_claims.get("aud") + rsa_key = await self.create_pem_format(jwks, token) + except jwt.PyJWTError as exc: + raise AuthError("Unable to parse authorization token.", 401) from exc + if not rsa_key: + raise AuthError("Unable to find appropriate key", 401) + + if issuer not in self.valid_issuers: + raise AuthError(f"Issuer {issuer} not in {','.join(self.valid_issuers)}", 401) + + if audience not in self.valid_audiences: + raise AuthError( + f"Audience {audience} not in {','.join(self.valid_audiences)}", + 401, + ) + + try: + jwt.decode(token, rsa_key, algorithms=["RS256"], audience=audience, issuer=issuer) + except jwt.ExpiredSignatureError as jwt_expired_exc: + raise AuthError("Token is expired", 401) from jwt_expired_exc + except (jwt.InvalidAudienceError, jwt.InvalidIssuerError) as jwt_claims_exc: + raise AuthError( + "Incorrect claims: please check the audience and issuer", + 401, + ) from jwt_claims_exc + except Exception as exc: + raise AuthError("Unable to parse authorization token.", 401) from exc diff --git a/app/backend/core/imageshelper.py b/app/backend/core/imageshelper.py new file mode 100644 index 0000000000..87e8b8970f --- /dev/null +++ b/app/backend/core/imageshelper.py @@ -0,0 +1,40 @@ +import base64 +import logging +import os +from typing import Optional + +from azure.core.exceptions import ResourceNotFoundError +from azure.storage.blob.aio import ContainerClient +from typing_extensions import Literal, Required, TypedDict + +from approaches.approach import Document + + +class ImageURL(TypedDict, total=False): + url: Required[str] + """Either a URL of the image or the base64 encoded image data.""" + + detail: Literal["auto", "low", "high"] + """Specifies the detail level of the image.""" + + +async def download_blob_as_base64(blob_container_client: ContainerClient, file_path: str) -> Optional[str]: + base_name, _ = os.path.splitext(file_path) + image_filename = base_name + ".png" + try: + blob = await blob_container_client.get_blob_client(image_filename).download_blob() + if not blob.properties: + logging.warning(f"No blob exists for {image_filename}") + return None + img = base64.b64encode(await blob.readall()).decode("utf-8") + return f"data:image/png;base64,{img}" + except ResourceNotFoundError: + logging.warning(f"No blob exists for {image_filename}") + return None + + +async def fetch_image(blob_container_client: ContainerClient, result: Document) -> Optional[str]: + if result.sourcepage: + img = await download_blob_as_base64(blob_container_client, result.sourcepage) + return img + return None diff --git a/app/backend/core/messagebuilder.py b/app/backend/core/messagebuilder.py deleted file mode 100644 index 9872927461..0000000000 --- a/app/backend/core/messagebuilder.py +++ /dev/null @@ -1,25 +0,0 @@ -from .modelhelper import num_tokens_from_messages - - -class MessageBuilder: - """ - A class for building and managing messages in a chat conversation. - Attributes: - message (list): A list of dictionaries representing chat messages. - model (str): The name of the ChatGPT model. - token_count (int): The total number of tokens in the conversation. 
- Methods: - __init__(self, system_content: str, chatgpt_model: str): Initializes the MessageBuilder instance. - append_message(self, role: str, content: str, index: int = 1): Appends a new message to the conversation. - """ - - def __init__(self, system_content: str, chatgpt_model: str): - self.messages = [{'role': 'system', 'content': system_content}] - self.model = chatgpt_model - self.token_length = num_tokens_from_messages( - self.messages[-1], self.model) - - def append_message(self, role: str, content: str, index: int = 1): - self.messages.insert(index, {'role': role, 'content': content}) - self.token_length += num_tokens_from_messages( - self.messages[index], self.model) diff --git a/app/backend/core/modelhelper.py b/app/backend/core/modelhelper.py deleted file mode 100644 index c23cd01a75..0000000000 --- a/app/backend/core/modelhelper.py +++ /dev/null @@ -1,53 +0,0 @@ -from __future__ import annotations - -import tiktoken - -MODELS_2_TOKEN_LIMITS = { - "gpt-35-turbo": 4000, - "gpt-3.5-turbo": 4000, - "gpt-35-turbo-16k": 16000, - "gpt-3.5-turbo-16k": 16000, - "gpt-4": 8100, - "gpt-4-32k": 32000 -} - -AOAI_2_OAI = { - "gpt-35-turbo": "gpt-3.5-turbo", - "gpt-35-turbo-16k": "gpt-3.5-turbo-16k" -} - - -def get_token_limit(model_id: str) -> int: - if model_id not in MODELS_2_TOKEN_LIMITS: - raise ValueError("Expected model gpt-35-turbo and above") - return MODELS_2_TOKEN_LIMITS.get(model_id) - - -def num_tokens_from_messages(message: dict[str, str], model: str) -> int: - """ - Calculate the number of tokens required to encode a message. - Args: - message (dict): The message to encode, represented as a dictionary. - model (str): The name of the model to use for encoding. - Returns: - int: The total number of tokens required to encode the message. 
- Example: - message = {'role': 'user', 'content': 'Hello, how are you?'} - model = 'gpt-3.5-turbo' - num_tokens_from_messages(message, model) - output: 11 - """ - encoding = tiktoken.encoding_for_model(get_oai_chatmodel_tiktok(model)) - num_tokens = 2 # For "role" and "content" keys - for key, value in message.items(): - num_tokens += len(encoding.encode(value)) - return num_tokens - - -def get_oai_chatmodel_tiktok(aoaimodel: str) -> str: - message = "Expected Azure OpenAI ChatGPT model name" - if aoaimodel == "" or aoaimodel is None: - raise ValueError(message) - if aoaimodel not in AOAI_2_OAI and aoaimodel not in MODELS_2_TOKEN_LIMITS: - raise ValueError(message) - return AOAI_2_OAI.get(aoaimodel) or aoaimodel diff --git a/app/backend/core/sessionhelper.py b/app/backend/core/sessionhelper.py new file mode 100644 index 0000000000..ddda8e03b7 --- /dev/null +++ b/app/backend/core/sessionhelper.py @@ -0,0 +1,12 @@ +import uuid +from typing import Union + + +def create_session_id( + config_chat_history_cosmos_enabled: bool, config_chat_history_browser_enabled: bool +) -> Union[str, None]: + if config_chat_history_cosmos_enabled: + return str(uuid.uuid4()) + if config_chat_history_browser_enabled: + return str(uuid.uuid4()) + return None diff --git a/app/backend/custom_uvicorn_worker.py b/app/backend/custom_uvicorn_worker.py new file mode 100644 index 0000000000..851be98240 --- /dev/null +++ b/app/backend/custom_uvicorn_worker.py @@ -0,0 +1,47 @@ +from uvicorn.workers import UvicornWorker + +logconfig_dict = { + "version": 1, + "disable_existing_loggers": False, + "formatters": { + "default": { + "()": "uvicorn.logging.DefaultFormatter", + "format": "%(asctime)s - %(levelname)s - %(message)s", + }, + "access": { + "()": "uvicorn.logging.AccessFormatter", + "format": "%(asctime)s - %(message)s", + }, + }, + "handlers": { + "default": { + "formatter": "default", + "class": "logging.StreamHandler", + "stream": "ext://sys.stderr", + }, + "access": { + "formatter": "access", + "class": "logging.StreamHandler", + "stream": "ext://sys.stdout", + }, + }, + "loggers": { + "root": {"handlers": ["default"]}, + "uvicorn.error": { + "level": "INFO", + "handlers": ["default"], + "propagate": False, + }, + "uvicorn.access": { + "level": "INFO", + "handlers": ["access"], + "propagate": False, + }, + }, +} + + +class CustomUvicornWorker(UvicornWorker): + CONFIG_KWARGS = { + "log_config": logconfig_dict, + } diff --git a/app/backend/data/employeeinfo.csv b/app/backend/data/employeeinfo.csv deleted file mode 100644 index b3a4c884bd..0000000000 --- a/app/backend/data/employeeinfo.csv +++ /dev/null @@ -1,4 +0,0 @@ -name,title,insurance,insurancegroup -Employee1,Program Manager,Northwind Health Plus,Family -Employee2,Software Engineer,Northwind Health Plus,Single -Employee3,Software Engineer,Northwind Health Standard,Family diff --git a/app/backend/decorators.py b/app/backend/decorators.py new file mode 100644 index 0000000000..6638767435 --- /dev/null +++ b/app/backend/decorators.py @@ -0,0 +1,58 @@ +import logging +from functools import wraps +from typing import Any, Callable, TypeVar, cast + +from quart import abort, current_app, request + +from config import CONFIG_AUTH_CLIENT, CONFIG_SEARCH_CLIENT +from core.authentication import AuthError +from error import error_response + + +def authenticated_path(route_fn: Callable[[str, dict[str, Any]], Any]): + """ + Decorator for routes that request a specific file that might require access control enforcement + """ + + @wraps(route_fn) + async def 
auth_handler(path=""): + # If authentication is enabled, validate the user can access the file + auth_helper = current_app.config[CONFIG_AUTH_CLIENT] + search_client = current_app.config[CONFIG_SEARCH_CLIENT] + authorized = False + try: + auth_claims = await auth_helper.get_auth_claims_if_enabled(request.headers) + authorized = await auth_helper.check_path_auth(path, auth_claims, search_client) + except AuthError: + abort(403) + except Exception as error: + logging.exception("Problem checking path auth %s", error) + return error_response(error, route="/content") + + if not authorized: + abort(403) + + return await route_fn(path, auth_claims) + + return auth_handler + + +_C = TypeVar("_C", bound=Callable[..., Any]) + + +def authenticated(route_fn: _C) -> _C: + """ + Decorator for routes that might require access control. Unpacks Authorization header information into an auth_claims dictionary + """ + + @wraps(route_fn) + async def auth_handler(*args, **kwargs): + auth_helper = current_app.config[CONFIG_AUTH_CLIENT] + try: + auth_claims = await auth_helper.get_auth_claims_if_enabled(request.headers) + except AuthError: + abort(403) + + return await route_fn(auth_claims, *args, **kwargs) + + return cast(_C, auth_handler) diff --git a/app/backend/error.py b/app/backend/error.py new file mode 100644 index 0000000000..0a21afe6b7 --- /dev/null +++ b/app/backend/error.py @@ -0,0 +1,27 @@ +import logging + +from openai import APIError +from quart import jsonify + +ERROR_MESSAGE = """The app encountered an error processing your request. +If you are an administrator of the app, view the full error in the logs. See aka.ms/appservice-logs for more information. +Error type: {error_type} +""" +ERROR_MESSAGE_FILTER = """Your message contains content that was flagged by the OpenAI content filter.""" + +ERROR_MESSAGE_LENGTH = """Your message exceeded the context length limit for this OpenAI model. 
Please shorten your message or change your settings to retrieve fewer search results."""
+
+
+def error_dict(error: Exception) -> dict:
+    if isinstance(error, APIError) and error.code == "content_filter":
+        return {"error": ERROR_MESSAGE_FILTER}
+    if isinstance(error, APIError) and error.code == "context_length_exceeded":
+        return {"error": ERROR_MESSAGE_LENGTH}
+    return {"error": ERROR_MESSAGE.format(error_type=type(error))}
+
+
+def error_response(error: Exception, route: str, status_code: int = 500):
+    logging.exception("Exception in %s: %s", route, error)
+    if isinstance(error, APIError) and error.code == "content_filter":
+        status_code = 400
+    return jsonify(error_dict(error)), status_code
diff --git a/app/backend/gunicorn.conf.py b/app/backend/gunicorn.conf.py
index e9fef7e083..9144e3cc00 100644
--- a/app/backend/gunicorn.conf.py
+++ b/app/backend/gunicorn.conf.py
@@ -1,12 +1,18 @@
 import multiprocessing
+import os
 
 max_requests = 1000
 max_requests_jitter = 50
 log_file = "-"
 bind = "0.0.0.0"
 
+timeout = 230
+# https://learn.microsoft.com/troubleshoot/azure/app-service/web-apps-performance-faqs#why-does-my-request-time-out-after-230-seconds
+
 num_cpus = multiprocessing.cpu_count()
-workers = (num_cpus * 2) + 1
-threads = 1 if num_cpus == 1 else 2
-timeout = 600
-worker_class = "gthread"
+if os.getenv("WEBSITE_SKU") == "LinuxFree":
+    # Free tier reports 2 CPUs but can't handle multiple workers
+    workers = 1
+else:
+    workers = (num_cpus * 2) + 1
+worker_class = "custom_uvicorn_worker.CustomUvicornWorker"
diff --git a/app/backend/langchainadapters.py b/app/backend/langchainadapters.py
deleted file mode 100644
index 53de384525..0000000000
--- a/app/backend/langchainadapters.py
+++ /dev/null
@@ -1,90 +0,0 @@
-from typing import Any, Dict, List, Optional, Union
-
-from langchain.callbacks.base import BaseCallbackHandler
-from langchain.schema import AgentAction, AgentFinish, LLMResult
-
-
-def ch(text: Union[str, object]) -> str:
-    s = text if isinstance(text, str) else str(text)
-    return s.replace("<", "&lt;").replace(">", "&gt;").replace("\r", "").replace("\n", "<br>")
-
-class HtmlCallbackHandler (BaseCallbackHandler):
-    html: str = ""
-
-    def get_and_reset_log(self) -> str:
-        result = self.html
-        self.html = ""
-        return result
-
-    def on_llm_start(
-        self, serialized: Dict[str, Any], prompts: List[str], **kwargs: Any
-    ) -> None:
-        """Print out the prompts."""
-        self.html += "LLM prompts:<br>" + "<br>".join(ch(prompts)) + "<br>"
-
-    def on_llm_end(self, response: LLMResult, **kwargs: Any) -> None:
-        """Do nothing."""
-        pass
-
-    def on_llm_error(self, error: Exception, **kwargs: Any) -> None:
-        self.html += f"LLM error: {ch(error)}<br>"
-
-    def on_chain_start(
-        self, serialized: Dict[str, Any], inputs: Dict[str, Any], **kwargs: Any
-    ) -> None:
-        """Print out that we are entering a chain."""
-        class_name = serialized["name"]
-        self.html += f"Entering chain: {ch(class_name)}<br>"
-
-    def on_chain_end(self, outputs: Dict[str, Any], **kwargs: Any) -> None:
-        """Print out that we finished a chain."""
-        self.html += "Finished chain<br>"
-
-    def on_chain_error(self, error: Exception, **kwargs: Any) -> None:
-        self.html += f"Chain error: {ch(error)}<br>"
-
-    def on_tool_start(
-        self,
-        serialized: Dict[str, Any],
-        input_str: str,
-        color: Optional[str] = None,
-        **kwargs: Any,
-    ) -> None:
-        """Print out the log in specified color."""
-        pass
-
-    def on_tool_end(
-        self,
-        output: str,
-        color: Optional[str] = None,
-        observation_prefix: Optional[str] = None,
-        llm_prefix: Optional[str] = None,
-        **kwargs: Any,
-    ) -> None:
-        """If not the final action, print out observation."""
-        self.html += f"{ch(observation_prefix)}<br>{ch(output)}<br>{ch(llm_prefix)}<br>"
-
-    def on_tool_error(self, error: Exception, **kwargs: Any) -> None:
-        self.html += f"Tool error: {ch(error)}<br>"
-
-    def on_text(
-        self,
-        text: str,
-        color: Optional[str] = None,
-        **kwargs: Optional[str],
-    ) -> None:
-        """Run when agent ends."""
-        self.html += f"{ch(text)}<br>"
-
-    def on_agent_action(
-        self,
-        action: AgentAction,
-        color: Optional[str] = None,
-        **kwargs: Any) -> Any:
-        self.html += f"{ch(action.log)}<br>"
-
-    def on_agent_finish(
-        self, finish: AgentFinish, color: Optional[str] = None, **kwargs: Any
-    ) -> None:
-        """Run on agent end."""
-        self.html += f"{ch(finish.log)}<br>"
diff --git a/app/backend/load_azd_env.py b/app/backend/load_azd_env.py
new file mode 100644
index 0000000000..2f2db6aa8f
--- /dev/null
+++ b/app/backend/load_azd_env.py
@@ -0,0 +1,29 @@
+import json
+import logging
+import os
+import subprocess
+
+from dotenv import load_dotenv
+
+logger = logging.getLogger("scripts")
+
+
+def load_azd_env():
+    """Get path to current azd env file and load file using python-dotenv"""
+    result = subprocess.run("azd env list -o json", shell=True, capture_output=True, text=True)
+    if result.returncode != 0:
+        raise Exception("Error loading azd env")
+    env_json = json.loads(result.stdout)
+    env_file_path = None
+    for entry in env_json:
+        if entry["IsDefault"]:
+            env_file_path = entry["DotEnvPath"]
+    if not env_file_path:
+        raise Exception("No default azd env file found")
+    loading_mode = os.getenv("LOADING_MODE_FOR_AZD_ENV_VARS") or "override"
+    if loading_mode == "no-override":
+        logger.info("Loading azd env from %s, but not overriding existing environment variables", env_file_path)
+        load_dotenv(env_file_path, override=False)
+    else:
+        logger.info("Loading azd env from %s, which may override existing environment variables", env_file_path)
+        load_dotenv(env_file_path, override=True)
diff --git a/app/backend/lookuptool.py b/app/backend/lookuptool.py
deleted file mode 100644
index 93c3b87638..0000000000
--- a/app/backend/lookuptool.py
+++ /dev/null
@@ -1,22 +0,0 @@
-import csv
-from pathlib import Path
-from typing import Optional, Union
-
-from langchain.agents import Tool
-from langchain.callbacks.manager import Callbacks
-
-
-class CsvLookupTool(Tool):
-    data: dict[str, str] = {}
-
-    def __init__(self, filename: Union[str, Path], key_field: str, name: str = "lookup",
-                 description: str = "useful to look up details given an input key as opposite to searching data with an unstructured question",
-                 callbacks: Callbacks = None):
-        super().__init__(name, self.lookup, description, callbacks=callbacks)
-        with open(filename, newline='') as csvfile:
-            reader = csv.DictReader(csvfile)
-            for row in reader:
-                self.data[row[key_field]] = "\n".join([f"{i}:{row[i]}" for i in row])
-
-    def lookup(self, key: str) -> Optional[str]:
-        return self.data.get(key, "")
diff --git a/app/backend/main.py b/app/backend/main.py
new file mode 100644
index 0000000000..0f2914a483
--- /dev/null
+++ b/app/backend/main.py
@@ -0,0 +1,12 @@
+import os
+
+from app import create_app
+from load_azd_env import load_azd_env
+
+# WEBSITE_HOSTNAME is always set by App Service, RUNNING_IN_PRODUCTION is set in main.bicep
+RUNNING_ON_AZURE = os.getenv("WEBSITE_HOSTNAME") is not None or os.getenv("RUNNING_IN_PRODUCTION") is not None
+
+if not RUNNING_ON_AZURE:
+    load_azd_env()
+
+app = create_app()
diff --git a/app/backend/prepdocs.py b/app/backend/prepdocs.py
new file mode 100644
index 0000000000..f03baac0dc
--- /dev/null
+++ b/app/backend/prepdocs.py
@@ -0,0 +1,473 @@
+import argparse
+import asyncio
+import logging
+import os
+from typing import Optional, Union
+
+from azure.core.credentials import AzureKeyCredential
+from azure.core.credentials_async import AsyncTokenCredential
+from azure.identity.aio import AzureDeveloperCliCredential, get_bearer_token_provider
+from rich.logging import RichHandler
+
+from load_azd_env import load_azd_env
+from prepdocslib.blobmanager import BlobManager
+from prepdocslib.csvparser import CsvParser
+from prepdocslib.embeddings import (
+    AzureOpenAIEmbeddingService,
+    ImageEmbeddings,
+    OpenAIEmbeddingService,
+)
+from prepdocslib.fileprocessor import FileProcessor
+from prepdocslib.filestrategy import FileStrategy +from prepdocslib.htmlparser import LocalHTMLParser +from prepdocslib.integratedvectorizerstrategy import ( + IntegratedVectorizerStrategy, +) +from prepdocslib.jsonparser import JsonParser +from prepdocslib.listfilestrategy import ( + ADLSGen2ListFileStrategy, + ListFileStrategy, + LocalListFileStrategy, +) +from prepdocslib.parser import Parser +from prepdocslib.pdfparser import DocumentAnalysisParser, LocalPdfParser +from prepdocslib.strategy import DocumentAction, SearchInfo, Strategy +from prepdocslib.textparser import TextParser +from prepdocslib.textsplitter import SentenceTextSplitter, SimpleTextSplitter + +logger = logging.getLogger("scripts") + + +def clean_key_if_exists(key: Union[str, None]) -> Union[str, None]: + """Remove leading and trailing whitespace from a key if it exists. If the key is empty, return None.""" + if key is not None and key.strip() != "": + return key.strip() + return None + + +async def setup_search_info( + search_service: str, + index_name: str, + azure_credential: AsyncTokenCredential, + use_agentic_retrieval: Union[bool, None] = None, + azure_openai_endpoint: Union[str, None] = None, + agent_name: Union[str, None] = None, + agent_max_output_tokens: Union[int, None] = None, + azure_openai_searchagent_deployment: Union[str, None] = None, + azure_openai_searchagent_model: Union[str, None] = None, + search_key: Union[str, None] = None, +) -> SearchInfo: + search_creds: Union[AsyncTokenCredential, AzureKeyCredential] = ( + azure_credential if search_key is None else AzureKeyCredential(search_key) + ) + if use_agentic_retrieval and azure_openai_searchagent_model is None: + raise ValueError("Azure OpenAI SearchAgent model must be specified when using agentic retrieval.") + + return SearchInfo( + endpoint=f"https://{search_service}.search.windows.net/", + credential=search_creds, + index_name=index_name, + agent_name=agent_name, + agent_max_output_tokens=agent_max_output_tokens, + use_agentic_retrieval=use_agentic_retrieval, + azure_openai_endpoint=azure_openai_endpoint, + azure_openai_searchagent_model=azure_openai_searchagent_model, + azure_openai_searchagent_deployment=azure_openai_searchagent_deployment, + ) + + +def setup_blob_manager( + azure_credential: AsyncTokenCredential, + storage_account: str, + storage_container: str, + storage_resource_group: str, + subscription_id: str, + search_images: bool, + storage_key: Union[str, None] = None, +): + storage_creds: Union[AsyncTokenCredential, str] = azure_credential if storage_key is None else storage_key + return BlobManager( + endpoint=f"https://{storage_account}.blob.core.windows.net", + container=storage_container, + account=storage_account, + credential=storage_creds, + resourceGroup=storage_resource_group, + subscriptionId=subscription_id, + store_page_images=search_images, + ) + + +def setup_list_file_strategy( + azure_credential: AsyncTokenCredential, + local_files: Union[str, None], + datalake_storage_account: Union[str, None], + datalake_filesystem: Union[str, None], + datalake_path: Union[str, None], + datalake_key: Union[str, None], +): + list_file_strategy: ListFileStrategy + if datalake_storage_account: + if datalake_filesystem is None or datalake_path is None: + raise ValueError("DataLake file system and path are required when using Azure Data Lake Gen2") + adls_gen2_creds: Union[AsyncTokenCredential, str] = azure_credential if datalake_key is None else datalake_key + logger.info("Using Data Lake Gen2 Storage Account: %s", 
datalake_storage_account) + list_file_strategy = ADLSGen2ListFileStrategy( + data_lake_storage_account=datalake_storage_account, + data_lake_filesystem=datalake_filesystem, + data_lake_path=datalake_path, + credential=adls_gen2_creds, + ) + elif local_files: + logger.info("Using local files: %s", local_files) + list_file_strategy = LocalListFileStrategy(path_pattern=local_files) + else: + raise ValueError("Either local_files or datalake_storage_account must be provided.") + return list_file_strategy + + +def setup_embeddings_service( + azure_credential: AsyncTokenCredential, + openai_host: str, + openai_model_name: str, + openai_service: Union[str, None], + openai_custom_url: Union[str, None], + openai_deployment: Union[str, None], + openai_dimensions: int, + openai_api_version: str, + openai_key: Union[str, None], + openai_org: Union[str, None], + disable_vectors: bool = False, + disable_batch_vectors: bool = False, +): + if disable_vectors: + logger.info("Not setting up embeddings service") + return None + + if openai_host != "openai": + azure_open_ai_credential: Union[AsyncTokenCredential, AzureKeyCredential] = ( + azure_credential if openai_key is None else AzureKeyCredential(openai_key) + ) + return AzureOpenAIEmbeddingService( + open_ai_service=openai_service, + open_ai_custom_url=openai_custom_url, + open_ai_deployment=openai_deployment, + open_ai_model_name=openai_model_name, + open_ai_dimensions=openai_dimensions, + open_ai_api_version=openai_api_version, + credential=azure_open_ai_credential, + disable_batch=disable_batch_vectors, + ) + else: + if openai_key is None: + raise ValueError("OpenAI key is required when using the non-Azure OpenAI API") + return OpenAIEmbeddingService( + open_ai_model_name=openai_model_name, + open_ai_dimensions=openai_dimensions, + credential=openai_key, + organization=openai_org, + disable_batch=disable_batch_vectors, + ) + + +def setup_file_processors( + azure_credential: AsyncTokenCredential, + document_intelligence_service: Union[str, None], + document_intelligence_key: Union[str, None] = None, + local_pdf_parser: bool = False, + local_html_parser: bool = False, + search_images: bool = False, + use_content_understanding: bool = False, + content_understanding_endpoint: Union[str, None] = None, +): + sentence_text_splitter = SentenceTextSplitter() + + doc_int_parser: Optional[DocumentAnalysisParser] = None + # check if Azure Document Intelligence credentials are provided + if document_intelligence_service is not None: + documentintelligence_creds: Union[AsyncTokenCredential, AzureKeyCredential] = ( + azure_credential if document_intelligence_key is None else AzureKeyCredential(document_intelligence_key) + ) + doc_int_parser = DocumentAnalysisParser( + endpoint=f"https://{document_intelligence_service}.cognitiveservices.azure.com/", + credential=documentintelligence_creds, + use_content_understanding=use_content_understanding, + content_understanding_endpoint=content_understanding_endpoint, + ) + + pdf_parser: Optional[Parser] = None + if local_pdf_parser or document_intelligence_service is None: + pdf_parser = LocalPdfParser() + elif document_intelligence_service is not None: + pdf_parser = doc_int_parser + else: + logger.warning("No PDF parser available") + + html_parser: Optional[Parser] = None + if local_html_parser or document_intelligence_service is None: + html_parser = LocalHTMLParser() + elif document_intelligence_service is not None: + html_parser = doc_int_parser + else: + logger.warning("No HTML parser available") + + # These file formats 
can always be parsed: + file_processors = { + ".json": FileProcessor(JsonParser(), SimpleTextSplitter()), + ".md": FileProcessor(TextParser(), sentence_text_splitter), + ".txt": FileProcessor(TextParser(), sentence_text_splitter), + ".csv": FileProcessor(CsvParser(), sentence_text_splitter), + } + # These require either a Python package or Document Intelligence + if pdf_parser is not None: + file_processors.update({".pdf": FileProcessor(pdf_parser, sentence_text_splitter)}) + if html_parser is not None: + file_processors.update({".html": FileProcessor(html_parser, sentence_text_splitter)}) + # These file formats require Document Intelligence + if doc_int_parser is not None: + file_processors.update( + { + ".docx": FileProcessor(doc_int_parser, sentence_text_splitter), + ".pptx": FileProcessor(doc_int_parser, sentence_text_splitter), + ".xlsx": FileProcessor(doc_int_parser, sentence_text_splitter), + ".png": FileProcessor(doc_int_parser, sentence_text_splitter), + ".jpg": FileProcessor(doc_int_parser, sentence_text_splitter), + ".jpeg": FileProcessor(doc_int_parser, sentence_text_splitter), + ".tiff": FileProcessor(doc_int_parser, sentence_text_splitter), + ".bmp": FileProcessor(doc_int_parser, sentence_text_splitter), + ".heic": FileProcessor(doc_int_parser, sentence_text_splitter), + } + ) + return file_processors + + +def setup_image_embeddings_service( + azure_credential: AsyncTokenCredential, vision_endpoint: Union[str, None], search_images: bool +) -> Union[ImageEmbeddings, None]: + image_embeddings_service: Optional[ImageEmbeddings] = None + if search_images: + if vision_endpoint is None: + raise ValueError("A computer vision endpoint is required when GPT-4-vision is enabled.") + image_embeddings_service = ImageEmbeddings( + endpoint=vision_endpoint, + token_provider=get_bearer_token_provider(azure_credential, "https://cognitiveservices.azure.com/.default"), + ) + return image_embeddings_service + + +async def main(strategy: Strategy, setup_index: bool = True): + if setup_index: + await strategy.setup() + + await strategy.run() + + +if __name__ == "__main__": + parser = argparse.ArgumentParser( + description="Prepare documents by extracting content from PDFs, splitting content into sections, uploading to blob storage, and indexing in a search index." + ) + parser.add_argument("files", nargs="?", help="Files to be processed") + + parser.add_argument( + "--category", help="Value for the category field in the search index for all sections indexed in this run" + ) + parser.add_argument( + "--skipblobs", action="store_true", help="Skip uploading individual pages to Azure Blob Storage" + ) + parser.add_argument( + "--disablebatchvectors", action="store_true", help="Don't compute embeddings in batch for the sections" + ) + parser.add_argument( + "--remove", + action="store_true", + help="Remove references to this document from blob storage and the search index", + ) + parser.add_argument( + "--removeall", + action="store_true", + help="Remove all blobs from blob storage and documents from the search index", + ) + + # Optional key specification: + parser.add_argument( + "--searchkey", + required=False, + help="Optional. Use this Azure AI Search account key instead of the current user identity to login (use az login to set current user for Azure)", + ) + parser.add_argument( + "--storagekey", + required=False, + help="Optional. 
Use this Azure Blob Storage account key instead of the current user identity to login (use az login to set current user for Azure)", + ) + parser.add_argument( + "--datalakekey", required=False, help="Optional. Use this key when authenticating to Azure Data Lake Gen2" + ) + parser.add_argument( + "--documentintelligencekey", + required=False, + help="Optional. Use this Azure Document Intelligence account key instead of the current user identity to login (use az login to set current user for Azure)", + ) + parser.add_argument( + "--searchserviceassignedid", + required=False, + help="Search service system assigned Identity (Managed identity) (used for integrated vectorization)", + ) + + parser.add_argument("--verbose", "-v", action="store_true", help="Verbose output") + args = parser.parse_args() + + if args.verbose: + logging.basicConfig(format="%(message)s", datefmt="[%X]", handlers=[RichHandler(rich_tracebacks=True)]) + # We only set the level to INFO for our logger, + # to avoid seeing the noisy INFO level logs from the Azure SDKs + logger.setLevel(logging.DEBUG) + + load_azd_env() + + if os.getenv("AZURE_PUBLIC_NETWORK_ACCESS") == "Disabled": + logger.error("AZURE_PUBLIC_NETWORK_ACCESS is set to Disabled. Exiting.") + exit(0) + + use_int_vectorization = os.getenv("USE_FEATURE_INT_VECTORIZATION", "").lower() == "true" + use_gptvision = os.getenv("USE_GPT4V", "").lower() == "true" + use_acls = os.getenv("AZURE_ENFORCE_ACCESS_CONTROL") is not None + dont_use_vectors = os.getenv("USE_VECTORS", "").lower() == "false" + use_agentic_retrieval = os.getenv("USE_AGENTIC_RETRIEVAL", "").lower() == "true" + use_content_understanding = os.getenv("USE_MEDIA_DESCRIBER_AZURE_CU", "").lower() == "true" + + # Use the current user identity to connect to Azure services. See infra/main.bicep for role assignments. 
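As an illustrative aside (not part of this PR), the azd credential constructed just below can be shared across the async Azure SDK clients this script builds, so a single `azd auth login` covers Search, Blob Storage, and Azure OpenAI. The endpoint values in this sketch are placeholders, and the helper name is an assumption.

```python
# Sketch: sharing one AzureDeveloperCliCredential across SDK clients (placeholder endpoints).
from azure.identity.aio import AzureDeveloperCliCredential
from azure.search.documents.indexes.aio import SearchIndexClient
from azure.storage.blob.aio import BlobServiceClient


async def build_clients():
    credential = AzureDeveloperCliCredential(process_timeout=60)
    search_index_client = SearchIndexClient(
        endpoint="https://<search-service>.search.windows.net/", credential=credential
    )
    blob_service_client = BlobServiceClient(
        account_url="https://<storage-account>.blob.core.windows.net", credential=credential
    )
    return credential, search_index_client, blob_service_client
```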
+ if tenant_id := os.getenv("AZURE_TENANT_ID"): + logger.info("Connecting to Azure services using the azd credential for tenant %s", tenant_id) + azd_credential = AzureDeveloperCliCredential(tenant_id=tenant_id, process_timeout=60) + else: + logger.info("Connecting to Azure services using the azd credential for home tenant") + azd_credential = AzureDeveloperCliCredential(process_timeout=60) + + if args.removeall: + document_action = DocumentAction.RemoveAll + elif args.remove: + document_action = DocumentAction.Remove + else: + document_action = DocumentAction.Add + + loop = asyncio.new_event_loop() + asyncio.set_event_loop(loop) + + openai_host = os.environ["OPENAI_HOST"] + # Check for incompatibility + # if openai host is not azure + if openai_host != "azure" and use_agentic_retrieval: + raise Exception("Agentic retrieval requires an Azure OpenAI chat completion service") + + search_info = loop.run_until_complete( + setup_search_info( + search_service=os.environ["AZURE_SEARCH_SERVICE"], + index_name=os.environ["AZURE_SEARCH_INDEX"], + use_agentic_retrieval=use_agentic_retrieval, + agent_name=os.getenv("AZURE_SEARCH_AGENT"), + agent_max_output_tokens=int(os.getenv("AZURE_SEARCH_AGENT_MAX_OUTPUT_TOKENS", 10000)), + azure_openai_endpoint=os.environ["AZURE_OPENAI_ENDPOINT"], + azure_openai_searchagent_deployment=os.getenv("AZURE_OPENAI_SEARCHAGENT_DEPLOYMENT"), + azure_openai_searchagent_model=os.getenv("AZURE_OPENAI_SEARCHAGENT_MODEL"), + azure_credential=azd_credential, + search_key=clean_key_if_exists(args.searchkey), + ) + ) + blob_manager = setup_blob_manager( + azure_credential=azd_credential, + storage_account=os.environ["AZURE_STORAGE_ACCOUNT"], + storage_container=os.environ["AZURE_STORAGE_CONTAINER"], + storage_resource_group=os.environ["AZURE_STORAGE_RESOURCE_GROUP"], + subscription_id=os.environ["AZURE_SUBSCRIPTION_ID"], + search_images=use_gptvision, + storage_key=clean_key_if_exists(args.storagekey), + ) + list_file_strategy = setup_list_file_strategy( + azure_credential=azd_credential, + local_files=args.files, + datalake_storage_account=os.getenv("AZURE_ADLS_GEN2_STORAGE_ACCOUNT"), + datalake_filesystem=os.getenv("AZURE_ADLS_GEN2_FILESYSTEM"), + datalake_path=os.getenv("AZURE_ADLS_GEN2_FILESYSTEM_PATH"), + datalake_key=clean_key_if_exists(args.datalakekey), + ) + + openai_host = os.environ["OPENAI_HOST"] + openai_key = None + if os.getenv("AZURE_OPENAI_API_KEY_OVERRIDE"): + openai_key = os.getenv("AZURE_OPENAI_API_KEY_OVERRIDE") + elif not openai_host.startswith("azure") and os.getenv("OPENAI_API_KEY"): + openai_key = os.getenv("OPENAI_API_KEY") + + openai_dimensions = 1536 + if os.getenv("AZURE_OPENAI_EMB_DIMENSIONS"): + openai_dimensions = int(os.environ["AZURE_OPENAI_EMB_DIMENSIONS"]) + openai_embeddings_service = setup_embeddings_service( + azure_credential=azd_credential, + openai_host=openai_host, + openai_model_name=os.environ["AZURE_OPENAI_EMB_MODEL_NAME"], + openai_service=os.getenv("AZURE_OPENAI_SERVICE"), + openai_custom_url=os.getenv("AZURE_OPENAI_CUSTOM_URL"), + openai_deployment=os.getenv("AZURE_OPENAI_EMB_DEPLOYMENT"), + # https://learn.microsoft.com/azure/ai-services/openai/api-version-deprecation#latest-ga-api-release + openai_api_version=os.getenv("AZURE_OPENAI_API_VERSION") or "2024-06-01", + openai_dimensions=openai_dimensions, + openai_key=clean_key_if_exists(openai_key), + openai_org=os.getenv("OPENAI_ORGANIZATION"), + disable_vectors=dont_use_vectors, + disable_batch_vectors=args.disablebatchvectors, + ) + + ingestion_strategy: Strategy + if 
use_int_vectorization: + + if not openai_embeddings_service or not isinstance(openai_embeddings_service, AzureOpenAIEmbeddingService): + raise Exception("Integrated vectorization strategy requires an Azure OpenAI embeddings service") + + ingestion_strategy = IntegratedVectorizerStrategy( + search_info=search_info, + list_file_strategy=list_file_strategy, + blob_manager=blob_manager, + document_action=document_action, + embeddings=openai_embeddings_service, + search_field_name_embedding=os.environ["AZURE_SEARCH_FIELD_NAME_EMBEDDING"], + subscription_id=os.environ["AZURE_SUBSCRIPTION_ID"], + search_service_user_assigned_id=args.searchserviceassignedid, + search_analyzer_name=os.getenv("AZURE_SEARCH_ANALYZER_NAME"), + use_acls=use_acls, + category=args.category, + ) + else: + file_processors = setup_file_processors( + azure_credential=azd_credential, + document_intelligence_service=os.getenv("AZURE_DOCUMENTINTELLIGENCE_SERVICE"), + document_intelligence_key=clean_key_if_exists(args.documentintelligencekey), + local_pdf_parser=os.getenv("USE_LOCAL_PDF_PARSER") == "true", + local_html_parser=os.getenv("USE_LOCAL_HTML_PARSER") == "true", + search_images=use_gptvision, + use_content_understanding=use_content_understanding, + content_understanding_endpoint=os.getenv("AZURE_CONTENTUNDERSTANDING_ENDPOINT"), + ) + image_embeddings_service = setup_image_embeddings_service( + azure_credential=azd_credential, + vision_endpoint=os.getenv("AZURE_VISION_ENDPOINT"), + search_images=use_gptvision, + ) + + ingestion_strategy = FileStrategy( + search_info=search_info, + list_file_strategy=list_file_strategy, + blob_manager=blob_manager, + file_processors=file_processors, + document_action=document_action, + embeddings=openai_embeddings_service, + image_embeddings=image_embeddings_service, + search_analyzer_name=os.getenv("AZURE_SEARCH_ANALYZER_NAME"), + # Default to the previous field names for backward compatibility + search_field_name_embedding=os.getenv("AZURE_SEARCH_FIELD_NAME_EMBEDDING", "embedding"), + use_acls=use_acls, + category=args.category, + use_content_understanding=use_content_understanding, + content_understanding_endpoint=os.getenv("AZURE_CONTENTUNDERSTANDING_ENDPOINT"), + ) + + loop.run_until_complete(main(ingestion_strategy, setup_index=not args.remove and not args.removeall)) + loop.close() diff --git a/app/backend/prepdocslib/__init__.py b/app/backend/prepdocslib/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/app/backend/prepdocslib/blobmanager.py b/app/backend/prepdocslib/blobmanager.py new file mode 100644 index 0000000000..d5c21e0d41 --- /dev/null +++ b/app/backend/prepdocslib/blobmanager.py @@ -0,0 +1,178 @@ +import datetime +import io +import logging +import os +import re +from typing import Optional, Union + +import pymupdf +from azure.core.credentials_async import AsyncTokenCredential +from azure.storage.blob import ( + BlobSasPermissions, + UserDelegationKey, + generate_blob_sas, +) +from azure.storage.blob.aio import BlobServiceClient, ContainerClient +from PIL import Image, ImageDraw, ImageFont +from pypdf import PdfReader + +from .listfilestrategy import File + +logger = logging.getLogger("scripts") + + +class BlobManager: + """ + Class to manage uploading and deleting blobs containing citation information from a blob storage account + """ + + def __init__( + self, + endpoint: str, + container: str, + account: str, + credential: Union[AsyncTokenCredential, str], + resourceGroup: str, + subscriptionId: str, + store_page_images: bool = False, + ): + 
self.endpoint = endpoint + self.credential = credential + self.account = account + self.container = container + self.store_page_images = store_page_images + self.resourceGroup = resourceGroup + self.subscriptionId = subscriptionId + self.user_delegation_key: Optional[UserDelegationKey] = None + + async def upload_blob(self, file: File) -> Optional[list[str]]: + async with BlobServiceClient( + account_url=self.endpoint, credential=self.credential, max_single_put_size=4 * 1024 * 1024 + ) as service_client, service_client.get_container_client(self.container) as container_client: + if not await container_client.exists(): + await container_client.create_container() + + # Re-open and upload the original file + if file.url is None: + with open(file.content.name, "rb") as reopened_file: + blob_name = BlobManager.blob_name_from_file_name(file.content.name) + logger.info("Uploading blob for whole file -> %s", blob_name) + blob_client = await container_client.upload_blob(blob_name, reopened_file, overwrite=True) + file.url = blob_client.url + + if self.store_page_images: + if os.path.splitext(file.content.name)[1].lower() == ".pdf": + return await self.upload_pdf_blob_images(service_client, container_client, file) + else: + logger.info("File %s is not a PDF, skipping image upload", file.content.name) + + return None + + def get_managedidentity_connectionstring(self): + return f"ResourceId=/subscriptions/{self.subscriptionId}/resourceGroups/{self.resourceGroup}/providers/Microsoft.Storage/storageAccounts/{self.account};" + + async def upload_pdf_blob_images( + self, service_client: BlobServiceClient, container_client: ContainerClient, file: File + ) -> list[str]: + with open(file.content.name, "rb") as reopened_file: + reader = PdfReader(reopened_file) + page_count = len(reader.pages) + doc = pymupdf.open(file.content.name) + sas_uris = [] + start_time = datetime.datetime.now(datetime.timezone.utc) + expiry_time = start_time + datetime.timedelta(days=1) + + font = None + try: + font = ImageFont.truetype("arial.ttf", 20) + except OSError: + try: + font = ImageFont.truetype("/usr/share/fonts/truetype/freefont/FreeMono.ttf", 20) + except OSError: + logger.info("Unable to find arial.ttf or FreeMono.ttf, using default font") + + for i in range(page_count): + blob_name = BlobManager.blob_image_name_from_file_page(file.content.name, i) + logger.info("Converting page %s to image and uploading -> %s", i, blob_name) + + doc = pymupdf.open(file.content.name) + page = doc.load_page(i) + pix = page.get_pixmap() + original_img = Image.frombytes("RGB", [pix.width, pix.height], pix.samples) # type: ignore + + # Create a new image with additional space for text + text_height = 40 # Height of the text area + new_img = Image.new("RGB", (original_img.width, original_img.height + text_height), "white") + + # Paste the original image onto the new image + new_img.paste(original_img, (0, text_height)) + + # Draw the text on the white area + draw = ImageDraw.Draw(new_img) + text = f"SourceFileName:{blob_name}" + + # 10 pixels from the top and left of the image + x = 10 + y = 10 + draw.text((x, y), text, font=font, fill="black") + + output = io.BytesIO() + new_img.save(output, format="PNG") + output.seek(0) + + blob_client = await container_client.upload_blob(blob_name, output, overwrite=True) + if not self.user_delegation_key: + self.user_delegation_key = await service_client.get_user_delegation_key(start_time, expiry_time) + + if blob_client.account_name is not None: + sas_token = generate_blob_sas( + 
account_name=blob_client.account_name, + container_name=blob_client.container_name, + blob_name=blob_client.blob_name, + user_delegation_key=self.user_delegation_key, + permission=BlobSasPermissions(read=True), + expiry=expiry_time, + start=start_time, + ) + sas_uris.append(f"{blob_client.url}?{sas_token}") + + return sas_uris + + async def remove_blob(self, path: Optional[str] = None): + async with BlobServiceClient( + account_url=self.endpoint, credential=self.credential + ) as service_client, service_client.get_container_client(self.container) as container_client: + if not await container_client.exists(): + return + if path is None: + prefix = None + blobs = container_client.list_blob_names() + else: + prefix = os.path.splitext(os.path.basename(path))[0] + blobs = container_client.list_blob_names(name_starts_with=os.path.splitext(os.path.basename(prefix))[0]) + async for blob_path in blobs: + # This still supports PDFs split into individual pages, but we could remove in future to simplify code + if ( + prefix is not None + and not re.match(rf"{prefix}-\d+\.pdf", blob_path) + and not re.match(rf"{prefix}-\d+\.png", blob_path) + and blob_path != os.path.basename(path) + ): + continue + logger.info("Removing blob %s", blob_path) + await container_client.delete_blob(blob_path) + + @classmethod + def sourcepage_from_file_page(cls, filename, page=0) -> str: + if os.path.splitext(filename)[1].lower() == ".pdf": + return f"{os.path.basename(filename)}#page={page+1}" + else: + return os.path.basename(filename) + + @classmethod + def blob_image_name_from_file_page(cls, filename, page=0) -> str: + return os.path.splitext(os.path.basename(filename))[0] + f"-{page+1}" + ".png" + + @classmethod + def blob_name_from_file_name(cls, filename) -> str: + return os.path.basename(filename) diff --git a/app/backend/prepdocslib/csvparser.py b/app/backend/prepdocslib/csvparser.py new file mode 100644 index 0000000000..7bf5e1ad75 --- /dev/null +++ b/app/backend/prepdocslib/csvparser.py @@ -0,0 +1,32 @@ +import csv +from collections.abc import AsyncGenerator +from typing import IO + +from .page import Page +from .parser import Parser + + +class CsvParser(Parser): + """ + Concrete parser that can parse CSV into Page objects. Each row becomes a Page object. 
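To illustrate the behaviour this docstring promises — the header row is skipped and each remaining row becomes one `Page` with a running character offset — here is a small usage sketch; the import path is assumed from this PR's layout:

```python
# Illustrative only: feed CsvParser an in-memory CSV and inspect the resulting Pages.
import asyncio
import io

from prepdocslib.csvparser import CsvParser  # import path assumed (app/backend on sys.path)


async def demo() -> None:
    data = io.BytesIO(b"name,score\nalice,10\nbob,7\n")
    pages = [page async for page in CsvParser().parse(data)]
    for page in pages:
        # Prints: 0 0 alice,10  then  1 9 bob,7 (offset 9 = len("alice,10") + 1 for the newline)
        print(page.page_num, page.offset, page.text)


asyncio.run(demo())
```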
+ """ + + async def parse(self, content: IO) -> AsyncGenerator[Page, None]: + # Check if content is in bytes (binary file) and decode to string + content_str: str + if isinstance(content, (bytes, bytearray)): + content_str = content.decode("utf-8") + elif hasattr(content, "read"): # Handle BufferedReader + content_str = content.read().decode("utf-8") + + # Create a CSV reader from the text content + reader = csv.reader(content_str.splitlines()) + offset = 0 + + # Skip the header row + next(reader, None) + + for i, row in enumerate(reader): + page_text = ",".join(row) + yield Page(i, offset, page_text) + offset += len(page_text) + 1 # Account for newline character diff --git a/app/backend/prepdocslib/embeddings.py b/app/backend/prepdocslib/embeddings.py new file mode 100644 index 0000000000..df56f39c08 --- /dev/null +++ b/app/backend/prepdocslib/embeddings.py @@ -0,0 +1,263 @@ +import logging +from abc import ABC +from collections.abc import Awaitable +from typing import Callable, Optional, Union +from urllib.parse import urljoin + +import aiohttp +import tiktoken +from azure.core.credentials import AzureKeyCredential +from azure.core.credentials_async import AsyncTokenCredential +from azure.identity.aio import get_bearer_token_provider +from openai import AsyncAzureOpenAI, AsyncOpenAI, RateLimitError +from tenacity import ( + AsyncRetrying, + retry_if_exception_type, + stop_after_attempt, + wait_random_exponential, +) +from typing_extensions import TypedDict + +logger = logging.getLogger("scripts") + + +class EmbeddingBatch: + """ + Represents a batch of text that is going to be embedded + """ + + def __init__(self, texts: list[str], token_length: int): + self.texts = texts + self.token_length = token_length + + +class ExtraArgs(TypedDict, total=False): + dimensions: int + + +class OpenAIEmbeddings(ABC): + """ + Contains common logic across both OpenAI and Azure OpenAI embedding services + Can split source text into batches for more efficient embedding calls + """ + + SUPPORTED_BATCH_AOAI_MODEL = { + "text-embedding-ada-002": {"token_limit": 8100, "max_batch_size": 16}, + "text-embedding-3-small": {"token_limit": 8100, "max_batch_size": 16}, + "text-embedding-3-large": {"token_limit": 8100, "max_batch_size": 16}, + } + SUPPORTED_DIMENSIONS_MODEL = { + "text-embedding-ada-002": False, + "text-embedding-3-small": True, + "text-embedding-3-large": True, + } + + def __init__(self, open_ai_model_name: str, open_ai_dimensions: int, disable_batch: bool = False): + self.open_ai_model_name = open_ai_model_name + self.open_ai_dimensions = open_ai_dimensions + self.disable_batch = disable_batch + + async def create_client(self) -> AsyncOpenAI: + raise NotImplementedError + + def before_retry_sleep(self, retry_state): + logger.info("Rate limited on the OpenAI embeddings API, sleeping before retrying...") + + def calculate_token_length(self, text: str): + encoding = tiktoken.encoding_for_model(self.open_ai_model_name) + return len(encoding.encode(text)) + + def split_text_into_batches(self, texts: list[str]) -> list[EmbeddingBatch]: + batch_info = OpenAIEmbeddings.SUPPORTED_BATCH_AOAI_MODEL.get(self.open_ai_model_name) + if not batch_info: + raise NotImplementedError( + f"Model {self.open_ai_model_name} is not supported with batch embedding operations" + ) + + batch_token_limit = batch_info["token_limit"] + batch_max_size = batch_info["max_batch_size"] + batches: list[EmbeddingBatch] = [] + batch: list[str] = [] + batch_token_length = 0 + for text in texts: + text_token_length = 
self.calculate_token_length(text) + if batch_token_length + text_token_length >= batch_token_limit and len(batch) > 0: + batches.append(EmbeddingBatch(batch, batch_token_length)) + batch = [] + batch_token_length = 0 + + batch.append(text) + batch_token_length = batch_token_length + text_token_length + if len(batch) == batch_max_size: + batches.append(EmbeddingBatch(batch, batch_token_length)) + batch = [] + batch_token_length = 0 + + if len(batch) > 0: + batches.append(EmbeddingBatch(batch, batch_token_length)) + + return batches + + async def create_embedding_batch(self, texts: list[str], dimensions_args: ExtraArgs) -> list[list[float]]: + batches = self.split_text_into_batches(texts) + embeddings = [] + client = await self.create_client() + for batch in batches: + async for attempt in AsyncRetrying( + retry=retry_if_exception_type(RateLimitError), + wait=wait_random_exponential(min=15, max=60), + stop=stop_after_attempt(15), + before_sleep=self.before_retry_sleep, + ): + with attempt: + emb_response = await client.embeddings.create( + model=self.open_ai_model_name, input=batch.texts, **dimensions_args + ) + embeddings.extend([data.embedding for data in emb_response.data]) + logger.info( + "Computed embeddings in batch. Batch size: %d, Token count: %d", + len(batch.texts), + batch.token_length, + ) + + return embeddings + + async def create_embedding_single(self, text: str, dimensions_args: ExtraArgs) -> list[float]: + client = await self.create_client() + async for attempt in AsyncRetrying( + retry=retry_if_exception_type(RateLimitError), + wait=wait_random_exponential(min=15, max=60), + stop=stop_after_attempt(15), + before_sleep=self.before_retry_sleep, + ): + with attempt: + emb_response = await client.embeddings.create( + model=self.open_ai_model_name, input=text, **dimensions_args + ) + logger.info("Computed embedding for text section. 
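The batching rule implemented above boils down to: close the current batch when adding the next text would reach the model's token limit, or when the batch already holds `max_batch_size` texts. A standalone sketch of just that rule, using made-up token counts instead of tiktoken:

```python
# Simplified restatement of OpenAIEmbeddings.split_text_into_batches, operating on token counts directly.
def split_into_batches(token_counts: list[int], token_limit: int = 8100, max_batch_size: int = 16) -> list[list[int]]:
    batches: list[list[int]] = []
    batch: list[int] = []
    batch_tokens = 0
    for count in token_counts:
        # Flush before adding if this text would push the batch to the token limit
        if batch and batch_tokens + count >= token_limit:
            batches.append(batch)
            batch, batch_tokens = [], 0
        batch.append(count)
        batch_tokens += count
        # Flush when the batch reaches the per-request size cap
        if len(batch) == max_batch_size:
            batches.append(batch)
            batch, batch_tokens = [], 0
    if batch:
        batches.append(batch)
    return batches


print(split_into_batches([4000, 4000, 4000, 500]))  # [[4000, 4000], [4000, 500]]
```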
Character count: %d", len(text)) + + return emb_response.data[0].embedding + + async def create_embeddings(self, texts: list[str]) -> list[list[float]]: + + dimensions_args: ExtraArgs = ( + {"dimensions": self.open_ai_dimensions} + if OpenAIEmbeddings.SUPPORTED_DIMENSIONS_MODEL.get(self.open_ai_model_name) + else {} + ) + + if not self.disable_batch and self.open_ai_model_name in OpenAIEmbeddings.SUPPORTED_BATCH_AOAI_MODEL: + return await self.create_embedding_batch(texts, dimensions_args) + + return [await self.create_embedding_single(text, dimensions_args) for text in texts] + + +class AzureOpenAIEmbeddingService(OpenAIEmbeddings): + """ + Class for using Azure OpenAI embeddings + To learn more please visit https://learn.microsoft.com/azure/ai-services/openai/concepts/understand-embeddings + """ + + def __init__( + self, + open_ai_service: Union[str, None], + open_ai_deployment: Union[str, None], + open_ai_model_name: str, + open_ai_dimensions: int, + open_ai_api_version: str, + credential: Union[AsyncTokenCredential, AzureKeyCredential], + open_ai_custom_url: Union[str, None] = None, + disable_batch: bool = False, + ): + super().__init__(open_ai_model_name, open_ai_dimensions, disable_batch) + self.open_ai_service = open_ai_service + if open_ai_service: + self.open_ai_endpoint = f"https://{open_ai_service}.openai.azure.com" + elif open_ai_custom_url: + self.open_ai_endpoint = open_ai_custom_url + else: + raise ValueError("Either open_ai_service or open_ai_custom_url must be provided") + self.open_ai_deployment = open_ai_deployment + self.open_ai_api_version = open_ai_api_version + self.credential = credential + + async def create_client(self) -> AsyncOpenAI: + class AuthArgs(TypedDict, total=False): + api_key: str + azure_ad_token_provider: Callable[[], Union[str, Awaitable[str]]] + + auth_args = AuthArgs() + if isinstance(self.credential, AzureKeyCredential): + auth_args["api_key"] = self.credential.key + elif isinstance(self.credential, AsyncTokenCredential): + auth_args["azure_ad_token_provider"] = get_bearer_token_provider( + self.credential, "https://cognitiveservices.azure.com/.default" + ) + else: + raise TypeError("Invalid credential type") + + return AsyncAzureOpenAI( + azure_endpoint=self.open_ai_endpoint, + azure_deployment=self.open_ai_deployment, + api_version=self.open_ai_api_version, + **auth_args, + ) + + +class OpenAIEmbeddingService(OpenAIEmbeddings): + """ + Class for using OpenAI embeddings + To learn more please visit https://platform.openai.com/docs/guides/embeddings + """ + + def __init__( + self, + open_ai_model_name: str, + open_ai_dimensions: int, + credential: str, + organization: Optional[str] = None, + disable_batch: bool = False, + ): + super().__init__(open_ai_model_name, open_ai_dimensions, disable_batch) + self.credential = credential + self.organization = organization + + async def create_client(self) -> AsyncOpenAI: + return AsyncOpenAI(api_key=self.credential, organization=self.organization) + + +class ImageEmbeddings: + """ + Class for using image embeddings from Azure AI Vision + To learn more, please visit https://learn.microsoft.com/azure/ai-services/computer-vision/how-to/image-retrieval#call-the-vectorize-image-api + """ + + def __init__(self, endpoint: str, token_provider: Callable[[], Awaitable[str]]): + self.token_provider = token_provider + self.endpoint = endpoint + + async def create_embeddings(self, blob_urls: list[str]) -> list[list[float]]: + endpoint = urljoin(self.endpoint, "computervision/retrieval:vectorizeImage") + headers = 
{"Content-Type": "application/json"} + params = {"api-version": "2024-02-01", "model-version": "2023-04-15"} + headers["Authorization"] = "Bearer " + await self.token_provider() + + embeddings: list[list[float]] = [] + async with aiohttp.ClientSession(headers=headers) as session: + for blob_url in blob_urls: + async for attempt in AsyncRetrying( + retry=retry_if_exception_type(Exception), + wait=wait_random_exponential(min=15, max=60), + stop=stop_after_attempt(15), + before_sleep=self.before_retry_sleep, + ): + with attempt: + body = {"url": blob_url} + async with session.post(url=endpoint, params=params, json=body) as resp: + resp_json = await resp.json() + embeddings.append(resp_json["vector"]) + + return embeddings + + def before_retry_sleep(self, retry_state): + logger.info("Rate limited on the Vision embeddings API, sleeping before retrying...") diff --git a/app/backend/prepdocslib/fileprocessor.py b/app/backend/prepdocslib/fileprocessor.py new file mode 100644 index 0000000000..3b58130db8 --- /dev/null +++ b/app/backend/prepdocslib/fileprocessor.py @@ -0,0 +1,10 @@ +from dataclasses import dataclass + +from .parser import Parser +from .textsplitter import TextSplitter + + +@dataclass(frozen=True) +class FileProcessor: + parser: Parser + splitter: TextSplitter diff --git a/app/backend/prepdocslib/filestrategy.py b/app/backend/prepdocslib/filestrategy.py new file mode 100644 index 0000000000..37f399cf4b --- /dev/null +++ b/app/backend/prepdocslib/filestrategy.py @@ -0,0 +1,164 @@ +import logging +from typing import Optional + +from azure.core.credentials import AzureKeyCredential + +from .blobmanager import BlobManager +from .embeddings import ImageEmbeddings, OpenAIEmbeddings +from .fileprocessor import FileProcessor +from .listfilestrategy import File, ListFileStrategy +from .mediadescriber import ContentUnderstandingDescriber +from .searchmanager import SearchManager, Section +from .strategy import DocumentAction, SearchInfo, Strategy + +logger = logging.getLogger("scripts") + + +async def parse_file( + file: File, + file_processors: dict[str, FileProcessor], + category: Optional[str] = None, + image_embeddings: Optional[ImageEmbeddings] = None, +) -> list[Section]: + key = file.file_extension().lower() + processor = file_processors.get(key) + if processor is None: + logger.info("Skipping '%s', no parser found.", file.filename()) + return [] + logger.info("Ingesting '%s'", file.filename()) + pages = [page async for page in processor.parser.parse(content=file.content)] + logger.info("Splitting '%s' into sections", file.filename()) + if image_embeddings: + logger.warning("Each page will be split into smaller chunks of text, but images will be of the entire page.") + sections = [ + Section(split_page, content=file, category=category) for split_page in processor.splitter.split_pages(pages) + ] + return sections + + +class FileStrategy(Strategy): + """ + Strategy for ingesting documents into a search service from files stored either locally or in a data lake storage account + """ + + def __init__( + self, + list_file_strategy: ListFileStrategy, + blob_manager: BlobManager, + search_info: SearchInfo, + file_processors: dict[str, FileProcessor], + document_action: DocumentAction = DocumentAction.Add, + embeddings: Optional[OpenAIEmbeddings] = None, + image_embeddings: Optional[ImageEmbeddings] = None, + search_analyzer_name: Optional[str] = None, + search_field_name_embedding: Optional[str] = None, + use_acls: bool = False, + category: Optional[str] = None, + use_content_understanding: 
bool = False, + content_understanding_endpoint: Optional[str] = None, + ): + self.list_file_strategy = list_file_strategy + self.blob_manager = blob_manager + self.file_processors = file_processors + self.document_action = document_action + self.embeddings = embeddings + self.image_embeddings = image_embeddings + self.search_analyzer_name = search_analyzer_name + self.search_field_name_embedding = search_field_name_embedding + self.search_info = search_info + self.use_acls = use_acls + self.category = category + self.use_content_understanding = use_content_understanding + self.content_understanding_endpoint = content_understanding_endpoint + + def setup_search_manager(self): + self.search_manager = SearchManager( + self.search_info, + self.search_analyzer_name, + self.use_acls, + False, + self.embeddings, + field_name_embedding=self.search_field_name_embedding, + search_images=self.image_embeddings is not None, + ) + + async def setup(self): + self.setup_search_manager() + await self.search_manager.create_index() + + if self.use_content_understanding: + if self.content_understanding_endpoint is None: + raise ValueError("Content Understanding is enabled but no endpoint was provided") + if isinstance(self.search_info.credential, AzureKeyCredential): + raise ValueError( + "AzureKeyCredential is not supported for Content Understanding, use keyless auth instead" + ) + cu_manager = ContentUnderstandingDescriber(self.content_understanding_endpoint, self.search_info.credential) + await cu_manager.create_analyzer() + + async def run(self): + self.setup_search_manager() + if self.document_action == DocumentAction.Add: + files = self.list_file_strategy.list() + async for file in files: + try: + sections = await parse_file(file, self.file_processors, self.category, self.image_embeddings) + if sections: + blob_sas_uris = await self.blob_manager.upload_blob(file) + blob_image_embeddings: Optional[list[list[float]]] = None + if self.image_embeddings and blob_sas_uris: + blob_image_embeddings = await self.image_embeddings.create_embeddings(blob_sas_uris) + await self.search_manager.update_content(sections, blob_image_embeddings, url=file.url) + finally: + if file: + file.close() + elif self.document_action == DocumentAction.Remove: + paths = self.list_file_strategy.list_paths() + async for path in paths: + await self.blob_manager.remove_blob(path) + await self.search_manager.remove_content(path) + elif self.document_action == DocumentAction.RemoveAll: + await self.blob_manager.remove_blob() + await self.search_manager.remove_content() + + +class UploadUserFileStrategy: + """ + Strategy for ingesting a file that has already been uploaded to a ADLS2 storage account + """ + + def __init__( + self, + search_info: SearchInfo, + file_processors: dict[str, FileProcessor], + embeddings: Optional[OpenAIEmbeddings] = None, + image_embeddings: Optional[ImageEmbeddings] = None, + search_field_name_embedding: Optional[str] = None, + ): + self.file_processors = file_processors + self.embeddings = embeddings + self.image_embeddings = image_embeddings + self.search_info = search_info + self.search_manager = SearchManager( + search_info=self.search_info, + search_analyzer_name=None, + use_acls=True, + use_int_vectorization=False, + embeddings=self.embeddings, + field_name_embedding=search_field_name_embedding, + search_images=False, + ) + self.search_field_name_embedding = search_field_name_embedding + + async def add_file(self, file: File): + if self.image_embeddings: + logging.warning("Image embeddings are not currently 
supported for the user upload feature") + sections = await parse_file(file, self.file_processors) + if sections: + await self.search_manager.update_content(sections, url=file.url) + + async def remove_file(self, filename: str, oid: str): + if filename is None or filename == "": + logging.warning("Filename is required to remove a file") + return + await self.search_manager.remove_content(filename, oid) diff --git a/app/backend/prepdocslib/htmlparser.py b/app/backend/prepdocslib/htmlparser.py new file mode 100644 index 0000000000..719045b393 --- /dev/null +++ b/app/backend/prepdocslib/htmlparser.py @@ -0,0 +1,50 @@ +import logging +import re +from collections.abc import AsyncGenerator +from typing import IO + +from bs4 import BeautifulSoup + +from .page import Page +from .parser import Parser + +logger = logging.getLogger("scripts") + + +def cleanup_data(data: str) -> str: + """Cleans up the given content using regexes + Args: + data: (str): The data to clean up. + Returns: + str: The cleaned up data. + """ + # match two or more newlines and replace them with one new line + output = re.sub(r"\n{2,}", "\n", data) + # match two or more spaces that are not newlines and replace them with one space + output = re.sub(r"[^\S\n]{2,}", " ", output) + # match two or more hyphens and replace them with two hyphens + output = re.sub(r"-{2,}", "--", output) + + return output.strip() + + +class LocalHTMLParser(Parser): + """Parses HTML text into Page objects.""" + + async def parse(self, content: IO) -> AsyncGenerator[Page, None]: + """Parses the given content. + To learn more, please visit https://pypi.org/project/beautifulsoup4/ + Args: + content (IO): The content to parse. + Returns: + Page: The parsed html Page. + """ + logger.info("Extracting text from '%s' using local HTML parser (BeautifulSoup)", content.name) + + data = content.read() + soup = BeautifulSoup(data, "html.parser") + + # Get text only from html file + result = soup.get_text() + + yield Page(0, 0, text=cleanup_data(result)) diff --git a/app/backend/prepdocslib/integratedvectorizerstrategy.py b/app/backend/prepdocslib/integratedvectorizerstrategy.py new file mode 100644 index 0000000000..9e89facc4c --- /dev/null +++ b/app/backend/prepdocslib/integratedvectorizerstrategy.py @@ -0,0 +1,195 @@ +import logging +from typing import Optional + +from azure.search.documents.indexes._generated.models import ( + NativeBlobSoftDeleteDeletionDetectionPolicy, +) +from azure.search.documents.indexes.models import ( + AzureOpenAIEmbeddingSkill, + IndexProjectionMode, + InputFieldMappingEntry, + OutputFieldMappingEntry, + SearchIndexer, + SearchIndexerDataContainer, + SearchIndexerDataSourceConnection, + SearchIndexerDataSourceType, + SearchIndexerIndexProjection, + SearchIndexerIndexProjectionSelector, + SearchIndexerIndexProjectionsParameters, + SearchIndexerSkillset, + SplitSkill, +) + +from .blobmanager import BlobManager +from .embeddings import AzureOpenAIEmbeddingService +from .listfilestrategy import ListFileStrategy +from .searchmanager import SearchManager +from .strategy import DocumentAction, SearchInfo, Strategy + +logger = logging.getLogger("scripts") + + +class IntegratedVectorizerStrategy(Strategy): + """ + Strategy for ingesting and vectorizing documents into a search service from files stored storage account + """ + + def __init__( + self, + list_file_strategy: ListFileStrategy, + blob_manager: BlobManager, + search_info: SearchInfo, + embeddings: AzureOpenAIEmbeddingService, + search_field_name_embedding: str, + subscription_id: str, + 
search_service_user_assigned_id: str, + document_action: DocumentAction = DocumentAction.Add, + search_analyzer_name: Optional[str] = None, + use_acls: bool = False, + category: Optional[str] = None, + ): + + self.list_file_strategy = list_file_strategy + self.blob_manager = blob_manager + self.document_action = document_action + self.embeddings = embeddings + self.search_field_name_embedding = search_field_name_embedding + self.subscription_id = subscription_id + self.search_user_assigned_identity = search_service_user_assigned_id + self.search_analyzer_name = search_analyzer_name + self.use_acls = use_acls + self.category = category + self.search_info = search_info + prefix = f"{self.search_info.index_name}-{self.search_field_name_embedding}" + self.skillset_name = f"{prefix}-skillset" + self.indexer_name = f"{prefix}-indexer" + self.data_source_name = f"{prefix}-blob" + + async def create_embedding_skill(self, index_name: str) -> SearchIndexerSkillset: + """ + Create a skillset for the indexer to chunk documents and generate embeddings + """ + + split_skill = SplitSkill( + name="split-skill", + description="Split skill to chunk documents", + text_split_mode="pages", + context="/document", + maximum_page_length=2048, + page_overlap_length=20, + inputs=[ + InputFieldMappingEntry(name="text", source="/document/content"), + ], + outputs=[OutputFieldMappingEntry(name="textItems", target_name="pages")], + ) + + embedding_skill = AzureOpenAIEmbeddingSkill( + name="embedding-skill", + description="Skill to generate embeddings via Azure OpenAI", + context="/document/pages/*", + resource_url=f"https://{self.embeddings.open_ai_service}.openai.azure.com", + deployment_name=self.embeddings.open_ai_deployment, + model_name=self.embeddings.open_ai_model_name, + dimensions=self.embeddings.open_ai_dimensions, + inputs=[ + InputFieldMappingEntry(name="text", source="/document/pages/*"), + ], + outputs=[OutputFieldMappingEntry(name="embedding", target_name="vector")], + ) + + index_projection = SearchIndexerIndexProjection( + selectors=[ + SearchIndexerIndexProjectionSelector( + target_index_name=index_name, + parent_key_field_name="parent_id", + source_context="/document/pages/*", + mappings=[ + InputFieldMappingEntry(name="content", source="/document/pages/*"), + InputFieldMappingEntry(name="sourcepage", source="/document/metadata_storage_name"), + InputFieldMappingEntry(name="sourcefile", source="/document/metadata_storage_name"), + InputFieldMappingEntry(name="storageUrl", source="/document/metadata_storage_path"), + InputFieldMappingEntry( + name=self.search_field_name_embedding, source="/document/pages/*/vector" + ), + ], + ), + ], + parameters=SearchIndexerIndexProjectionsParameters( + projection_mode=IndexProjectionMode.SKIP_INDEXING_PARENT_DOCUMENTS + ), + ) + + skillset = SearchIndexerSkillset( + name=self.skillset_name, + description="Skillset to chunk documents and generate embeddings", + skills=[split_skill, embedding_skill], + index_projection=index_projection, + ) + + return skillset + + async def setup(self): + logger.info("Setting up search index using integrated vectorization...") + search_manager = SearchManager( + search_info=self.search_info, + search_analyzer_name=self.search_analyzer_name, + use_acls=self.use_acls, + use_int_vectorization=True, + embeddings=self.embeddings, + field_name_embedding=self.search_field_name_embedding, + search_images=False, + ) + + await search_manager.create_index() + + ds_client = self.search_info.create_search_indexer_client() + ds_container = 
SearchIndexerDataContainer(name=self.blob_manager.container) + data_source_connection = SearchIndexerDataSourceConnection( + name=self.data_source_name, + type=SearchIndexerDataSourceType.AZURE_BLOB, + connection_string=self.blob_manager.get_managedidentity_connectionstring(), + container=ds_container, + data_deletion_detection_policy=NativeBlobSoftDeleteDeletionDetectionPolicy(), + ) + + await ds_client.create_or_update_data_source_connection(data_source_connection) + + embedding_skillset = await self.create_embedding_skill(self.search_info.index_name) + await ds_client.create_or_update_skillset(embedding_skillset) + await ds_client.close() + + async def run(self): + if self.document_action == DocumentAction.Add: + files = self.list_file_strategy.list() + async for file in files: + try: + await self.blob_manager.upload_blob(file) + finally: + if file: + file.close() + elif self.document_action == DocumentAction.Remove: + paths = self.list_file_strategy.list_paths() + async for path in paths: + await self.blob_manager.remove_blob(path) + elif self.document_action == DocumentAction.RemoveAll: + await self.blob_manager.remove_blob() + + # Create an indexer + indexer = SearchIndexer( + name=self.indexer_name, + description="Indexer to index documents and generate embeddings", + skillset_name=self.skillset_name, + target_index_name=self.search_info.index_name, + data_source_name=self.data_source_name, + ) + + indexer_client = self.search_info.create_search_indexer_client() + indexer_result = await indexer_client.create_or_update_indexer(indexer) + + # Run the indexer + await indexer_client.run_indexer(self.indexer_name) + await indexer_client.close() + + logger.info( + f"Successfully created index, indexer: {indexer_result.name}, and skillset. Please navigate to search service in Azure Portal to view the status of the indexer." + ) diff --git a/app/backend/prepdocslib/jsonparser.py b/app/backend/prepdocslib/jsonparser.py new file mode 100644 index 0000000000..bc17c7ceec --- /dev/null +++ b/app/backend/prepdocslib/jsonparser.py @@ -0,0 +1,24 @@ +import json +from collections.abc import AsyncGenerator +from typing import IO + +from .page import Page +from .parser import Parser + + +class JsonParser(Parser): + """ + Concrete parser that can parse JSON into Page objects. A top-level object becomes a single Page, while a top-level array becomes multiple Page objects. 
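A quick usage sketch of the behaviour described here (import path assumed from this PR's layout): a top-level array yields one `Page` per element, and the offset advances past each serialized object plus one character for the bracket or comma before it.

```python
# Illustrative only: parse a top-level JSON array with JsonParser and inspect the Pages.
import asyncio
import io

from prepdocslib.jsonparser import JsonParser  # import path assumed (app/backend on sys.path)


async def demo() -> None:
    content = io.BytesIO(b'[{"a": 1}, {"b": 2}]')
    async for page in JsonParser().parse(content):
        print(page.page_num, page.offset, page.text)
    # 0 1 {"a": 1}
    # 1 10 {"b": 2}


asyncio.run(demo())
```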
+ """ + + async def parse(self, content: IO) -> AsyncGenerator[Page, None]: + offset = 0 + data = json.loads(content.read()) + if isinstance(data, list): + for i, obj in enumerate(data): + offset += 1 # For opening bracket or comma before object + page_text = json.dumps(obj) + yield Page(i, offset, page_text) + offset += len(page_text) + elif isinstance(data, dict): + yield Page(0, 0, json.dumps(data)) diff --git a/app/backend/prepdocslib/listfilestrategy.py b/app/backend/prepdocslib/listfilestrategy.py new file mode 100644 index 0000000000..bdceef0754 --- /dev/null +++ b/app/backend/prepdocslib/listfilestrategy.py @@ -0,0 +1,178 @@ +import base64 +import hashlib +import logging +import os +import re +import tempfile +from abc import ABC +from collections.abc import AsyncGenerator +from glob import glob +from typing import IO, Optional, Union + +from azure.core.credentials_async import AsyncTokenCredential +from azure.storage.filedatalake.aio import ( + DataLakeServiceClient, +) + +logger = logging.getLogger("scripts") + + +class File: + """ + Represents a file stored either locally or in a data lake storage account + This file might contain access control information about which users or groups can access it + """ + + def __init__(self, content: IO, acls: Optional[dict[str, list]] = None, url: Optional[str] = None): + self.content = content + self.acls = acls or {} + self.url = url + + def filename(self): + return os.path.basename(self.content.name) + + def file_extension(self): + return os.path.splitext(self.content.name)[1] + + def filename_to_id(self): + filename_ascii = re.sub("[^0-9a-zA-Z_-]", "_", self.filename()) + filename_hash = base64.b16encode(self.filename().encode("utf-8")).decode("ascii") + acls_hash = "" + if self.acls: + acls_hash = base64.b16encode(str(self.acls).encode("utf-8")).decode("ascii") + return f"file-{filename_ascii}-{filename_hash}{acls_hash}" + + def close(self): + if self.content: + self.content.close() + + +class ListFileStrategy(ABC): + """ + Abstract strategy for listing files that are located somewhere. 
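The `filename_to_id` method above combines a readable ASCII form of the name with a base16 digest of the exact name (and of the ACLs, when present), so the resulting search document id is both human-scannable and collision-resistant. A minimal sketch of that derivation, with a made-up filename:

```python
# Illustrative sketch of the id derivation in File.filename_to_id (filename is made up).
import base64
import re

filename = "Benefit Options.pdf"
filename_ascii = re.sub("[^0-9a-zA-Z_-]", "_", filename)  # "Benefit_Options_pdf" (index-safe characters only)
filename_hash = base64.b16encode(filename.encode("utf-8")).decode("ascii")  # hex of the exact original name
print(f"file-{filename_ascii}-{filename_hash}")
# Two different names that normalize to the same ASCII form still get distinct ids via the hex part.
```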
For example, on a local computer or remotely in a storage account + """ + + async def list(self) -> AsyncGenerator[File, None]: + if False: # pragma: no cover - this is necessary for mypy to type check + yield + + async def list_paths(self) -> AsyncGenerator[str, None]: + if False: # pragma: no cover - this is necessary for mypy to type check + yield + + +class LocalListFileStrategy(ListFileStrategy): + """ + Concrete strategy for listing files that are located in a local filesystem + """ + + def __init__(self, path_pattern: str): + self.path_pattern = path_pattern + + async def list_paths(self) -> AsyncGenerator[str, None]: + async for p in self._list_paths(self.path_pattern): + yield p + + async def _list_paths(self, path_pattern: str) -> AsyncGenerator[str, None]: + for path in glob(path_pattern): + if os.path.isdir(path): + async for p in self._list_paths(f"{path}/*"): + yield p + else: + # Only list files, not directories + yield path + + async def list(self) -> AsyncGenerator[File, None]: + async for path in self.list_paths(): + if not self.check_md5(path): + yield File(content=open(path, mode="rb")) + + def check_md5(self, path: str) -> bool: + # if filename ends in .md5 skip + if path.endswith(".md5"): + return True + + # if there is a file called .md5 in this directory, see if its updated + stored_hash = None + with open(path, "rb") as file: + existing_hash = hashlib.md5(file.read()).hexdigest() + hash_path = f"{path}.md5" + if os.path.exists(hash_path): + with open(hash_path, encoding="utf-8") as md5_f: + stored_hash = md5_f.read() + + if stored_hash and stored_hash.strip() == existing_hash.strip(): + logger.info("Skipping %s, no changes detected.", path) + return True + + # Write the hash + with open(hash_path, "w", encoding="utf-8") as md5_f: + md5_f.write(existing_hash) + + return False + + +class ADLSGen2ListFileStrategy(ListFileStrategy): + """ + Concrete strategy for listing files that are located in a data lake storage account + """ + + def __init__( + self, + data_lake_storage_account: str, + data_lake_filesystem: str, + data_lake_path: str, + credential: Union[AsyncTokenCredential, str], + ): + self.data_lake_storage_account = data_lake_storage_account + self.data_lake_filesystem = data_lake_filesystem + self.data_lake_path = data_lake_path + self.credential = credential + + async def list_paths(self) -> AsyncGenerator[str, None]: + async with DataLakeServiceClient( + account_url=f"https://{self.data_lake_storage_account}.dfs.core.windows.net", credential=self.credential + ) as service_client, service_client.get_file_system_client(self.data_lake_filesystem) as filesystem_client: + async for path in filesystem_client.get_paths(path=self.data_lake_path, recursive=True): + if path.is_directory: + continue + + yield path.name + + async def list(self) -> AsyncGenerator[File, None]: + async with DataLakeServiceClient( + account_url=f"https://{self.data_lake_storage_account}.dfs.core.windows.net", credential=self.credential + ) as service_client, service_client.get_file_system_client(self.data_lake_filesystem) as filesystem_client: + async for path in self.list_paths(): + temp_file_path = os.path.join(tempfile.gettempdir(), os.path.basename(path)) + try: + async with filesystem_client.get_file_client(path) as file_client: + with open(temp_file_path, "wb") as temp_file: + downloader = await file_client.download_file() + await downloader.readinto(temp_file) + # Parse out user ids and group ids + acls: dict[str, list[str]] = {"oids": [], "groups": []} + # 
https://learn.microsoft.com/python/api/azure-storage-file-datalake/azure.storage.filedatalake.datalakefileclient?view=azure-python#azure-storage-filedatalake-datalakefileclient-get-access-control + # Request ACLs as GUIDs + access_control = await file_client.get_access_control(upn=False) + acl_list = access_control["acl"] + # https://learn.microsoft.com/azure/storage/blobs/data-lake-storage-access-control + # ACL Format: user::rwx,group::r-x,other::r--,user:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx:r-- + acl_list = acl_list.split(",") + for acl in acl_list: + acl_parts: list = acl.split(":") + if len(acl_parts) != 3: + continue + if len(acl_parts[1]) == 0: + continue + if acl_parts[0] == "user" and "r" in acl_parts[2]: + acls["oids"].append(acl_parts[1]) + if acl_parts[0] == "group" and "r" in acl_parts[2]: + acls["groups"].append(acl_parts[1]) + yield File(content=open(temp_file_path, "rb"), acls=acls, url=file_client.url) + except Exception as data_lake_exception: + logger.error(f"\tGot an error while reading {path} -> {data_lake_exception} --> skipping file") + try: + os.remove(temp_file_path) + except Exception as file_delete_exception: + logger.error(f"\tGot an error while deleting {temp_file_path} -> {file_delete_exception}") diff --git a/app/backend/prepdocslib/mediadescriber.py b/app/backend/prepdocslib/mediadescriber.py new file mode 100644 index 0000000000..5aae79232e --- /dev/null +++ b/app/backend/prepdocslib/mediadescriber.py @@ -0,0 +1,107 @@ +import logging +from abc import ABC + +import aiohttp +from azure.core.credentials_async import AsyncTokenCredential +from azure.identity.aio import get_bearer_token_provider +from rich.progress import Progress +from tenacity import retry, retry_if_exception_type, stop_after_attempt, wait_fixed + +logger = logging.getLogger("scripts") + + +class MediaDescriber(ABC): + + async def describe_image(self, image_bytes) -> str: + raise NotImplementedError # pragma: no cover + + +class ContentUnderstandingDescriber: + CU_API_VERSION = "2024-12-01-preview" + + analyzer_schema = { + "analyzerId": "image_analyzer", + "name": "Image understanding", + "description": "Extract detailed structured information from images extracted from documents.", + "baseAnalyzerId": "prebuilt-image", + "scenario": "image", + "config": {"returnDetails": False}, + "fieldSchema": { + "name": "ImageInformation", + "descriptions": "Description of image.", + "fields": { + "Description": { + "type": "string", + "description": "Description of the image. If the image has a title, start with the title. Include a 2-sentence summary. If the image is a chart, diagram, or table, include the underlying data in an HTML table tag, with accurate numbers. If the image is a chart, describe any axis or legends. 
The only allowed HTML tags are the table/thead/tr/td/tbody tags.", + }, + }, + }, + } + + def __init__(self, endpoint: str, credential: AsyncTokenCredential): + self.endpoint = endpoint + self.credential = credential + + async def poll_api(self, session, poll_url, headers): + + @retry(stop=stop_after_attempt(60), wait=wait_fixed(2), retry=retry_if_exception_type(ValueError)) + async def poll(): + async with session.get(poll_url, headers=headers) as response: + response.raise_for_status() + response_json = await response.json() + if response_json["status"] == "Failed": + raise Exception("Failed") + if response_json["status"] == "Running": + raise ValueError("Running") + return response_json + + return await poll() + + async def create_analyzer(self): + logger.info("Creating analyzer '%s'...", self.analyzer_schema["analyzerId"]) + + token_provider = get_bearer_token_provider(self.credential, "https://cognitiveservices.azure.com/.default") + token = await token_provider() + headers = {"Authorization": f"Bearer {token}", "Content-Type": "application/json"} + params = {"api-version": self.CU_API_VERSION} + analyzer_id = self.analyzer_schema["analyzerId"] + cu_endpoint = f"{self.endpoint}/contentunderstanding/analyzers/{analyzer_id}" + async with aiohttp.ClientSession() as session: + async with session.put( + url=cu_endpoint, params=params, headers=headers, json=self.analyzer_schema + ) as response: + if response.status == 409: + logger.info("Analyzer '%s' already exists.", analyzer_id) + return + elif response.status != 201: + data = await response.text() + raise Exception("Error creating analyzer", data) + else: + poll_url = response.headers.get("Operation-Location") + + with Progress() as progress: + progress.add_task("Creating analyzer...", total=None, start=False) + await self.poll_api(session, poll_url, headers) + + async def describe_image(self, image_bytes: bytes) -> str: + logger.info("Sending image to Azure Content Understanding service...") + async with aiohttp.ClientSession() as session: + token = await self.credential.get_token("https://cognitiveservices.azure.com/.default") + headers = {"Authorization": "Bearer " + token.token} + params = {"api-version": self.CU_API_VERSION} + analyzer_name = self.analyzer_schema["analyzerId"] + async with session.post( + url=f"{self.endpoint}/contentunderstanding/analyzers/{analyzer_name}:analyze", + params=params, + headers=headers, + data=image_bytes, + ) as response: + response.raise_for_status() + poll_url = response.headers["Operation-Location"] + + with Progress() as progress: + progress.add_task("Processing...", total=None, start=False) + results = await self.poll_api(session, poll_url, headers) + + fields = results["result"]["contents"][0]["fields"] + return fields["Description"]["valueString"] diff --git a/app/backend/prepdocslib/page.py b/app/backend/prepdocslib/page.py new file mode 100644 index 0000000000..857235c571 --- /dev/null +++ b/app/backend/prepdocslib/page.py @@ -0,0 +1,28 @@ +class Page: + """ + A single page from a document + + Attributes: + page_num (int): Page number (0-indexed) + offset (int): If the text of the entire Document was concatenated into a single string, the index of the first character on the page. 
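The offset convention in this docstring (its worked example continues just below) can be restated as code; the import path is assumed from this PR's layout:

```python
# Illustrative check of the Page.offset convention: offsets index into the concatenated document text.
from prepdocslib.page import Page  # import path assumed (app/backend on sys.path)

pages = [Page(page_num=0, offset=0, text="hello"), Page(page_num=1, offset=5, text="world")]
concatenated = "".join(p.text for p in pages)  # "helloworld"
assert concatenated[pages[1].offset:] == "world"  # offset 5 is where page 2 begins
```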
For example, if page 1 had the text "hello" and page 2 had the text "world", the offset of page 2 is 5 ("hellow") + text (str): The text of the page + """ + + def __init__(self, page_num: int, offset: int, text: str): + self.page_num = page_num + self.offset = offset + self.text = text + + +class SplitPage: + """ + A section of a page that has been split into a smaller chunk. + + Attributes: + page_num (int): Page number (0-indexed) + text (str): The text of the section + """ + + def __init__(self, page_num: int, text: str): + self.page_num = page_num + self.text = text diff --git a/app/backend/prepdocslib/parser.py b/app/backend/prepdocslib/parser.py new file mode 100644 index 0000000000..1552c00fdc --- /dev/null +++ b/app/backend/prepdocslib/parser.py @@ -0,0 +1,15 @@ +from abc import ABC +from collections.abc import AsyncGenerator +from typing import IO + +from .page import Page + + +class Parser(ABC): + """ + Abstract parser that parses content into Page objects + """ + + async def parse(self, content: IO) -> AsyncGenerator[Page, None]: + if False: + yield # pragma: no cover - this is necessary for mypy to type check diff --git a/app/backend/prepdocslib/pdfparser.py b/app/backend/prepdocslib/pdfparser.py new file mode 100644 index 0000000000..c96980d21c --- /dev/null +++ b/app/backend/prepdocslib/pdfparser.py @@ -0,0 +1,251 @@ +import html +import io +import logging +from collections.abc import AsyncGenerator +from enum import Enum +from typing import IO, Union + +import pymupdf +from azure.ai.documentintelligence.aio import DocumentIntelligenceClient +from azure.ai.documentintelligence.models import ( + AnalyzeDocumentRequest, + AnalyzeResult, + DocumentFigure, + DocumentTable, +) +from azure.core.credentials import AzureKeyCredential +from azure.core.credentials_async import AsyncTokenCredential +from azure.core.exceptions import HttpResponseError +from PIL import Image +from pypdf import PdfReader + +from .mediadescriber import ContentUnderstandingDescriber +from .page import Page +from .parser import Parser + +logger = logging.getLogger("scripts") + + +class LocalPdfParser(Parser): + """ + Concrete parser backed by PyPDF that can parse PDFs into pages + To learn more, please visit https://pypi.org/project/pypdf/ + """ + + async def parse(self, content: IO) -> AsyncGenerator[Page, None]: + logger.info("Extracting text from '%s' using local PDF parser (pypdf)", content.name) + + reader = PdfReader(content) + pages = reader.pages + offset = 0 + for page_num, p in enumerate(pages): + page_text = p.extract_text() + yield Page(page_num=page_num, offset=offset, text=page_text) + offset += len(page_text) + + +class DocumentAnalysisParser(Parser): + """ + Concrete parser backed by Azure AI Document Intelligence that can parse many document formats into pages + To learn more, please visit https://learn.microsoft.com/azure/ai-services/document-intelligence/overview + """ + + def __init__( + self, + endpoint: str, + credential: Union[AsyncTokenCredential, AzureKeyCredential], + model_id="prebuilt-layout", + use_content_understanding=True, + content_understanding_endpoint: Union[str, None] = None, + ): + self.model_id = model_id + self.endpoint = endpoint + self.credential = credential + self.use_content_understanding = use_content_understanding + self.content_understanding_endpoint = content_understanding_endpoint + + async def parse(self, content: IO) -> AsyncGenerator[Page, None]: + logger.info("Extracting text from '%s' using Azure Document Intelligence", content.name) + + async with 
DocumentIntelligenceClient( + endpoint=self.endpoint, credential=self.credential + ) as document_intelligence_client: + file_analyzed = False + if self.use_content_understanding: + if self.content_understanding_endpoint is None: + raise ValueError("Content Understanding is enabled but no endpoint was provided") + if isinstance(self.credential, AzureKeyCredential): + raise ValueError( + "AzureKeyCredential is not supported for Content Understanding, use keyless auth instead" + ) + cu_describer = ContentUnderstandingDescriber(self.content_understanding_endpoint, self.credential) + content_bytes = content.read() + try: + poller = await document_intelligence_client.begin_analyze_document( + model_id="prebuilt-layout", + analyze_request=AnalyzeDocumentRequest(bytes_source=content_bytes), + output=["figures"], + features=["ocrHighResolution"], + output_content_format="markdown", + ) + doc_for_pymupdf = pymupdf.open(stream=io.BytesIO(content_bytes)) + file_analyzed = True + except HttpResponseError as e: + content.seek(0) + if e.error and e.error.code == "InvalidArgument": + logger.error( + "This document type does not support media description. Proceeding with standard analysis." + ) + else: + logger.error( + "Unexpected error analyzing document for media description: %s. Proceeding with standard analysis.", + e, + ) + + if file_analyzed is False: + poller = await document_intelligence_client.begin_analyze_document( + model_id=self.model_id, analyze_request=content, content_type="application/octet-stream" + ) + analyze_result: AnalyzeResult = await poller.result() + + offset = 0 + for page in analyze_result.pages: + tables_on_page = [ + table + for table in (analyze_result.tables or []) + if table.bounding_regions and table.bounding_regions[0].page_number == page.page_number + ] + figures_on_page = [] + if self.use_content_understanding: + figures_on_page = [ + figure + for figure in (analyze_result.figures or []) + if figure.bounding_regions and figure.bounding_regions[0].page_number == page.page_number + ] + + class ObjectType(Enum): + NONE = -1 + TABLE = 0 + FIGURE = 1 + + page_offset = page.spans[0].offset + page_length = page.spans[0].length + mask_chars: list[tuple[ObjectType, Union[int, None]]] = [(ObjectType.NONE, None)] * page_length + # mark all positions of the table spans in the page + for table_idx, table in enumerate(tables_on_page): + for span in table.spans: + # replace all table spans with "table_id" in table_chars array + for i in range(span.length): + idx = span.offset - page_offset + i + if idx >= 0 and idx < page_length: + mask_chars[idx] = (ObjectType.TABLE, table_idx) + # mark all positions of the figure spans in the page + for figure_idx, figure in enumerate(figures_on_page): + for span in figure.spans: + # replace all figure spans with "figure_id" in figure_chars array + for i in range(span.length): + idx = span.offset - page_offset + i + if idx >= 0 and idx < page_length: + mask_chars[idx] = (ObjectType.FIGURE, figure_idx) + + # build page text by replacing characters in table spans with table html + page_text = "" + added_objects = set() # set of object types todo mypy + for idx, mask_char in enumerate(mask_chars): + object_type, object_idx = mask_char + if object_type == ObjectType.NONE: + page_text += analyze_result.content[page_offset + idx] + elif object_type == ObjectType.TABLE: + if object_idx is None: + raise ValueError("Expected object_idx to be set") + if mask_char not in added_objects: + page_text += 
DocumentAnalysisParser.table_to_html(tables_on_page[object_idx]) + added_objects.add(mask_char) + elif object_type == ObjectType.FIGURE: + if cu_describer is None: + raise ValueError("cu_describer should not be None, unable to describe figure") + if object_idx is None: + raise ValueError("Expected object_idx to be set") + if mask_char not in added_objects: + figure_html = await DocumentAnalysisParser.figure_to_html( + doc_for_pymupdf, figures_on_page[object_idx], cu_describer + ) + page_text += figure_html + added_objects.add(mask_char) + # We remove these comments since they are not needed and skew the page numbers + page_text = page_text.replace("<!-- PageBreak -->", "") + # We remove excess newlines at the beginning and end of the page + page_text = page_text.strip() + yield Page(page_num=page.page_number - 1, offset=offset, text=page_text) + offset += len(page_text) + + @staticmethod + async def figure_to_html( + doc: pymupdf.Document, figure: DocumentFigure, cu_describer: ContentUnderstandingDescriber + ) -> str: + figure_title = (figure.caption and figure.caption.content) or "" + logger.info("Describing figure %s with title '%s'", figure.id, figure_title) + if not figure.bounding_regions: + return f"
<figure><figcaption>{figure_title}</figcaption></figure>" + if len(figure.bounding_regions) > 1: + logger.warning("Figure %s has more than one bounding region, using the first one", figure.id) + first_region = figure.bounding_regions[0] + # To learn more about bounding regions, see https://aka.ms/bounding-region + bounding_box = ( + first_region.polygon[0], # x0 (left) + first_region.polygon[1], # y0 (top) + first_region.polygon[4], # x1 (right) + first_region.polygon[5], # y1 (bottom) + ) + page_number = first_region["pageNumber"] # 1-indexed + cropped_img = DocumentAnalysisParser.crop_image_from_pdf_page(doc, page_number - 1, bounding_box) + figure_description = await cu_describer.describe_image(cropped_img) + return f"<figure><figcaption>{figure_title}<br>{figure_description}</figcaption></figure>"
+ + @staticmethod + def table_to_html(table: DocumentTable): + table_html = "<table>" + rows = [ + sorted([cell for cell in table.cells if cell.row_index == i], key=lambda cell: cell.column_index) + for i in range(table.row_count) + ] + for row_cells in rows: + table_html += "<tr>" + for cell in row_cells: + tag = "th" if (cell.kind == "columnHeader" or cell.kind == "rowHeader") else "td" + cell_spans = "" + if cell.column_span is not None and cell.column_span > 1: + cell_spans += f" colSpan={cell.column_span}" + if cell.row_span is not None and cell.row_span > 1: + cell_spans += f" rowSpan={cell.row_span}" + table_html += f"<{tag}{cell_spans}>{html.escape(cell.content)}</{tag}>" + table_html += "</tr>" + table_html += "</table>
" + return table_html + + @staticmethod + def crop_image_from_pdf_page( + doc: pymupdf.Document, page_number: int, bbox_inches: tuple[float, float, float, float] + ) -> bytes: + """ + Crops a region from a given page in a PDF and returns it as an image. + + :param pdf_path: Path to the PDF file. + :param page_number: The page number to crop from (0-indexed). + :param bbox_inches: A tuple of (x0, y0, x1, y1) coordinates for the bounding box, in inches. + :return: A PIL Image of the cropped area. + """ + # Scale the bounding box to 72 DPI + bbox_dpi = 72 + bbox_pixels = [x * bbox_dpi for x in bbox_inches] + rect = pymupdf.Rect(bbox_pixels) + # Assume that the PDF has 300 DPI, + # and use the matrix to convert between the 2 DPIs + page_dpi = 300 + page = doc.load_page(page_number) + pix = page.get_pixmap(matrix=pymupdf.Matrix(page_dpi / bbox_dpi, page_dpi / bbox_dpi), clip=rect) + + img = Image.frombytes("RGB", (pix.width, pix.height), pix.samples) + bytes_io = io.BytesIO() + img.save(bytes_io, format="PNG") + return bytes_io.getvalue() diff --git a/app/backend/prepdocslib/searchmanager.py b/app/backend/prepdocslib/searchmanager.py new file mode 100644 index 0000000000..e6ca925e24 --- /dev/null +++ b/app/backend/prepdocslib/searchmanager.py @@ -0,0 +1,491 @@ +import asyncio +import logging +import os +from typing import Optional + +from azure.search.documents.indexes.models import ( + AzureOpenAIVectorizer, + AzureOpenAIVectorizerParameters, + BinaryQuantizationCompression, + HnswAlgorithmConfiguration, + HnswParameters, + KnowledgeAgent, + KnowledgeAgentAzureOpenAIModel, + KnowledgeAgentRequestLimits, + KnowledgeAgentTargetIndex, + RescoringOptions, + SearchableField, + SearchField, + SearchFieldDataType, + SearchIndex, + SemanticConfiguration, + SemanticField, + SemanticPrioritizedFields, + SemanticSearch, + SimpleField, + VectorSearch, + VectorSearchAlgorithmConfiguration, + VectorSearchCompression, + VectorSearchCompressionRescoreStorageMethod, + VectorSearchProfile, + VectorSearchVectorizer, +) + +from .blobmanager import BlobManager +from .embeddings import AzureOpenAIEmbeddingService, OpenAIEmbeddings +from .listfilestrategy import File +from .strategy import SearchInfo +from .textsplitter import SplitPage + +logger = logging.getLogger("scripts") + + +class Section: + """ + A section of a page that is stored in a search service. These sections are used as context by Azure OpenAI service + """ + + def __init__(self, split_page: SplitPage, content: File, category: Optional[str] = None): + self.split_page = split_page + self.content = content + self.category = category + + +class SearchManager: + """ + Class to manage a search service. 
It can create indexes, and update or remove sections stored in these indexes + To learn more, please visit https://learn.microsoft.com/azure/search/search-what-is-azure-search + """ + + def __init__( + self, + search_info: SearchInfo, + search_analyzer_name: Optional[str] = None, + use_acls: bool = False, + use_int_vectorization: bool = False, + embeddings: Optional[OpenAIEmbeddings] = None, + field_name_embedding: Optional[str] = None, + search_images: bool = False, + ): + self.search_info = search_info + self.search_analyzer_name = search_analyzer_name + self.use_acls = use_acls + self.use_int_vectorization = use_int_vectorization + self.embeddings = embeddings + self.embedding_dimensions = self.embeddings.open_ai_dimensions if self.embeddings else None + self.field_name_embedding = field_name_embedding + self.search_images = search_images + + async def create_index(self): + logger.info("Checking whether search index %s exists...", self.search_info.index_name) + + async with self.search_info.create_search_index_client() as search_index_client: + + embedding_field = None + image_embedding_field = None + text_vector_search_profile = None + text_vector_algorithm = None + text_vector_compression = None + image_vector_search_profile = None + image_vector_algorithm = None + + if self.embeddings: + if self.embedding_dimensions is None: + raise ValueError( + "Embedding dimensions must be set in order to add an embedding field to the search index" + ) + if self.field_name_embedding is None: + raise ValueError( + "Embedding field must be set in order to add an embedding field to the search index" + ) + + text_vectorizer = None + if isinstance(self.embeddings, AzureOpenAIEmbeddingService): + text_vectorizer = AzureOpenAIVectorizer( + vectorizer_name=f"{self.embeddings.open_ai_model_name}-vectorizer", + parameters=AzureOpenAIVectorizerParameters( + resource_url=self.embeddings.open_ai_endpoint, + deployment_name=self.embeddings.open_ai_deployment, + model_name=self.embeddings.open_ai_model_name, + ), + ) + + text_vector_algorithm = HnswAlgorithmConfiguration( + name="hnsw_config", + parameters=HnswParameters(metric="cosine"), + ) + text_vector_compression = BinaryQuantizationCompression( + compression_name=f"{self.field_name_embedding}-compression", + truncation_dimension=1024, # should this be a parameter? maybe not yet? 
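+ # Added explanatory note (not part of the original change): binary quantization
+ # stores each vector dimension as a single bit, and truncation_dimension keeps only
+ # the first 1024 dimensions, which shrinks vector index storage substantially.
+ # The rescoring options below re-rank an oversampled candidate set against the
+ # preserved full-precision vectors to recover ranking quality lost to compression.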
+ rescoring_options=RescoringOptions( + enable_rescoring=True, + default_oversampling=10, + rescore_storage_method=VectorSearchCompressionRescoreStorageMethod.PRESERVE_ORIGINALS, + ), + # Explicitly set deprecated parameters to None + rerank_with_original_vectors=None, + default_oversampling=None, + ) + text_vector_search_profile = VectorSearchProfile( + name=f"{self.field_name_embedding}-profile", + algorithm_configuration_name=text_vector_algorithm.name, + compression_name=text_vector_compression.compression_name, + **({"vectorizer_name": text_vectorizer.vectorizer_name if text_vectorizer else None}), + ) + + embedding_field = SearchField( + name=self.field_name_embedding, + type=SearchFieldDataType.Collection(SearchFieldDataType.Single), + hidden=True, + searchable=True, + filterable=False, + sortable=False, + facetable=False, + vector_search_dimensions=self.embedding_dimensions, + vector_search_profile_name=f"{self.field_name_embedding}-profile", + stored=False, + ) + + if self.search_images: + image_vector_algorithm = HnswAlgorithmConfiguration( + name="image_hnsw_config", + parameters=HnswParameters(metric="cosine"), + ) + image_vector_search_profile = VectorSearchProfile( + name="imageEmbedding-profile", + algorithm_configuration_name=image_vector_algorithm.name, + ) + image_embedding_field = SearchField( + name="imageEmbedding", + type=SearchFieldDataType.Collection(SearchFieldDataType.Single), + hidden=False, + searchable=True, + filterable=False, + sortable=False, + facetable=False, + vector_search_dimensions=1024, + vector_search_profile_name=image_vector_search_profile.name, + ) + + if self.search_info.index_name not in [name async for name in search_index_client.list_index_names()]: + logger.info("Creating new search index %s", self.search_info.index_name) + fields = [ + ( + SimpleField(name="id", type="Edm.String", key=True) + if not self.use_int_vectorization + else SearchField( + name="id", + type="Edm.String", + key=True, + sortable=True, + filterable=True, + facetable=True, + analyzer_name="keyword", + ) + ), + SearchableField( + name="content", + type="Edm.String", + analyzer_name=self.search_analyzer_name, + ), + SimpleField(name="category", type="Edm.String", filterable=True, facetable=True), + SimpleField( + name="sourcepage", + type="Edm.String", + filterable=True, + facetable=True, + ), + SimpleField( + name="sourcefile", + type="Edm.String", + filterable=True, + facetable=True, + ), + SimpleField( + name="storageUrl", + type="Edm.String", + filterable=True, + facetable=False, + ), + ] + if self.use_acls: + fields.append( + SimpleField( + name="oids", + type=SearchFieldDataType.Collection(SearchFieldDataType.String), + filterable=True, + ) + ) + fields.append( + SimpleField( + name="groups", + type=SearchFieldDataType.Collection(SearchFieldDataType.String), + filterable=True, + ) + ) + + if self.use_int_vectorization: + logger.info("Including parent_id field for integrated vectorization support in new index") + fields.append(SearchableField(name="parent_id", type="Edm.String", filterable=True)) + + vectorizers: list[VectorSearchVectorizer] = [] + vector_search_profiles = [] + vector_algorithms: list[VectorSearchAlgorithmConfiguration] = [] + vector_compressions: list[VectorSearchCompression] = [] + if embedding_field: + logger.info("Including %s field for text vectors in new index", embedding_field.name) + fields.append(embedding_field) + if text_vectorizer is not None: + vectorizers.append(text_vectorizer) + if ( + text_vector_search_profile is None + or 
text_vector_algorithm is None + or text_vector_compression is None + ): + raise ValueError("Text vector search profile, algorithm and compression must be set") + vector_search_profiles.append(text_vector_search_profile) + vector_algorithms.append(text_vector_algorithm) + vector_compressions.append(text_vector_compression) + + if image_embedding_field: + logger.info("Including %s field for image vectors in new index", image_embedding_field.name) + fields.append(image_embedding_field) + if image_vector_search_profile is None or image_vector_algorithm is None: + raise ValueError("Image search profile and algorithm must be set") + vector_search_profiles.append(image_vector_search_profile) + vector_algorithms.append(image_vector_algorithm) + + index = SearchIndex( + name=self.search_info.index_name, + fields=fields, + semantic_search=SemanticSearch( + default_configuration_name="default", + configurations=[ + SemanticConfiguration( + name="default", + prioritized_fields=SemanticPrioritizedFields( + title_field=SemanticField(field_name="sourcepage"), + content_fields=[SemanticField(field_name="content")], + ), + ) + ], + ), + vector_search=VectorSearch( + profiles=vector_search_profiles, + algorithms=vector_algorithms, + compressions=vector_compressions, + vectorizers=vectorizers, + ), + ) + + await search_index_client.create_index(index) + else: + logger.info("Search index %s already exists", self.search_info.index_name) + existing_index = await search_index_client.get_index(self.search_info.index_name) + if not any(field.name == "storageUrl" for field in existing_index.fields): + logger.info("Adding storageUrl field to index %s", self.search_info.index_name) + existing_index.fields.append( + SimpleField( + name="storageUrl", + type="Edm.String", + filterable=True, + facetable=False, + ), + ) + await search_index_client.create_or_update_index(existing_index) + + if embedding_field and not any( + field.name == self.field_name_embedding for field in existing_index.fields + ): + logger.info("Adding %s field for text embeddings", self.field_name_embedding) + existing_index.fields.append(embedding_field) + if existing_index.vector_search is None: + raise ValueError("Vector search is not enabled for the existing index") + if text_vectorizer is not None: + if existing_index.vector_search.vectorizers is None: + existing_index.vector_search.vectorizers = [] + existing_index.vector_search.vectorizers.append(text_vectorizer) + if ( + text_vector_search_profile is None + or text_vector_algorithm is None + or text_vector_compression is None + ): + raise ValueError("Text vector search profile, algorithm and compression must be set") + if existing_index.vector_search.profiles is None: + existing_index.vector_search.profiles = [] + existing_index.vector_search.profiles.append(text_vector_search_profile) + if existing_index.vector_search.algorithms is None: + existing_index.vector_search.algorithms = [] + existing_index.vector_search.algorithms.append(text_vector_algorithm) + if existing_index.vector_search.compressions is None: + existing_index.vector_search.compressions = [] + existing_index.vector_search.compressions.append(text_vector_compression) + await search_index_client.create_or_update_index(existing_index) + + if image_embedding_field and not any(field.name == "imageEmbedding" for field in existing_index.fields): + logger.info("Adding %s field for image embeddings", image_embedding_field.name) + existing_index.fields.append(image_embedding_field) + if image_vector_search_profile is None or 
image_vector_algorithm is None: + raise ValueError("Image vector search profile and algorithm must be set") + if existing_index.vector_search is None: + raise ValueError("Image vector search is not enabled for the existing index") + if existing_index.vector_search.profiles is None: + existing_index.vector_search.profiles = [] + existing_index.vector_search.profiles.append(image_vector_search_profile) + if existing_index.vector_search.algorithms is None: + existing_index.vector_search.algorithms = [] + existing_index.vector_search.algorithms.append(image_vector_algorithm) + await search_index_client.create_or_update_index(existing_index) + + if existing_index.semantic_search: + if not existing_index.semantic_search.default_configuration_name: + logger.info("Adding default semantic configuration to index %s", self.search_info.index_name) + existing_index.semantic_search.default_configuration_name = "default" + + if existing_index.semantic_search.configurations: + existing_semantic_config = existing_index.semantic_search.configurations[0] + if ( + existing_semantic_config.prioritized_fields + and existing_semantic_config.prioritized_fields.title_field + and not existing_semantic_config.prioritized_fields.title_field.field_name == "sourcepage" + ): + logger.info("Updating semantic configuration for index %s", self.search_info.index_name) + existing_semantic_config.prioritized_fields.title_field = SemanticField( + field_name="sourcepage" + ) + + if existing_index.vector_search is not None and ( + existing_index.vector_search.vectorizers is None + or len(existing_index.vector_search.vectorizers) == 0 + ): + if self.embeddings is not None and isinstance(self.embeddings, AzureOpenAIEmbeddingService): + logger.info("Adding vectorizer to search index %s", self.search_info.index_name) + existing_index.vector_search.vectorizers = [ + AzureOpenAIVectorizer( + vectorizer_name=f"{self.search_info.index_name}-vectorizer", + parameters=AzureOpenAIVectorizerParameters( + resource_url=self.embeddings.open_ai_endpoint, + deployment_name=self.embeddings.open_ai_deployment, + model_name=self.embeddings.open_ai_model_name, + ), + ) + ] + await search_index_client.create_or_update_index(existing_index) + + else: + logger.info( + "Can't add vectorizer to search index %s since no Azure OpenAI embeddings service is defined", + self.search_info, + ) + if self.search_info.use_agentic_retrieval and self.search_info.agent_name: + await self.create_agent() + + async def create_agent(self): + if self.search_info.agent_name: + logger.info(f"Creating search agent named {self.search_info.agent_name}") + + async with self.search_info.create_search_index_client() as search_index_client: + await search_index_client.create_or_update_agent( + agent=KnowledgeAgent( + name=self.search_info.agent_name, + target_indexes=[ + KnowledgeAgentTargetIndex( + index_name=self.search_info.index_name, default_include_reference_source_data=True + ) + ], + models=[ + KnowledgeAgentAzureOpenAIModel( + azure_open_ai_parameters=AzureOpenAIVectorizerParameters( + resource_url=self.search_info.azure_openai_endpoint, + deployment_name=self.search_info.azure_openai_searchagent_deployment, + model_name=self.search_info.azure_openai_searchagent_model, + ) + ) + ], + request_limits=KnowledgeAgentRequestLimits( + max_output_size=self.search_info.agent_max_output_tokens + ), + ) + ) + + logger.info("Agent %s created successfully", self.search_info.agent_name) + + async def update_content( + self, sections: list[Section], image_embeddings: 
Optional[list[list[float]]] = None, url: Optional[str] = None + ): + MAX_BATCH_SIZE = 1000 + section_batches = [sections[i : i + MAX_BATCH_SIZE] for i in range(0, len(sections), MAX_BATCH_SIZE)] + + async with self.search_info.create_search_client() as search_client: + for batch_index, batch in enumerate(section_batches): + documents = [ + { + "id": f"{section.content.filename_to_id()}-page-{section_index + batch_index * MAX_BATCH_SIZE}", + "content": section.split_page.text, + "category": section.category, + "sourcepage": ( + BlobManager.blob_image_name_from_file_page( + filename=section.content.filename(), + page=section.split_page.page_num, + ) + if image_embeddings + else BlobManager.sourcepage_from_file_page( + filename=section.content.filename(), + page=section.split_page.page_num, + ) + ), + "sourcefile": section.content.filename(), + **section.content.acls, + } + for section_index, section in enumerate(batch) + ] + if url: + for document in documents: + document["storageUrl"] = url + if self.embeddings: + if self.field_name_embedding is None: + raise ValueError("Embedding field name must be set") + embeddings = await self.embeddings.create_embeddings( + texts=[section.split_page.text for section in batch] + ) + for i, document in enumerate(documents): + document[self.field_name_embedding] = embeddings[i] + if image_embeddings: + for i, (document, section) in enumerate(zip(documents, batch)): + document["imageEmbedding"] = image_embeddings[section.split_page.page_num] + + await search_client.upload_documents(documents) + + async def remove_content(self, path: Optional[str] = None, only_oid: Optional[str] = None): + logger.info( + "Removing sections from '{%s or ''}' from search index '%s'", path, self.search_info.index_name + ) + async with self.search_info.create_search_client() as search_client: + while True: + filter = None + if path is not None: + # Replace ' with '' to escape the single quote for the filter + # https://learn.microsoft.com/azure/search/query-odata-filter-orderby-syntax#escaping-special-characters-in-string-constants + path_for_filter = os.path.basename(path).replace("'", "''") + filter = f"sourcefile eq '{path_for_filter}'" + max_results = 1000 + result = await search_client.search( + search_text="", filter=filter, top=max_results, include_total_count=True + ) + result_count = await result.get_count() + if result_count == 0: + break + documents_to_remove = [] + async for document in result: + # If only_oid is set, only remove documents that have only this oid + if not only_oid or document.get("oids") == [only_oid]: + documents_to_remove.append({"id": document["id"]}) + if len(documents_to_remove) == 0: + if result_count < max_results: + break + else: + continue + removed_docs = await search_client.delete_documents(documents_to_remove) + logger.info("Removed %d sections from index", len(removed_docs)) + # It can take a few seconds for search results to reflect changes, so wait a bit + await asyncio.sleep(2) diff --git a/app/backend/prepdocslib/strategy.py b/app/backend/prepdocslib/strategy.py new file mode 100644 index 0000000000..05bc72804d --- /dev/null +++ b/app/backend/prepdocslib/strategy.py @@ -0,0 +1,66 @@ +from abc import ABC +from enum import Enum +from typing import Optional, Union + +from azure.core.credentials import AzureKeyCredential +from azure.core.credentials_async import AsyncTokenCredential +from azure.search.documents.aio import SearchClient +from azure.search.documents.indexes.aio import SearchIndexClient, SearchIndexerClient + +USER_AGENT = 
"azure-search-chat-demo/1.0.0" + + +class SearchInfo: + """ + Class representing a connection to a search service + To learn more, please visit https://learn.microsoft.com/azure/search/search-what-is-azure-search + """ + + def __init__( + self, + endpoint: str, + credential: Union[AsyncTokenCredential, AzureKeyCredential], + index_name: str, + use_agentic_retrieval: Optional[bool] = False, + agent_name: Optional[str] = None, + agent_max_output_tokens: Optional[int] = None, + azure_openai_searchagent_model: Optional[str] = None, + azure_openai_searchagent_deployment: Optional[str] = None, + azure_openai_endpoint: Optional[str] = None, + ): + self.endpoint = endpoint + self.credential = credential + self.index_name = index_name + self.agent_name = agent_name + self.agent_max_output_tokens = agent_max_output_tokens + self.use_agentic_retrieval = use_agentic_retrieval + self.azure_openai_searchagent_model = azure_openai_searchagent_model + self.azure_openai_searchagent_deployment = azure_openai_searchagent_deployment + self.azure_openai_endpoint = azure_openai_endpoint + + def create_search_client(self) -> SearchClient: + return SearchClient(endpoint=self.endpoint, index_name=self.index_name, credential=self.credential) + + def create_search_index_client(self) -> SearchIndexClient: + return SearchIndexClient(endpoint=self.endpoint, credential=self.credential) + + def create_search_indexer_client(self) -> SearchIndexerClient: + return SearchIndexerClient(endpoint=self.endpoint, credential=self.credential) + + +class DocumentAction(Enum): + Add = 0 + Remove = 1 + RemoveAll = 2 + + +class Strategy(ABC): + """ + Abstract strategy for ingesting documents into a search service. It has a single setup step to perform any required initialization, and then a run step that actually ingests documents into the search service. + """ + + async def setup(self): + raise NotImplementedError + + async def run(self): + raise NotImplementedError diff --git a/app/backend/prepdocslib/textparser.py b/app/backend/prepdocslib/textparser.py new file mode 100644 index 0000000000..2ffea49c8d --- /dev/null +++ b/app/backend/prepdocslib/textparser.py @@ -0,0 +1,31 @@ +import re +from collections.abc import AsyncGenerator +from typing import IO + +from .page import Page +from .parser import Parser + + +def cleanup_data(data: str) -> str: + """Cleans up the given content using regexes + Args: + data: (str): The data to clean up. + Returns: + str: The cleaned up data. 
+ """ + # match two or more newlines and replace them with one new line + output = re.sub(r"\n{2,}", "\n", data) + # match two or more spaces that are not newlines and replace them with one space + output = re.sub(r"[^\S\n]{2,}", " ", output) + + return output.strip() + + +class TextParser(Parser): + """Parses simple text into a Page object.""" + + async def parse(self, content: IO) -> AsyncGenerator[Page, None]: + data = content.read() + decoded_data = data.decode("utf-8") + text = cleanup_data(decoded_data) + yield Page(0, 0, text=text) diff --git a/app/backend/prepdocslib/textsplitter.py b/app/backend/prepdocslib/textsplitter.py new file mode 100644 index 0000000000..1beedf86a1 --- /dev/null +++ b/app/backend/prepdocslib/textsplitter.py @@ -0,0 +1,232 @@ +import logging +from abc import ABC +from collections.abc import Generator + +import tiktoken + +from .page import Page, SplitPage + +logger = logging.getLogger("scripts") + + +class TextSplitter(ABC): + """ + Splits a list of pages into smaller chunks + :param pages: The pages to split + :return: A generator of SplitPage + """ + + def split_pages(self, pages: list[Page]) -> Generator[SplitPage, None, None]: + if False: + yield # pragma: no cover - this is necessary for mypy to type check + + +ENCODING_MODEL = "text-embedding-ada-002" + +STANDARD_WORD_BREAKS = [",", ";", ":", " ", "(", ")", "[", "]", "{", "}", "\t", "\n"] + +# See W3C document https://www.w3.org/TR/jlreq/#cl-01 +CJK_WORD_BREAKS = [ + "、", + ",", + ";", + ":", + "(", + ")", + "【", + "】", + "「", + "」", + "『", + "』", + "〔", + "〕", + "〈", + "〉", + "《", + "》", + "〖", + "〗", + "〘", + "〙", + "〚", + "〛", + "〝", + "〞", + "〟", + "〰", + "–", + "—", + "‘", + "’", + "‚", + "‛", + "“", + "”", + "„", + "‟", + "‹", + "›", +] + +STANDARD_SENTENCE_ENDINGS = [".", "!", "?"] + +# See CL05 and CL06, based on JIS X 4051:2004 +# https://www.w3.org/TR/jlreq/#cl-04 +CJK_SENTENCE_ENDINGS = ["。", "!", "?", "‼", "⁇", "⁈", "⁉"] + +# NB: text-embedding-3-XX is the same BPE as text-embedding-ada-002 +bpe = tiktoken.encoding_for_model(ENCODING_MODEL) + +DEFAULT_OVERLAP_PERCENT = 10 # See semantic search article for 10% overlap performance +DEFAULT_SECTION_LENGTH = 1000 # Roughly 400-500 tokens for English + + +class SentenceTextSplitter(TextSplitter): + """ + Class that splits pages into smaller chunks. This is required because embedding models may not be able to analyze an entire page at once + """ + + def __init__(self, max_tokens_per_section: int = 500): + self.sentence_endings = STANDARD_SENTENCE_ENDINGS + CJK_SENTENCE_ENDINGS + self.word_breaks = STANDARD_WORD_BREAKS + CJK_WORD_BREAKS + self.max_section_length = DEFAULT_SECTION_LENGTH + self.sentence_search_limit = 100 + self.max_tokens_per_section = max_tokens_per_section + self.section_overlap = int(self.max_section_length * DEFAULT_OVERLAP_PERCENT / 100) + + def split_page_by_max_tokens(self, page_num: int, text: str) -> Generator[SplitPage, None, None]: + """ + Recursively splits page by maximum number of tokens to better handle languages with higher token/word ratios. + """ + tokens = bpe.encode(text) + if len(tokens) <= self.max_tokens_per_section: + # Section is already within max tokens, return + yield SplitPage(page_num=page_num, text=text) + else: + # Start from the center and try and find the closest sentence ending by spiralling outward. 
+ # IF we get to the outer thirds, then just split in half with a 5% overlap + start = int(len(text) // 2) + pos = 0 + boundary = int(len(text) // 3) + split_position = -1 + while start - pos > boundary: + if text[start - pos] in self.sentence_endings: + split_position = start - pos + break + elif text[start + pos] in self.sentence_endings: + split_position = start + pos + break + else: + pos += 1 + + if split_position > 0: + first_half = text[: split_position + 1] + second_half = text[split_position + 1 :] + else: + # Split page in half and call function again + # Overlap first and second halves by DEFAULT_OVERLAP_PERCENT% + middle = int(len(text) // 2) + overlap = int(len(text) * (DEFAULT_OVERLAP_PERCENT / 100)) + first_half = text[: middle + overlap] + second_half = text[middle - overlap :] + yield from self.split_page_by_max_tokens(page_num, first_half) + yield from self.split_page_by_max_tokens(page_num, second_half) + + def split_pages(self, pages: list[Page]) -> Generator[SplitPage, None, None]: + def find_page(offset): + num_pages = len(pages) + for i in range(num_pages - 1): + if offset >= pages[i].offset and offset < pages[i + 1].offset: + return pages[i].page_num + return pages[num_pages - 1].page_num + + all_text = "".join(page.text for page in pages) + if len(all_text.strip()) == 0: + return + + length = len(all_text) + if length <= self.max_section_length: + yield from self.split_page_by_max_tokens(page_num=find_page(0), text=all_text) + return + + start = 0 + end = length + while start + self.section_overlap < length: + last_word = -1 + end = start + self.max_section_length + + if end > length: + end = length + else: + # Try to find the end of the sentence + while ( + end < length + and (end - start - self.max_section_length) < self.sentence_search_limit + and all_text[end] not in self.sentence_endings + ): + if all_text[end] in self.word_breaks: + last_word = end + end += 1 + if end < length and all_text[end] not in self.sentence_endings and last_word > 0: + end = last_word # Fall back to at least keeping a whole word + if end < length: + end += 1 + + # Try to find the start of the sentence or at least a whole word boundary + last_word = -1 + while ( + start > 0 + and start > end - self.max_section_length - 2 * self.sentence_search_limit + and all_text[start] not in self.sentence_endings + ): + if all_text[start] in self.word_breaks: + last_word = start + start -= 1 + if all_text[start] not in self.sentence_endings and last_word > 0: + start = last_word + if start > 0: + start += 1 + + section_text = all_text[start:end] + yield from self.split_page_by_max_tokens(page_num=find_page(start), text=section_text) + + last_figure_start = section_text.rfind(" 2 * self.sentence_search_limit and last_figure_start > section_text.rfind( + " Generator[SplitPage, None, None]: + all_text = "".join(page.text for page in pages) + if len(all_text.strip()) == 0: + return + + length = len(all_text) + if length <= self.max_object_length: + yield SplitPage(page_num=0, text=all_text) + return + + # its too big, so we need to split it + for i in range(0, length, self.max_object_length): + yield SplitPage(page_num=i // self.max_object_length, text=all_text[i : i + self.max_object_length]) + return diff --git a/app/backend/requirements.in b/app/backend/requirements.in new file mode 100644 index 0000000000..ac889f2b9d --- /dev/null +++ b/app/backend/requirements.in @@ -0,0 +1,33 @@ +azure-identity +quart +quart-cors +openai>=1.3.7 +tiktoken +tenacity +azure-ai-documentintelligence==1.0.0b4 
+azure-cognitiveservices-speech +azure-cosmos +azure-search-documents==11.6.0b12 +azure-storage-blob +azure-storage-file-datalake +uvicorn +aiohttp +azure-monitor-opentelemetry +opentelemetry-instrumentation-asgi +opentelemetry-instrumentation-httpx +opentelemetry-instrumentation-aiohttp-client +opentelemetry-instrumentation-openai +msal +cryptography +PyJWT +Pillow +types-Pillow +pypdf +PyMuPDF +beautifulsoup4 +types-beautifulsoup4 +msgraph-sdk +python-dotenv +prompty +rich +typing-extensions diff --git a/app/backend/requirements.txt b/app/backend/requirements.txt index 5620fbd7ba..0cd4981685 100644 --- a/app/backend/requirements.txt +++ b/app/backend/requirements.txt @@ -1,7 +1,436 @@ -azure-identity==1.13.0 -Flask==2.2.5 -langchain==0.0.187 -openai[datalib]==0.27.8 -tiktoken==0.4.0 -azure-search-documents==11.4.0b6 -azure-storage-blob==12.14.1 +# This file was autogenerated by uv via the following command: +# uv pip compile requirements.in -o requirements.txt +aiofiles==24.1.0 + # via + # prompty + # quart +aiohappyeyeballs==2.4.4 + # via aiohttp +aiohttp==3.10.11 + # via + # -r requirements.in + # microsoft-kiota-authentication-azure +aiosignal==1.3.1 + # via aiohttp +annotated-types==0.7.0 + # via pydantic +anyio==4.4.0 + # via + # httpx + # openai +asgiref==3.8.1 + # via opentelemetry-instrumentation-asgi +attrs==24.2.0 + # via aiohttp +azure-ai-documentintelligence==1.0.0b4 + # via -r requirements.in +azure-cognitiveservices-speech==1.40.0 + # via -r requirements.in +azure-common==1.1.28 + # via azure-search-documents +azure-core==1.30.2 + # via + # azure-ai-documentintelligence + # azure-core-tracing-opentelemetry + # azure-cosmos + # azure-identity + # azure-monitor-opentelemetry + # azure-monitor-opentelemetry-exporter + # azure-search-documents + # azure-storage-blob + # azure-storage-file-datalake + # microsoft-kiota-authentication-azure + # msrest +azure-core-tracing-opentelemetry==1.0.0b11 + # via azure-monitor-opentelemetry +azure-cosmos==4.9.0 + # via -r requirements.in +azure-identity==1.17.1 + # via + # -r requirements.in + # msgraph-sdk +azure-monitor-opentelemetry==1.6.1 + # via -r requirements.in +azure-monitor-opentelemetry-exporter==1.0.0b32 + # via azure-monitor-opentelemetry +azure-search-documents==11.6.0b12 + # via -r requirements.in +azure-storage-blob==12.22.0 + # via + # -r requirements.in + # azure-storage-file-datalake +azure-storage-file-datalake==12.16.0 + # via -r requirements.in +beautifulsoup4==4.12.3 + # via -r requirements.in +blinker==1.8.2 + # via + # flask + # quart +certifi==2024.7.4 + # via + # httpcore + # httpx + # msrest + # requests +cffi==1.17.0 + # via cryptography +charset-normalizer==3.3.2 + # via requests +click==8.1.7 + # via + # flask + # prompty + # quart + # uvicorn +cryptography==44.0.1 + # via + # -r requirements.in + # azure-identity + # azure-storage-blob + # msal + # pyjwt +deprecated==1.2.14 + # via + # opentelemetry-api + # opentelemetry-semantic-conventions +distro==1.9.0 + # via openai +fixedint==0.1.6 + # via azure-monitor-opentelemetry-exporter +flask==3.0.3 + # via quart +frozenlist==1.4.1 + # via + # aiohttp + # aiosignal +h11==0.14.0 + # via + # httpcore + # hypercorn + # uvicorn + # wsproto +h2==4.1.0 + # via + # httpx + # hypercorn +hpack==4.0.0 + # via h2 +httpcore==1.0.5 + # via httpx +httpx==0.27.0 + # via + # microsoft-kiota-http + # msgraph-core + # openai +hypercorn==0.17.3 + # via quart +hyperframe==6.0.1 + # via h2 +idna==3.10 + # via + # anyio + # httpx + # requests + # yarl +importlib-metadata==8.0.0 + # via 
opentelemetry-api +isodate==0.6.1 + # via + # azure-ai-documentintelligence + # azure-search-documents + # azure-storage-blob + # azure-storage-file-datalake + # msrest +itsdangerous==2.2.0 + # via + # flask + # quart +jinja2==3.1.6 + # via + # flask + # prompty + # quart +jiter==0.8.2 + # via openai +markdown-it-py==3.0.0 + # via rich +markupsafe==2.1.5 + # via + # jinja2 + # quart + # werkzeug +mdurl==0.1.2 + # via markdown-it-py +microsoft-kiota-abstractions==1.9.3 + # via + # microsoft-kiota-authentication-azure + # microsoft-kiota-http + # microsoft-kiota-serialization-form + # microsoft-kiota-serialization-json + # microsoft-kiota-serialization-multipart + # microsoft-kiota-serialization-text + # msgraph-core +microsoft-kiota-authentication-azure==1.9.3 + # via msgraph-core +microsoft-kiota-http==1.9.3 + # via msgraph-core +microsoft-kiota-serialization-form==1.9.3 + # via msgraph-sdk +microsoft-kiota-serialization-json==1.9.3 + # via msgraph-sdk +microsoft-kiota-serialization-multipart==1.9.3 + # via msgraph-sdk +microsoft-kiota-serialization-text==1.9.3 + # via msgraph-sdk +msal==1.30.0 + # via + # -r requirements.in + # azure-identity + # msal-extensions +msal-extensions==1.3.1 + # via azure-identity +msgraph-core==1.3.3 + # via msgraph-sdk +msgraph-sdk==1.26.0 + # via -r requirements.in +msrest==0.7.1 + # via azure-monitor-opentelemetry-exporter +multidict==6.0.5 + # via + # aiohttp + # yarl +oauthlib==3.2.2 + # via requests-oauthlib +openai==1.63.0 + # via -r requirements.in +opentelemetry-api==1.31.1 + # via + # azure-core-tracing-opentelemetry + # azure-monitor-opentelemetry-exporter + # microsoft-kiota-abstractions + # microsoft-kiota-authentication-azure + # microsoft-kiota-http + # opentelemetry-instrumentation + # opentelemetry-instrumentation-aiohttp-client + # opentelemetry-instrumentation-asgi + # opentelemetry-instrumentation-dbapi + # opentelemetry-instrumentation-django + # opentelemetry-instrumentation-fastapi + # opentelemetry-instrumentation-flask + # opentelemetry-instrumentation-httpx + # opentelemetry-instrumentation-openai + # opentelemetry-instrumentation-psycopg2 + # opentelemetry-instrumentation-requests + # opentelemetry-instrumentation-urllib + # opentelemetry-instrumentation-urllib3 + # opentelemetry-instrumentation-wsgi + # opentelemetry-sdk + # opentelemetry-semantic-conventions +opentelemetry-instrumentation==0.52b1 + # via + # opentelemetry-instrumentation-aiohttp-client + # opentelemetry-instrumentation-asgi + # opentelemetry-instrumentation-dbapi + # opentelemetry-instrumentation-django + # opentelemetry-instrumentation-fastapi + # opentelemetry-instrumentation-flask + # opentelemetry-instrumentation-httpx + # opentelemetry-instrumentation-openai + # opentelemetry-instrumentation-psycopg2 + # opentelemetry-instrumentation-requests + # opentelemetry-instrumentation-urllib + # opentelemetry-instrumentation-urllib3 + # opentelemetry-instrumentation-wsgi +opentelemetry-instrumentation-aiohttp-client==0.52b1 + # via -r requirements.in +opentelemetry-instrumentation-asgi==0.52b1 + # via + # -r requirements.in + # opentelemetry-instrumentation-fastapi +opentelemetry-instrumentation-dbapi==0.52b1 + # via opentelemetry-instrumentation-psycopg2 +opentelemetry-instrumentation-django==0.52b1 + # via azure-monitor-opentelemetry +opentelemetry-instrumentation-fastapi==0.52b1 + # via azure-monitor-opentelemetry +opentelemetry-instrumentation-flask==0.52b1 + # via azure-monitor-opentelemetry +opentelemetry-instrumentation-httpx==0.52b1 + # via -r requirements.in 
+opentelemetry-instrumentation-openai==0.39.0 + # via -r requirements.in +opentelemetry-instrumentation-psycopg2==0.52b1 + # via azure-monitor-opentelemetry +opentelemetry-instrumentation-requests==0.52b1 + # via azure-monitor-opentelemetry +opentelemetry-instrumentation-urllib==0.52b1 + # via azure-monitor-opentelemetry +opentelemetry-instrumentation-urllib3==0.52b1 + # via azure-monitor-opentelemetry +opentelemetry-instrumentation-wsgi==0.52b1 + # via + # opentelemetry-instrumentation-django + # opentelemetry-instrumentation-flask +opentelemetry-resource-detector-azure==0.1.5 + # via azure-monitor-opentelemetry +opentelemetry-sdk==1.31.1 + # via + # azure-monitor-opentelemetry + # azure-monitor-opentelemetry-exporter + # microsoft-kiota-abstractions + # microsoft-kiota-authentication-azure + # microsoft-kiota-http + # opentelemetry-resource-detector-azure +opentelemetry-semantic-conventions==0.52b1 + # via + # opentelemetry-instrumentation + # opentelemetry-instrumentation-aiohttp-client + # opentelemetry-instrumentation-asgi + # opentelemetry-instrumentation-dbapi + # opentelemetry-instrumentation-django + # opentelemetry-instrumentation-fastapi + # opentelemetry-instrumentation-flask + # opentelemetry-instrumentation-httpx + # opentelemetry-instrumentation-openai + # opentelemetry-instrumentation-requests + # opentelemetry-instrumentation-urllib + # opentelemetry-instrumentation-urllib3 + # opentelemetry-instrumentation-wsgi + # opentelemetry-sdk +opentelemetry-semantic-conventions-ai==0.4.3 + # via opentelemetry-instrumentation-openai +opentelemetry-util-http==0.52b1 + # via + # opentelemetry-instrumentation-aiohttp-client + # opentelemetry-instrumentation-asgi + # opentelemetry-instrumentation-django + # opentelemetry-instrumentation-fastapi + # opentelemetry-instrumentation-flask + # opentelemetry-instrumentation-httpx + # opentelemetry-instrumentation-requests + # opentelemetry-instrumentation-urllib + # opentelemetry-instrumentation-urllib3 + # opentelemetry-instrumentation-wsgi +packaging==24.1 + # via + # opentelemetry-instrumentation + # opentelemetry-instrumentation-flask +pillow==10.4.0 + # via -r requirements.in +portalocker==2.10.1 + # via msal-extensions +priority==2.0.0 + # via hypercorn +prompty==0.1.50 + # via -r requirements.in +propcache==0.2.0 + # via yarl +psutil==5.9.8 + # via azure-monitor-opentelemetry-exporter +pycparser==2.22 + # via cffi +pydantic==2.8.2 + # via + # openai + # prompty +pydantic-core==2.20.1 + # via pydantic +pygments==2.18.0 + # via rich +pyjwt==2.10.1 + # via + # -r requirements.in + # msal +pymupdf==1.25.1 + # via -r requirements.in +pypdf==4.3.1 + # via -r requirements.in +python-dotenv==1.0.1 + # via + # -r requirements.in + # prompty +pyyaml==6.0.2 + # via prompty +quart==0.20.0 + # via + # -r requirements.in + # quart-cors +quart-cors==0.7.0 + # via -r requirements.in +regex==2024.11.6 + # via tiktoken +requests==2.32.3 + # via + # azure-core + # msal + # msrest + # requests-oauthlib + # tiktoken +requests-oauthlib==2.0.0 + # via msrest +rich==13.9.4 + # via -r requirements.in +six==1.16.0 + # via + # azure-core + # isodate +sniffio==1.3.1 + # via + # anyio + # httpx + # openai +soupsieve==2.6 + # via beautifulsoup4 +std-uritemplate==2.0.3 + # via microsoft-kiota-abstractions +tenacity==9.0.0 + # via -r requirements.in +tiktoken==0.8.0 + # via + # -r requirements.in + # opentelemetry-instrumentation-openai +tqdm==4.66.5 + # via openai +types-beautifulsoup4==4.12.0.20240511 + # via -r requirements.in +types-html5lib==1.1.11.20241018 + # 
via types-beautifulsoup4 +types-pillow==10.2.0.20240822 + # via -r requirements.in +typing-extensions==4.12.2 + # via + # -r requirements.in + # azure-ai-documentintelligence + # azure-core + # azure-cosmos + # azure-identity + # azure-search-documents + # azure-storage-blob + # azure-storage-file-datalake + # openai + # opentelemetry-sdk + # pydantic + # pydantic-core +urllib3==2.2.2 + # via requests +uvicorn==0.30.6 + # via -r requirements.in +werkzeug==3.0.6 + # via + # flask + # quart +wrapt==1.16.0 + # via + # deprecated + # opentelemetry-instrumentation + # opentelemetry-instrumentation-aiohttp-client + # opentelemetry-instrumentation-dbapi + # opentelemetry-instrumentation-httpx + # opentelemetry-instrumentation-urllib3 +wsproto==1.2.0 + # via hypercorn +yarl==1.17.2 + # via aiohttp +zipp==3.21.0 + # via importlib-metadata diff --git a/app/backend/text.py b/app/backend/text.py deleted file mode 100644 index 9209778c91..0000000000 --- a/app/backend/text.py +++ /dev/null @@ -1,2 +0,0 @@ -def nonewlines(s: str) -> str: - return s.replace('\n', ' ').replace('\r', ' ') diff --git a/app/frontend/.npmrc b/app/frontend/.npmrc index b6f27f1359..727cdb2649 100644 --- a/app/frontend/.npmrc +++ b/app/frontend/.npmrc @@ -1 +1,2 @@ engine-strict=true +fund=false diff --git a/app/frontend/.nvmrc b/app/frontend/.nvmrc new file mode 100644 index 0000000000..1d975bef24 --- /dev/null +++ b/app/frontend/.nvmrc @@ -0,0 +1 @@ +22.0.0 diff --git a/app/frontend/.prettierignore b/app/frontend/.prettierignore new file mode 100644 index 0000000000..fc355bcdfb --- /dev/null +++ b/app/frontend/.prettierignore @@ -0,0 +1,2 @@ +# Ignore JSON +**/*.json diff --git a/app/frontend/index.html b/app/frontend/index.html index 4d96bfbd97..30205db90f 100644 --- a/app/frontend/index.html +++ b/app/frontend/index.html @@ -1,13 +1,13 @@ - + - - - - - GPT + Enterprise data | Sample - - -
- - + + + + + Azure OpenAI + AI Search + + +
+ + diff --git a/app/frontend/package-lock.json b/app/frontend/package-lock.json index 71a9d2d84d..b4ec8fb7a0 100644 --- a/app/frontend/package-lock.json +++ b/app/frontend/package-lock.json @@ -1,89 +1,139 @@ { "name": "frontend", "version": "0.0.0", - "lockfileVersion": 2, + "lockfileVersion": 3, "requires": true, "packages": { "": { "name": "frontend", "version": "0.0.0", "dependencies": { - "@fluentui/react": "^8.110.7", - "@fluentui/react-icons": "^2.0.206", - "@react-spring/web": "^9.7.3", - "dompurify": "^3.0.4", - "react": "^18.2.0", - "react-dom": "^18.2.0", - "react-router-dom": "^6.14.1" + "@azure/msal-browser": "^3.26.1", + "@azure/msal-react": "^2.2.0", + "@fluentui/react": "^8.112.5", + "@fluentui/react-components": "^9.56.2", + "@fluentui/react-icons": "^2.0.265", + "@react-spring/web": "^9.7.5", + "dompurify": "^3.2.4", + "i18next": "^24.2.0", + "i18next-browser-languagedetector": "^8.0.2", + "i18next-http-backend": "^3.0.1", + "idb": "^8.0.0", + "ndjson-readablestream": "^1.2.0", + "react": "^18.3.1", + "react-dom": "^18.3.1", + "react-helmet-async": "^2.0.5", + "react-i18next": "^15.4.1", + "react-markdown": "^9.0.1", + "react-router-dom": "^6.28.0", + "react-syntax-highlighter": "^15.6.1", + "rehype-raw": "^7.0.0", + "remark-gfm": "^4.0.0", + "scheduler": "^0.20.2" }, "devDependencies": { - "@types/dompurify": "^3.0.2", - "@types/react": "^18.2.14", - "@types/react-dom": "^18.2.6", - "@vitejs/plugin-react": "^4.0.2", - "prettier": "^3.0.0", - "typescript": "^5.1.6", - "vite": "^4.4.2" + "@types/dom-speech-recognition": "^0.0.4", + "@types/dompurify": "^3.0.5", + "@types/react": "^18.3.12", + "@types/react-dom": "^18.3.1", + "@types/react-syntax-highlighter": "^15.5.13", + "@vitejs/plugin-react": "^4.3.3", + "prettier": "^3.3.3", + "rollup-plugin-visualizer": "^5.12.0", + "typescript": "^5.6.3", + "vite": "^5.4.18" }, "engines": { "node": ">=14.0.0" } }, "node_modules/@ampproject/remapping": { - "version": "2.2.1", - "resolved": "https://registry.npmjs.org/@ampproject/remapping/-/remapping-2.2.1.tgz", - "integrity": "sha512-lFMjJTrFL3j7L9yBxwYfCq2k6qqwHyzuUl/XBnif78PWTJYyL/dfowQHWE3sp6U6ZzqWiiIZnpTMO96zhkjwtg==", + "version": "2.3.0", + "resolved": "https://registry.npmjs.org/@ampproject/remapping/-/remapping-2.3.0.tgz", + "integrity": "sha512-30iZtAPgz+LTIYoeivqYo853f02jBYSd5uGnGpkFV0M3xOt9aN73erkgYAmZU43x4VfqcnLxW9Kpg3R5LC4YYw==", "dev": true, "dependencies": { - "@jridgewell/gen-mapping": "^0.3.0", - "@jridgewell/trace-mapping": "^0.3.9" + "@jridgewell/gen-mapping": "^0.3.5", + "@jridgewell/trace-mapping": "^0.3.24" }, "engines": { "node": ">=6.0.0" } }, + "node_modules/@azure/msal-browser": { + "version": "3.27.0", + "resolved": "https://registry.npmjs.org/@azure/msal-browser/-/msal-browser-3.27.0.tgz", + "integrity": "sha512-+b4ZKSD8+vslCtVRVetkegEhOFMLP3rxDWJY212ct+2r6jVg6OSQKc1Qz3kCoXo0FgwaXkb+76TMZfpHp8QtgA==", + "dependencies": { + "@azure/msal-common": "14.16.0" + }, + "engines": { + "node": ">=0.8.0" + } + }, + "node_modules/@azure/msal-common": { + "version": "14.16.0", + "resolved": "https://registry.npmjs.org/@azure/msal-common/-/msal-common-14.16.0.tgz", + "integrity": "sha512-1KOZj9IpcDSwpNiQNjt0jDYZpQvNZay7QAEi/5DLubay40iGYtLzya/jbjRPLyOTZhEKyL1MzPuw2HqBCjceYA==", + "engines": { + "node": ">=0.8.0" + } + }, + "node_modules/@azure/msal-react": { + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/@azure/msal-react/-/msal-react-2.2.0.tgz", + "integrity": 
"sha512-2V+9JXeXyyjYNF92y5u0tU4el9px/V1+vkRuN+DtoxyiMHCtYQpJoaFdGWArh43zhz5aqQqiGW/iajPDSu3QsQ==", + "engines": { + "node": ">=10" + }, + "peerDependencies": { + "@azure/msal-browser": "^3.27.0", + "react": "^16.8.0 || ^17 || ^18" + } + }, "node_modules/@babel/code-frame": { - "version": "7.22.5", - "resolved": "https://registry.npmjs.org/@babel/code-frame/-/code-frame-7.22.5.tgz", - "integrity": "sha512-Xmwn266vad+6DAqEB2A6V/CcZVp62BbwVmcOJc2RPuwih1kw02TjQvWVWlcKGbBPd+8/0V5DEkOcizRGYsspYQ==", + "version": "7.25.7", + "resolved": "https://registry.npmjs.org/@babel/code-frame/-/code-frame-7.25.7.tgz", + "integrity": "sha512-0xZJFNE5XMpENsgfHYTw8FbX4kv53mFLn2i3XPoq69LyhYSCBJtitaHx9QnsVTrsogI4Z3+HtEfZ2/GFPOtf5g==", "dev": true, "dependencies": { - "@babel/highlight": "^7.22.5" + "@babel/highlight": "^7.25.7", + "picocolors": "^1.0.0" }, "engines": { "node": ">=6.9.0" } }, "node_modules/@babel/compat-data": { - "version": "7.22.6", - "resolved": "https://registry.npmjs.org/@babel/compat-data/-/compat-data-7.22.6.tgz", - "integrity": "sha512-29tfsWTq2Ftu7MXmimyC0C5FDZv5DYxOZkh3XD3+QW4V/BYuv/LyEsjj3c0hqedEaDt6DBfDvexMKU8YevdqFg==", + "version": "7.25.7", + "resolved": "https://registry.npmjs.org/@babel/compat-data/-/compat-data-7.25.7.tgz", + "integrity": "sha512-9ickoLz+hcXCeh7jrcin+/SLWm+GkxE2kTvoYyp38p4WkdFXfQJxDFGWp/YHjiKLPx06z2A7W8XKuqbReXDzsw==", "dev": true, "engines": { "node": ">=6.9.0" } }, "node_modules/@babel/core": { - "version": "7.22.8", - "resolved": "https://registry.npmjs.org/@babel/core/-/core-7.22.8.tgz", - "integrity": "sha512-75+KxFB4CZqYRXjx4NlR4J7yGvKumBuZTmV4NV6v09dVXXkuYVYLT68N6HCzLvfJ+fWCxQsntNzKwwIXL4bHnw==", + "version": "7.25.7", + "resolved": "https://registry.npmjs.org/@babel/core/-/core-7.25.7.tgz", + "integrity": "sha512-yJ474Zv3cwiSOO9nXJuqzvwEeM+chDuQ8GJirw+pZ91sCGCyOZ3dJkVE09fTV0VEVzXyLWhh3G/AolYTPX7Mow==", "dev": true, "dependencies": { "@ampproject/remapping": "^2.2.0", - "@babel/code-frame": "^7.22.5", - "@babel/generator": "^7.22.7", - "@babel/helper-compilation-targets": "^7.22.6", - "@babel/helper-module-transforms": "^7.22.5", - "@babel/helpers": "^7.22.6", - "@babel/parser": "^7.22.7", - "@babel/template": "^7.22.5", - "@babel/traverse": "^7.22.8", - "@babel/types": "^7.22.5", - "@nicolo-ribaudo/semver-v6": "^6.3.3", - "convert-source-map": "^1.7.0", + "@babel/code-frame": "^7.25.7", + "@babel/generator": "^7.25.7", + "@babel/helper-compilation-targets": "^7.25.7", + "@babel/helper-module-transforms": "^7.25.7", + "@babel/helpers": "^7.25.7", + "@babel/parser": "^7.25.7", + "@babel/template": "^7.25.7", + "@babel/traverse": "^7.25.7", + "@babel/types": "^7.25.7", + "convert-source-map": "^2.0.0", "debug": "^4.1.0", "gensync": "^1.0.0-beta.2", - "json5": "^2.2.2" + "json5": "^2.2.3", + "semver": "^6.3.1" }, "engines": { "node": ">=6.9.0" @@ -94,197 +144,152 @@ } }, "node_modules/@babel/generator": { - "version": "7.22.7", - "resolved": "https://registry.npmjs.org/@babel/generator/-/generator-7.22.7.tgz", - "integrity": "sha512-p+jPjMG+SI8yvIaxGgeW24u7q9+5+TGpZh8/CuB7RhBKd7RCy8FayNEFNNKrNK/eUcY/4ExQqLmyrvBXKsIcwQ==", + "version": "7.25.7", + "resolved": "https://registry.npmjs.org/@babel/generator/-/generator-7.25.7.tgz", + "integrity": "sha512-5Dqpl5fyV9pIAD62yK9P7fcA768uVPUyrQmqpqstHWgMma4feF1x/oFysBCVZLY5wJ2GkMUCdsNDnGZrPoR6rA==", "dev": true, "dependencies": { - "@babel/types": "^7.22.5", - "@jridgewell/gen-mapping": "^0.3.2", - "@jridgewell/trace-mapping": "^0.3.17", - "jsesc": "^2.5.1" + "@babel/types": "^7.25.7", + "@jridgewell/gen-mapping": 
"^0.3.5", + "@jridgewell/trace-mapping": "^0.3.25", + "jsesc": "^3.0.2" }, "engines": { "node": ">=6.9.0" } }, "node_modules/@babel/helper-compilation-targets": { - "version": "7.22.6", - "resolved": "https://registry.npmjs.org/@babel/helper-compilation-targets/-/helper-compilation-targets-7.22.6.tgz", - "integrity": "sha512-534sYEqWD9VfUm3IPn2SLcH4Q3P86XL+QvqdC7ZsFrzyyPF3T4XGiVghF6PTYNdWg6pXuoqXxNQAhbYeEInTzA==", - "dev": true, - "dependencies": { - "@babel/compat-data": "^7.22.6", - "@babel/helper-validator-option": "^7.22.5", - "@nicolo-ribaudo/semver-v6": "^6.3.3", - "browserslist": "^4.21.9", - "lru-cache": "^5.1.1" - }, - "engines": { - "node": ">=6.9.0" - }, - "peerDependencies": { - "@babel/core": "^7.0.0" - } - }, - "node_modules/@babel/helper-environment-visitor": { - "version": "7.22.5", - "resolved": "https://registry.npmjs.org/@babel/helper-environment-visitor/-/helper-environment-visitor-7.22.5.tgz", - "integrity": "sha512-XGmhECfVA/5sAt+H+xpSg0mfrHq6FzNr9Oxh7PSEBBRUb/mL7Kz3NICXb194rCqAEdxkhPT1a88teizAFyvk8Q==", - "dev": true, - "engines": { - "node": ">=6.9.0" - } - }, - "node_modules/@babel/helper-function-name": { - "version": "7.22.5", - "resolved": "https://registry.npmjs.org/@babel/helper-function-name/-/helper-function-name-7.22.5.tgz", - "integrity": "sha512-wtHSq6jMRE3uF2otvfuD3DIvVhOsSNshQl0Qrd7qC9oQJzHvOL4qQXlQn2916+CXGywIjpGuIkoyZRRxHPiNQQ==", + "version": "7.25.7", + "resolved": "https://registry.npmjs.org/@babel/helper-compilation-targets/-/helper-compilation-targets-7.25.7.tgz", + "integrity": "sha512-DniTEax0sv6isaw6qSQSfV4gVRNtw2rte8HHM45t9ZR0xILaufBRNkpMifCRiAPyvL4ACD6v0gfCwCmtOQaV4A==", "dev": true, "dependencies": { - "@babel/template": "^7.22.5", - "@babel/types": "^7.22.5" - }, - "engines": { - "node": ">=6.9.0" - } - }, - "node_modules/@babel/helper-hoist-variables": { - "version": "7.22.5", - "resolved": "https://registry.npmjs.org/@babel/helper-hoist-variables/-/helper-hoist-variables-7.22.5.tgz", - "integrity": "sha512-wGjk9QZVzvknA6yKIUURb8zY3grXCcOZt+/7Wcy8O2uctxhplmUPkOdlgoNhmdVee2c92JXbf1xpMtVNbfoxRw==", - "dev": true, - "dependencies": { - "@babel/types": "^7.22.5" + "@babel/compat-data": "^7.25.7", + "@babel/helper-validator-option": "^7.25.7", + "browserslist": "^4.24.0", + "lru-cache": "^5.1.1", + "semver": "^6.3.1" }, "engines": { "node": ">=6.9.0" } }, "node_modules/@babel/helper-module-imports": { - "version": "7.22.5", - "resolved": "https://registry.npmjs.org/@babel/helper-module-imports/-/helper-module-imports-7.22.5.tgz", - "integrity": "sha512-8Dl6+HD/cKifutF5qGd/8ZJi84QeAKh+CEe1sBzz8UayBBGg1dAIJrdHOcOM5b2MpzWL2yuotJTtGjETq0qjXg==", + "version": "7.25.7", + "resolved": "https://registry.npmjs.org/@babel/helper-module-imports/-/helper-module-imports-7.25.7.tgz", + "integrity": "sha512-o0xCgpNmRohmnoWKQ0Ij8IdddjyBFE4T2kagL/x6M3+4zUgc+4qTOUBoNe4XxDskt1HPKO007ZPiMgLDq2s7Kw==", "dev": true, "dependencies": { - "@babel/types": "^7.22.5" + "@babel/traverse": "^7.25.7", + "@babel/types": "^7.25.7" }, "engines": { "node": ">=6.9.0" } }, "node_modules/@babel/helper-module-transforms": { - "version": "7.22.5", - "resolved": "https://registry.npmjs.org/@babel/helper-module-transforms/-/helper-module-transforms-7.22.5.tgz", - "integrity": "sha512-+hGKDt/Ze8GFExiVHno/2dvG5IdstpzCq0y4Qc9OJ25D4q3pKfiIP/4Vp3/JvhDkLKsDK2api3q3fpIgiIF5bw==", + "version": "7.25.7", + "resolved": "https://registry.npmjs.org/@babel/helper-module-transforms/-/helper-module-transforms-7.25.7.tgz", + "integrity": 
"sha512-k/6f8dKG3yDz/qCwSM+RKovjMix563SLxQFo0UhRNo239SP6n9u5/eLtKD6EAjwta2JHJ49CsD8pms2HdNiMMQ==", "dev": true, "dependencies": { - "@babel/helper-environment-visitor": "^7.22.5", - "@babel/helper-module-imports": "^7.22.5", - "@babel/helper-simple-access": "^7.22.5", - "@babel/helper-split-export-declaration": "^7.22.5", - "@babel/helper-validator-identifier": "^7.22.5", - "@babel/template": "^7.22.5", - "@babel/traverse": "^7.22.5", - "@babel/types": "^7.22.5" + "@babel/helper-module-imports": "^7.25.7", + "@babel/helper-simple-access": "^7.25.7", + "@babel/helper-validator-identifier": "^7.25.7", + "@babel/traverse": "^7.25.7" }, "engines": { "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0" } }, "node_modules/@babel/helper-plugin-utils": { - "version": "7.22.5", - "resolved": "https://registry.npmjs.org/@babel/helper-plugin-utils/-/helper-plugin-utils-7.22.5.tgz", - "integrity": "sha512-uLls06UVKgFG9QD4OeFYLEGteMIAa5kpTPcFL28yuCIIzsf6ZyKZMllKVOCZFhiZ5ptnwX4mtKdWCBE/uT4amg==", + "version": "7.24.7", + "resolved": "https://registry.npmjs.org/@babel/helper-plugin-utils/-/helper-plugin-utils-7.24.7.tgz", + "integrity": "sha512-Rq76wjt7yz9AAc1KnlRKNAi/dMSVWgDRx43FHoJEbcYU6xOWaE2dVPwcdTukJrjxS65GITyfbvEYHvkirZ6uEg==", "dev": true, "engines": { "node": ">=6.9.0" } }, "node_modules/@babel/helper-simple-access": { - "version": "7.22.5", - "resolved": "https://registry.npmjs.org/@babel/helper-simple-access/-/helper-simple-access-7.22.5.tgz", - "integrity": "sha512-n0H99E/K+Bika3++WNL17POvo4rKWZ7lZEp1Q+fStVbUi8nxPQEBOlTmCOxW/0JsS56SKKQ+ojAe2pHKJHN35w==", - "dev": true, - "dependencies": { - "@babel/types": "^7.22.5" - }, - "engines": { - "node": ">=6.9.0" - } - }, - "node_modules/@babel/helper-split-export-declaration": { - "version": "7.22.6", - "resolved": "https://registry.npmjs.org/@babel/helper-split-export-declaration/-/helper-split-export-declaration-7.22.6.tgz", - "integrity": "sha512-AsUnxuLhRYsisFiaJwvp1QF+I3KjD5FOxut14q/GzovUe6orHLesW2C7d754kRm53h5gqrz6sFl6sxc4BVtE/g==", + "version": "7.25.7", + "resolved": "https://registry.npmjs.org/@babel/helper-simple-access/-/helper-simple-access-7.25.7.tgz", + "integrity": "sha512-FPGAkJmyoChQeM+ruBGIDyrT2tKfZJO8NcxdC+CWNJi7N8/rZpSxK7yvBJ5O/nF1gfu5KzN7VKG3YVSLFfRSxQ==", "dev": true, "dependencies": { - "@babel/types": "^7.22.5" + "@babel/traverse": "^7.25.7", + "@babel/types": "^7.25.7" }, "engines": { "node": ">=6.9.0" } }, "node_modules/@babel/helper-string-parser": { - "version": "7.22.5", - "resolved": "https://registry.npmjs.org/@babel/helper-string-parser/-/helper-string-parser-7.22.5.tgz", - "integrity": "sha512-mM4COjgZox8U+JcXQwPijIZLElkgEpO5rsERVDJTc2qfCDfERyob6k5WegS14SX18IIjv+XD+GrqNumY5JRCDw==", + "version": "7.25.7", + "resolved": "https://registry.npmjs.org/@babel/helper-string-parser/-/helper-string-parser-7.25.7.tgz", + "integrity": "sha512-CbkjYdsJNHFk8uqpEkpCvRs3YRp9tY6FmFY7wLMSYuGYkrdUi7r2lc4/wqsvlHoMznX3WJ9IP8giGPq68T/Y6g==", "dev": true, "engines": { "node": ">=6.9.0" } }, "node_modules/@babel/helper-validator-identifier": { - "version": "7.22.5", - "resolved": "https://registry.npmjs.org/@babel/helper-validator-identifier/-/helper-validator-identifier-7.22.5.tgz", - "integrity": "sha512-aJXu+6lErq8ltp+JhkJUfk1MTGyuA4v7f3pA+BJ5HLfNC6nAQ0Cpi9uOquUj8Hehg0aUiHzWQbOVJGao6ztBAQ==", + "version": "7.25.7", + "resolved": "https://registry.npmjs.org/@babel/helper-validator-identifier/-/helper-validator-identifier-7.25.7.tgz", + "integrity": 
"sha512-AM6TzwYqGChO45oiuPqwL2t20/HdMC1rTPAesnBCgPCSF1x3oN9MVUwQV2iyz4xqWrctwK5RNC8LV22kaQCNYg==", "dev": true, "engines": { "node": ">=6.9.0" } }, "node_modules/@babel/helper-validator-option": { - "version": "7.22.5", - "resolved": "https://registry.npmjs.org/@babel/helper-validator-option/-/helper-validator-option-7.22.5.tgz", - "integrity": "sha512-R3oB6xlIVKUnxNUxbmgq7pKjxpru24zlimpE8WK47fACIlM0II/Hm1RS8IaOI7NgCr6LNS+jl5l75m20npAziw==", + "version": "7.25.7", + "resolved": "https://registry.npmjs.org/@babel/helper-validator-option/-/helper-validator-option-7.25.7.tgz", + "integrity": "sha512-ytbPLsm+GjArDYXJ8Ydr1c/KJuutjF2besPNbIZnZ6MKUxi/uTA22t2ymmA4WFjZFpjiAMO0xuuJPqK2nvDVfQ==", "dev": true, "engines": { "node": ">=6.9.0" } }, "node_modules/@babel/helpers": { - "version": "7.22.6", - "resolved": "https://registry.npmjs.org/@babel/helpers/-/helpers-7.22.6.tgz", - "integrity": "sha512-YjDs6y/fVOYFV8hAf1rxd1QvR9wJe1pDBZ2AREKq/SDayfPzgk0PBnVuTCE5X1acEpMMNOVUqoe+OwiZGJ+OaA==", + "version": "7.25.7", + "resolved": "https://registry.npmjs.org/@babel/helpers/-/helpers-7.25.7.tgz", + "integrity": "sha512-Sv6pASx7Esm38KQpF/U/OXLwPPrdGHNKoeblRxgZRLXnAtnkEe4ptJPDtAZM7fBLadbc1Q07kQpSiGQ0Jg6tRA==", "dev": true, "dependencies": { - "@babel/template": "^7.22.5", - "@babel/traverse": "^7.22.6", - "@babel/types": "^7.22.5" + "@babel/template": "^7.25.7", + "@babel/types": "^7.25.7" }, "engines": { "node": ">=6.9.0" } }, "node_modules/@babel/highlight": { - "version": "7.22.5", - "resolved": "https://registry.npmjs.org/@babel/highlight/-/highlight-7.22.5.tgz", - "integrity": "sha512-BSKlD1hgnedS5XRnGOljZawtag7H1yPfQp0tdNJCHoH6AZ+Pcm9VvkrK59/Yy593Ypg0zMxH2BxD1VPYUQ7UIw==", + "version": "7.25.7", + "resolved": "https://registry.npmjs.org/@babel/highlight/-/highlight-7.25.7.tgz", + "integrity": "sha512-iYyACpW3iW8Fw+ZybQK+drQre+ns/tKpXbNESfrhNnPLIklLbXr7MYJ6gPEd0iETGLOK+SxMjVvKb/ffmk+FEw==", "dev": true, "dependencies": { - "@babel/helper-validator-identifier": "^7.22.5", - "chalk": "^2.0.0", - "js-tokens": "^4.0.0" + "@babel/helper-validator-identifier": "^7.25.7", + "chalk": "^2.4.2", + "js-tokens": "^4.0.0", + "picocolors": "^1.0.0" }, "engines": { "node": ">=6.9.0" } }, "node_modules/@babel/parser": { - "version": "7.22.7", - "resolved": "https://registry.npmjs.org/@babel/parser/-/parser-7.22.7.tgz", - "integrity": "sha512-7NF8pOkHP5o2vpmGgNGcfAeCvOYhGLyA3Z4eBQkT1RJlWu47n63bCs93QfJ2hIAFCil7L5P2IWhs1oToVgrL0Q==", + "version": "7.25.7", + "resolved": "https://registry.npmjs.org/@babel/parser/-/parser-7.25.7.tgz", + "integrity": "sha512-aZn7ETtQsjjGG5HruveUK06cU3Hljuhd9Iojm4M8WWv3wLE6OkE5PWbDUkItmMgegmccaITudyuW5RPYrYlgWw==", "dev": true, + "dependencies": { + "@babel/types": "^7.25.7" + }, "bin": { "parser": "bin/babel-parser.js" }, @@ -293,12 +298,12 @@ } }, "node_modules/@babel/plugin-transform-react-jsx-self": { - "version": "7.22.5", - "resolved": "https://registry.npmjs.org/@babel/plugin-transform-react-jsx-self/-/plugin-transform-react-jsx-self-7.22.5.tgz", - "integrity": "sha512-nTh2ogNUtxbiSbxaT4Ds6aXnXEipHweN9YRgOX/oNXdf0cCrGn/+2LozFa3lnPV5D90MkjhgckCPBrsoSc1a7g==", + "version": "7.24.7", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-react-jsx-self/-/plugin-transform-react-jsx-self-7.24.7.tgz", + "integrity": "sha512-fOPQYbGSgH0HUp4UJO4sMBFjY6DuWq+2i8rixyUMb3CdGixs/gccURvYOAhajBdKDoGajFr3mUq5rH3phtkGzw==", "dev": true, "dependencies": { - "@babel/helper-plugin-utils": "^7.22.5" + "@babel/helper-plugin-utils": "^7.24.7" }, "engines": { "node": ">=6.9.0" @@ -308,12 +313,12 @@ 
} }, "node_modules/@babel/plugin-transform-react-jsx-source": { - "version": "7.22.5", - "resolved": "https://registry.npmjs.org/@babel/plugin-transform-react-jsx-source/-/plugin-transform-react-jsx-source-7.22.5.tgz", - "integrity": "sha512-yIiRO6yobeEIaI0RTbIr8iAK9FcBHLtZq0S89ZPjDLQXBA4xvghaKqI0etp/tF3htTM0sazJKKLz9oEiGRtu7w==", + "version": "7.24.7", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-react-jsx-source/-/plugin-transform-react-jsx-source-7.24.7.tgz", + "integrity": "sha512-J2z+MWzZHVOemyLweMqngXrgGC42jQ//R0KdxqkIz/OrbVIIlhFI3WigZ5fO+nwFvBlncr4MGapd8vTyc7RPNQ==", "dev": true, "dependencies": { - "@babel/helper-plugin-utils": "^7.22.5" + "@babel/helper-plugin-utils": "^7.24.7" }, "engines": { "node": ">=6.9.0" @@ -323,45 +328,43 @@ } }, "node_modules/@babel/runtime": { - "version": "7.21.0", - "resolved": "https://registry.npmjs.org/@babel/runtime/-/runtime-7.21.0.tgz", - "integrity": "sha512-xwII0//EObnq89Ji5AKYQaRYiW/nZ3llSv29d49IuxPhKbtJoLP+9QUUZ4nVragQVtaVGeZrpB+ZtG/Pdy/POw==", + "version": "7.27.0", + "resolved": "https://registry.npmjs.org/@babel/runtime/-/runtime-7.27.0.tgz", + "integrity": "sha512-VtPOkrdPHZsKc/clNqyi9WUA8TINkZ4cGk63UUE3u4pmB2k+ZMQRDuIOagv8UVd6j7k0T3+RRIb7beKTebNbcw==", + "license": "MIT", "dependencies": { - "regenerator-runtime": "^0.13.11" + "regenerator-runtime": "^0.14.0" }, "engines": { "node": ">=6.9.0" } }, "node_modules/@babel/template": { - "version": "7.22.5", - "resolved": "https://registry.npmjs.org/@babel/template/-/template-7.22.5.tgz", - "integrity": "sha512-X7yV7eiwAxdj9k94NEylvbVHLiVG1nvzCV2EAowhxLTwODV1jl9UzZ48leOC0sH7OnuHrIkllaBgneUykIcZaw==", + "version": "7.25.7", + "resolved": "https://registry.npmjs.org/@babel/template/-/template-7.25.7.tgz", + "integrity": "sha512-wRwtAgI3bAS+JGU2upWNL9lSlDcRCqD05BZ1n3X2ONLH1WilFP6O1otQjeMK/1g0pvYcXC7b/qVUB1keofjtZA==", "dev": true, "dependencies": { - "@babel/code-frame": "^7.22.5", - "@babel/parser": "^7.22.5", - "@babel/types": "^7.22.5" + "@babel/code-frame": "^7.25.7", + "@babel/parser": "^7.25.7", + "@babel/types": "^7.25.7" }, "engines": { "node": ">=6.9.0" } }, "node_modules/@babel/traverse": { - "version": "7.22.8", - "resolved": "https://registry.npmjs.org/@babel/traverse/-/traverse-7.22.8.tgz", - "integrity": "sha512-y6LPR+wpM2I3qJrsheCTwhIinzkETbplIgPBbwvqPKc+uljeA5gP+3nP8irdYt1mjQaDnlIcG+dw8OjAco4GXw==", - "dev": true, - "dependencies": { - "@babel/code-frame": "^7.22.5", - "@babel/generator": "^7.22.7", - "@babel/helper-environment-visitor": "^7.22.5", - "@babel/helper-function-name": "^7.22.5", - "@babel/helper-hoist-variables": "^7.22.5", - "@babel/helper-split-export-declaration": "^7.22.6", - "@babel/parser": "^7.22.7", - "@babel/types": "^7.22.5", - "debug": "^4.1.0", + "version": "7.25.7", + "resolved": "https://registry.npmjs.org/@babel/traverse/-/traverse-7.25.7.tgz", + "integrity": "sha512-jatJPT1Zjqvh/1FyJs6qAHL+Dzb7sTb+xr7Q+gM1b+1oBsMsQQ4FkVKb6dFlJvLlVssqkRzV05Jzervt9yhnzg==", + "dev": true, + "dependencies": { + "@babel/code-frame": "^7.25.7", + "@babel/generator": "^7.25.7", + "@babel/parser": "^7.25.7", + "@babel/template": "^7.25.7", + "@babel/types": "^7.25.7", + "debug": "^4.3.1", "globals": "^11.1.0" }, "engines": { @@ -369,13 +372,13 @@ } }, "node_modules/@babel/types": { - "version": "7.22.5", - "resolved": "https://registry.npmjs.org/@babel/types/-/types-7.22.5.tgz", - "integrity": "sha512-zo3MIHGOkPOfoRXitsgHLjEXmlDaD/5KU1Uzuc9GNiZPhSqVxVRtxuPaSBZDsYZ9qV88AjtMtWW7ww98loJ9KA==", + "version": "7.25.7", + "resolved": 
"https://registry.npmjs.org/@babel/types/-/types-7.25.7.tgz", + "integrity": "sha512-vwIVdXG+j+FOpkwqHRcBgHLYNL7XMkufrlaFvL9o6Ai9sJn9+PdyIL5qa0XzTZw084c+u9LOls53eoZWP/W5WQ==", "dev": true, "dependencies": { - "@babel/helper-string-parser": "^7.22.5", - "@babel/helper-validator-identifier": "^7.22.5", + "@babel/helper-string-parser": "^7.25.7", + "@babel/helper-validator-identifier": "^7.25.7", "to-fast-properties": "^2.0.0" }, "engines": { @@ -383,14 +386,31 @@ } }, "node_modules/@emotion/hash": { - "version": "0.9.0", - "resolved": "https://registry.npmjs.org/@emotion/hash/-/hash-0.9.0.tgz", - "integrity": "sha512-14FtKiHhy2QoPIzdTcvh//8OyBlknNs2nXRwIhG904opCby3l+9Xaf/wuPvICBF0rc1ZCNBd3nKe9cd2mecVkQ==" + "version": "0.9.2", + "resolved": "https://registry.npmjs.org/@emotion/hash/-/hash-0.9.2.tgz", + "integrity": "sha512-MyqliTZGuOm3+5ZRSaaBGP3USLw6+EGykkwZns2EPC5g8jJ4z9OrdZY9apkl3+UP9+sdz76YYkwCKP5gh8iY3g==", + "license": "MIT" + }, + "node_modules/@esbuild/aix-ppc64": { + "version": "0.21.5", + "resolved": "https://registry.npmjs.org/@esbuild/aix-ppc64/-/aix-ppc64-0.21.5.tgz", + "integrity": "sha512-1SDgH6ZSPTlggy1yI6+Dbkiz8xzpHJEVAlF/AM1tHPLsf5STom9rwtjE4hKAF20FfXXNTFqEYXyJNWh1GiZedQ==", + "cpu": [ + "ppc64" + ], + "dev": true, + "optional": true, + "os": [ + "aix" + ], + "engines": { + "node": ">=12" + } }, "node_modules/@esbuild/android-arm": { - "version": "0.18.11", - "resolved": "https://registry.npmjs.org/@esbuild/android-arm/-/android-arm-0.18.11.tgz", - "integrity": "sha512-q4qlUf5ucwbUJZXF5tEQ8LF7y0Nk4P58hOsGk3ucY0oCwgQqAnqXVbUuahCddVHfrxmpyewRpiTHwVHIETYu7Q==", + "version": "0.21.5", + "resolved": "https://registry.npmjs.org/@esbuild/android-arm/-/android-arm-0.21.5.tgz", + "integrity": "sha512-vCPvzSjpPHEi1siZdlvAlsPxXl7WbOVUBBAowWug4rJHb68Ox8KualB+1ocNvT5fjv6wpkX6o/iEpbDrf68zcg==", "cpu": [ "arm" ], @@ -404,9 +424,9 @@ } }, "node_modules/@esbuild/android-arm64": { - "version": "0.18.11", - "resolved": "https://registry.npmjs.org/@esbuild/android-arm64/-/android-arm64-0.18.11.tgz", - "integrity": "sha512-snieiq75Z1z5LJX9cduSAjUr7vEI1OdlzFPMw0HH5YI7qQHDd3qs+WZoMrWYDsfRJSq36lIA6mfZBkvL46KoIw==", + "version": "0.21.5", + "resolved": "https://registry.npmjs.org/@esbuild/android-arm64/-/android-arm64-0.21.5.tgz", + "integrity": "sha512-c0uX9VAUBQ7dTDCjq+wdyGLowMdtR/GoC2U5IYk/7D1H1JYC0qseD7+11iMP2mRLN9RcCMRcjC4YMclCzGwS/A==", "cpu": [ "arm64" ], @@ -420,9 +440,9 @@ } }, "node_modules/@esbuild/android-x64": { - "version": "0.18.11", - "resolved": "https://registry.npmjs.org/@esbuild/android-x64/-/android-x64-0.18.11.tgz", - "integrity": "sha512-iPuoxQEV34+hTF6FT7om+Qwziv1U519lEOvekXO9zaMMlT9+XneAhKL32DW3H7okrCOBQ44BMihE8dclbZtTuw==", + "version": "0.21.5", + "resolved": "https://registry.npmjs.org/@esbuild/android-x64/-/android-x64-0.21.5.tgz", + "integrity": "sha512-D7aPRUUNHRBwHxzxRvp856rjUHRFW1SdQATKXH2hqA0kAZb1hKmi02OpYRacl0TxIGz/ZmXWlbZgjwWYaCakTA==", "cpu": [ "x64" ], @@ -436,9 +456,9 @@ } }, "node_modules/@esbuild/darwin-arm64": { - "version": "0.18.11", - "resolved": "https://registry.npmjs.org/@esbuild/darwin-arm64/-/darwin-arm64-0.18.11.tgz", - "integrity": "sha512-Gm0QkI3k402OpfMKyQEEMG0RuW2LQsSmI6OeO4El2ojJMoF5NLYb3qMIjvbG/lbMeLOGiW6ooU8xqc+S0fgz2w==", + "version": "0.21.5", + "resolved": "https://registry.npmjs.org/@esbuild/darwin-arm64/-/darwin-arm64-0.21.5.tgz", + "integrity": "sha512-DwqXqZyuk5AiWWf3UfLiRDJ5EDd49zg6O9wclZ7kUMv2WRFr4HKjXp/5t8JZ11QbQfUS6/cRCKGwYhtNAY88kQ==", "cpu": [ "arm64" ], @@ -452,9 +472,9 @@ } }, "node_modules/@esbuild/darwin-x64": { - "version": 
"0.18.11", - "resolved": "https://registry.npmjs.org/@esbuild/darwin-x64/-/darwin-x64-0.18.11.tgz", - "integrity": "sha512-N15Vzy0YNHu6cfyDOjiyfJlRJCB/ngKOAvoBf1qybG3eOq0SL2Lutzz9N7DYUbb7Q23XtHPn6lMDF6uWbGv9Fw==", + "version": "0.21.5", + "resolved": "https://registry.npmjs.org/@esbuild/darwin-x64/-/darwin-x64-0.21.5.tgz", + "integrity": "sha512-se/JjF8NlmKVG4kNIuyWMV/22ZaerB+qaSi5MdrXtd6R08kvs2qCN4C09miupktDitvh8jRFflwGFBQcxZRjbw==", "cpu": [ "x64" ], @@ -468,9 +488,9 @@ } }, "node_modules/@esbuild/freebsd-arm64": { - "version": "0.18.11", - "resolved": "https://registry.npmjs.org/@esbuild/freebsd-arm64/-/freebsd-arm64-0.18.11.tgz", - "integrity": "sha512-atEyuq6a3omEY5qAh5jIORWk8MzFnCpSTUruBgeyN9jZq1K/QI9uke0ATi3MHu4L8c59CnIi4+1jDKMuqmR71A==", + "version": "0.21.5", + "resolved": "https://registry.npmjs.org/@esbuild/freebsd-arm64/-/freebsd-arm64-0.21.5.tgz", + "integrity": "sha512-5JcRxxRDUJLX8JXp/wcBCy3pENnCgBR9bN6JsY4OmhfUtIHe3ZW0mawA7+RDAcMLrMIZaf03NlQiX9DGyB8h4g==", "cpu": [ "arm64" ], @@ -484,9 +504,9 @@ } }, "node_modules/@esbuild/freebsd-x64": { - "version": "0.18.11", - "resolved": "https://registry.npmjs.org/@esbuild/freebsd-x64/-/freebsd-x64-0.18.11.tgz", - "integrity": "sha512-XtuPrEfBj/YYYnAAB7KcorzzpGTvOr/dTtXPGesRfmflqhA4LMF0Gh/n5+a9JBzPuJ+CGk17CA++Hmr1F/gI0Q==", + "version": "0.21.5", + "resolved": "https://registry.npmjs.org/@esbuild/freebsd-x64/-/freebsd-x64-0.21.5.tgz", + "integrity": "sha512-J95kNBj1zkbMXtHVH29bBriQygMXqoVQOQYA+ISs0/2l3T9/kj42ow2mpqerRBxDJnmkUDCaQT/dfNXWX/ZZCQ==", "cpu": [ "x64" ], @@ -500,9 +520,9 @@ } }, "node_modules/@esbuild/linux-arm": { - "version": "0.18.11", - "resolved": "https://registry.npmjs.org/@esbuild/linux-arm/-/linux-arm-0.18.11.tgz", - "integrity": "sha512-Idipz+Taso/toi2ETugShXjQ3S59b6m62KmLHkJlSq/cBejixmIydqrtM2XTvNCywFl3VC7SreSf6NV0i6sRyg==", + "version": "0.21.5", + "resolved": "https://registry.npmjs.org/@esbuild/linux-arm/-/linux-arm-0.21.5.tgz", + "integrity": "sha512-bPb5AHZtbeNGjCKVZ9UGqGwo8EUu4cLq68E95A53KlxAPRmUyYv2D6F0uUI65XisGOL1hBP5mTronbgo+0bFcA==", "cpu": [ "arm" ], @@ -516,9 +536,9 @@ } }, "node_modules/@esbuild/linux-arm64": { - "version": "0.18.11", - "resolved": "https://registry.npmjs.org/@esbuild/linux-arm64/-/linux-arm64-0.18.11.tgz", - "integrity": "sha512-c6Vh2WS9VFKxKZ2TvJdA7gdy0n6eSy+yunBvv4aqNCEhSWVor1TU43wNRp2YLO9Vng2G+W94aRz+ILDSwAiYog==", + "version": "0.21.5", + "resolved": "https://registry.npmjs.org/@esbuild/linux-arm64/-/linux-arm64-0.21.5.tgz", + "integrity": "sha512-ibKvmyYzKsBeX8d8I7MH/TMfWDXBF3db4qM6sy+7re0YXya+K1cem3on9XgdT2EQGMu4hQyZhan7TeQ8XkGp4Q==", "cpu": [ "arm64" ], @@ -532,9 +552,9 @@ } }, "node_modules/@esbuild/linux-ia32": { - "version": "0.18.11", - "resolved": "https://registry.npmjs.org/@esbuild/linux-ia32/-/linux-ia32-0.18.11.tgz", - "integrity": "sha512-S3hkIF6KUqRh9n1Q0dSyYcWmcVa9Cg+mSoZEfFuzoYXXsk6196qndrM+ZiHNwpZKi3XOXpShZZ+9dfN5ykqjjw==", + "version": "0.21.5", + "resolved": "https://registry.npmjs.org/@esbuild/linux-ia32/-/linux-ia32-0.21.5.tgz", + "integrity": "sha512-YvjXDqLRqPDl2dvRODYmmhz4rPeVKYvppfGYKSNGdyZkA01046pLWyRKKI3ax8fbJoK5QbxblURkwK/MWY18Tg==", "cpu": [ "ia32" ], @@ -548,9 +568,9 @@ } }, "node_modules/@esbuild/linux-loong64": { - "version": "0.18.11", - "resolved": "https://registry.npmjs.org/@esbuild/linux-loong64/-/linux-loong64-0.18.11.tgz", - "integrity": "sha512-MRESANOoObQINBA+RMZW+Z0TJWpibtE7cPFnahzyQHDCA9X9LOmGh68MVimZlM9J8n5Ia8lU773te6O3ILW8kw==", + "version": "0.21.5", + "resolved": 
"https://registry.npmjs.org/@esbuild/linux-loong64/-/linux-loong64-0.21.5.tgz", + "integrity": "sha512-uHf1BmMG8qEvzdrzAqg2SIG/02+4/DHB6a9Kbya0XDvwDEKCoC8ZRWI5JJvNdUjtciBGFQ5PuBlpEOXQj+JQSg==", "cpu": [ "loong64" ], @@ -564,9 +584,9 @@ } }, "node_modules/@esbuild/linux-mips64el": { - "version": "0.18.11", - "resolved": "https://registry.npmjs.org/@esbuild/linux-mips64el/-/linux-mips64el-0.18.11.tgz", - "integrity": "sha512-qVyPIZrXNMOLYegtD1u8EBccCrBVshxMrn5MkuFc3mEVsw7CCQHaqZ4jm9hbn4gWY95XFnb7i4SsT3eflxZsUg==", + "version": "0.21.5", + "resolved": "https://registry.npmjs.org/@esbuild/linux-mips64el/-/linux-mips64el-0.21.5.tgz", + "integrity": "sha512-IajOmO+KJK23bj52dFSNCMsz1QP1DqM6cwLUv3W1QwyxkyIWecfafnI555fvSGqEKwjMXVLokcV5ygHW5b3Jbg==", "cpu": [ "mips64el" ], @@ -580,9 +600,9 @@ } }, "node_modules/@esbuild/linux-ppc64": { - "version": "0.18.11", - "resolved": "https://registry.npmjs.org/@esbuild/linux-ppc64/-/linux-ppc64-0.18.11.tgz", - "integrity": "sha512-T3yd8vJXfPirZaUOoA9D2ZjxZX4Gr3QuC3GztBJA6PklLotc/7sXTOuuRkhE9W/5JvJP/K9b99ayPNAD+R+4qQ==", + "version": "0.21.5", + "resolved": "https://registry.npmjs.org/@esbuild/linux-ppc64/-/linux-ppc64-0.21.5.tgz", + "integrity": "sha512-1hHV/Z4OEfMwpLO8rp7CvlhBDnjsC3CttJXIhBi+5Aj5r+MBvy4egg7wCbe//hSsT+RvDAG7s81tAvpL2XAE4w==", "cpu": [ "ppc64" ], @@ -596,9 +616,9 @@ } }, "node_modules/@esbuild/linux-riscv64": { - "version": "0.18.11", - "resolved": "https://registry.npmjs.org/@esbuild/linux-riscv64/-/linux-riscv64-0.18.11.tgz", - "integrity": "sha512-evUoRPWiwuFk++snjH9e2cAjF5VVSTj+Dnf+rkO/Q20tRqv+644279TZlPK8nUGunjPAtQRCj1jQkDAvL6rm2w==", + "version": "0.21.5", + "resolved": "https://registry.npmjs.org/@esbuild/linux-riscv64/-/linux-riscv64-0.21.5.tgz", + "integrity": "sha512-2HdXDMd9GMgTGrPWnJzP2ALSokE/0O5HhTUvWIbD3YdjME8JwvSCnNGBnTThKGEB91OZhzrJ4qIIxk/SBmyDDA==", "cpu": [ "riscv64" ], @@ -612,9 +632,9 @@ } }, "node_modules/@esbuild/linux-s390x": { - "version": "0.18.11", - "resolved": "https://registry.npmjs.org/@esbuild/linux-s390x/-/linux-s390x-0.18.11.tgz", - "integrity": "sha512-/SlRJ15XR6i93gRWquRxYCfhTeC5PdqEapKoLbX63PLCmAkXZHY2uQm2l9bN0oPHBsOw2IswRZctMYS0MijFcg==", + "version": "0.21.5", + "resolved": "https://registry.npmjs.org/@esbuild/linux-s390x/-/linux-s390x-0.21.5.tgz", + "integrity": "sha512-zus5sxzqBJD3eXxwvjN1yQkRepANgxE9lgOW2qLnmr8ikMTphkjgXu1HR01K4FJg8h1kEEDAqDcZQtbrRnB41A==", "cpu": [ "s390x" ], @@ -628,9 +648,9 @@ } }, "node_modules/@esbuild/linux-x64": { - "version": "0.18.11", - "resolved": "https://registry.npmjs.org/@esbuild/linux-x64/-/linux-x64-0.18.11.tgz", - "integrity": "sha512-xcncej+wF16WEmIwPtCHi0qmx1FweBqgsRtEL1mSHLFR6/mb3GEZfLQnx+pUDfRDEM4DQF8dpXIW7eDOZl1IbA==", + "version": "0.21.5", + "resolved": "https://registry.npmjs.org/@esbuild/linux-x64/-/linux-x64-0.21.5.tgz", + "integrity": "sha512-1rYdTpyv03iycF1+BhzrzQJCdOuAOtaqHTWJZCWvijKD2N5Xu0TtVC8/+1faWqcP9iBCWOmjmhoH94dH82BxPQ==", "cpu": [ "x64" ], @@ -644,9 +664,9 @@ } }, "node_modules/@esbuild/netbsd-x64": { - "version": "0.18.11", - "resolved": "https://registry.npmjs.org/@esbuild/netbsd-x64/-/netbsd-x64-0.18.11.tgz", - "integrity": "sha512-aSjMHj/F7BuS1CptSXNg6S3M4F3bLp5wfFPIJM+Km2NfIVfFKhdmfHF9frhiCLIGVzDziggqWll0B+9AUbud/Q==", + "version": "0.21.5", + "resolved": "https://registry.npmjs.org/@esbuild/netbsd-x64/-/netbsd-x64-0.21.5.tgz", + "integrity": "sha512-Woi2MXzXjMULccIwMnLciyZH4nCIMpWQAs049KEeMvOcNADVxo0UBIQPfSmxB3CWKedngg7sWZdLvLczpe0tLg==", "cpu": [ "x64" ], @@ -660,9 +680,9 @@ } }, "node_modules/@esbuild/openbsd-x64": { - "version": "0.18.11", 
- "resolved": "https://registry.npmjs.org/@esbuild/openbsd-x64/-/openbsd-x64-0.18.11.tgz", - "integrity": "sha512-tNBq+6XIBZtht0xJGv7IBB5XaSyvYPCm1PxJ33zLQONdZoLVM0bgGqUrXnJyiEguD9LU4AHiu+GCXy/Hm9LsdQ==", + "version": "0.21.5", + "resolved": "https://registry.npmjs.org/@esbuild/openbsd-x64/-/openbsd-x64-0.21.5.tgz", + "integrity": "sha512-HLNNw99xsvx12lFBUwoT8EVCsSvRNDVxNpjZ7bPn947b8gJPzeHWyNVhFsaerc0n3TsbOINvRP2byTZ5LKezow==", "cpu": [ "x64" ], @@ -676,9 +696,9 @@ } }, "node_modules/@esbuild/sunos-x64": { - "version": "0.18.11", - "resolved": "https://registry.npmjs.org/@esbuild/sunos-x64/-/sunos-x64-0.18.11.tgz", - "integrity": "sha512-kxfbDOrH4dHuAAOhr7D7EqaYf+W45LsAOOhAet99EyuxxQmjbk8M9N4ezHcEiCYPaiW8Dj3K26Z2V17Gt6p3ng==", + "version": "0.21.5", + "resolved": "https://registry.npmjs.org/@esbuild/sunos-x64/-/sunos-x64-0.21.5.tgz", + "integrity": "sha512-6+gjmFpfy0BHU5Tpptkuh8+uw3mnrvgs+dSPQXQOv3ekbordwnzTVEb4qnIvQcYXq6gzkyTnoZ9dZG+D4garKg==", "cpu": [ "x64" ], @@ -692,9 +712,9 @@ } }, "node_modules/@esbuild/win32-arm64": { - "version": "0.18.11", - "resolved": "https://registry.npmjs.org/@esbuild/win32-arm64/-/win32-arm64-0.18.11.tgz", - "integrity": "sha512-Sh0dDRyk1Xi348idbal7lZyfSkjhJsdFeuC13zqdipsvMetlGiFQNdO+Yfp6f6B4FbyQm7qsk16yaZk25LChzg==", + "version": "0.21.5", + "resolved": "https://registry.npmjs.org/@esbuild/win32-arm64/-/win32-arm64-0.21.5.tgz", + "integrity": "sha512-Z0gOTd75VvXqyq7nsl93zwahcTROgqvuAcYDUr+vOv8uHhNSKROyU961kgtCD1e95IqPKSQKH7tBTslnS3tA8A==", "cpu": [ "arm64" ], @@ -708,9 +728,9 @@ } }, "node_modules/@esbuild/win32-ia32": { - "version": "0.18.11", - "resolved": "https://registry.npmjs.org/@esbuild/win32-ia32/-/win32-ia32-0.18.11.tgz", - "integrity": "sha512-o9JUIKF1j0rqJTFbIoF4bXj6rvrTZYOrfRcGyL0Vm5uJ/j5CkBD/51tpdxe9lXEDouhRgdr/BYzUrDOvrWwJpg==", + "version": "0.21.5", + "resolved": "https://registry.npmjs.org/@esbuild/win32-ia32/-/win32-ia32-0.21.5.tgz", + "integrity": "sha512-SWXFF1CL2RVNMaVs+BBClwtfZSvDgtL//G/smwAc5oVK/UPu2Gu9tIaRgFmYFFKrmg3SyAjSrElf0TiJ1v8fYA==", "cpu": [ "ia32" ], @@ -724,9 +744,9 @@ } }, "node_modules/@esbuild/win32-x64": { - "version": "0.18.11", - "resolved": "https://registry.npmjs.org/@esbuild/win32-x64/-/win32-x64-0.18.11.tgz", - "integrity": "sha512-rQI4cjLHd2hGsM1LqgDI7oOCYbQ6IBOVsX9ejuRMSze0GqXUG2ekwiKkiBU1pRGSeCqFFHxTrcEydB2Hyoz9CA==", + "version": "0.21.5", + "resolved": "https://registry.npmjs.org/@esbuild/win32-x64/-/win32-x64-0.21.5.tgz", + "integrity": "sha512-tQd/1efJuzPC6rCFwEvLtci/xNFcTZknmXs98FYDfGE4wP9ClFV98nyKrzJKVPMhdDnjzLhdUyMX4PsQAPjwIw==", "cpu": [ "x64" ], @@ -739,44 +759,70 @@ "node": ">=12" } }, + "node_modules/@floating-ui/core": { + "version": "1.6.8", + "resolved": "https://registry.npmjs.org/@floating-ui/core/-/core-1.6.8.tgz", + "integrity": "sha512-7XJ9cPU+yI2QeLS+FCSlqNFZJq8arvswefkZrYI1yQBbftw6FyrZOxYSh+9S7z7TpeWlRt9zJ5IhM1WIL334jA==", + "dependencies": { + "@floating-ui/utils": "^0.2.8" + } + }, + "node_modules/@floating-ui/devtools": { + "version": "0.2.1", + "resolved": "https://registry.npmjs.org/@floating-ui/devtools/-/devtools-0.2.1.tgz", + "integrity": "sha512-8PHJLbD6VhBh+LJ1uty/Bz30qs02NXCE5u8WpOhSewlYXUWl03GNXknr9AS2yaAWJEQaY27x7eByJs44gODBcw==", + "peerDependencies": { + "@floating-ui/dom": ">=1.5.4" + } + }, + "node_modules/@floating-ui/dom": { + "version": "1.6.12", + "resolved": "https://registry.npmjs.org/@floating-ui/dom/-/dom-1.6.12.tgz", + "integrity": "sha512-NP83c0HjokcGVEMeoStg317VD9W7eDlGK7457dMBANbKA6GJZdc7rjujdgqzTaz93jkGgc5P/jeWbaCHnMNc+w==", + "dependencies": { + 
"@floating-ui/core": "^1.6.0", + "@floating-ui/utils": "^0.2.8" + } + }, + "node_modules/@floating-ui/utils": { + "version": "0.2.8", + "resolved": "https://registry.npmjs.org/@floating-ui/utils/-/utils-0.2.8.tgz", + "integrity": "sha512-kym7SodPp8/wloecOpcmSnWJsK7M0E5Wg8UcFA+uO4B9s5d0ywXOEro/8HM9x0rW+TljRzul/14UYz3TleT3ig==" + }, "node_modules/@fluentui/date-time-utilities": { - "version": "8.5.13", - "resolved": "https://registry.npmjs.org/@fluentui/date-time-utilities/-/date-time-utilities-8.5.13.tgz", - "integrity": "sha512-X3clbPKh0URkDj21QoARw6SNec7dWg7Gt7SkTlkVYFzmZUdC4ZIrYk3n36xKe3U1wcGp26EVmKjhAhB262ugpw==", + "version": "8.5.14", + "license": "MIT", "dependencies": { - "@fluentui/set-version": "^8.2.11", + "@fluentui/set-version": "^8.2.12", "tslib": "^2.1.0" } }, "node_modules/@fluentui/dom-utilities": { - "version": "2.2.11", - "resolved": "https://registry.npmjs.org/@fluentui/dom-utilities/-/dom-utilities-2.2.11.tgz", - "integrity": "sha512-2tXfg7/9PXu9nfU72/P3o3waHEFEQtHUfQbVexUaYqNNAxMj6sOfsqpUx4vd5nPgO+grSWrl+spqlLN2yej51w==", + "version": "2.2.12", + "license": "MIT", "dependencies": { - "@fluentui/set-version": "^8.2.11", + "@fluentui/set-version": "^8.2.12", "tslib": "^2.1.0" } }, "node_modules/@fluentui/font-icons-mdl2": { - "version": "8.5.23", - "resolved": "https://registry.npmjs.org/@fluentui/font-icons-mdl2/-/font-icons-mdl2-8.5.23.tgz", - "integrity": "sha512-jZjUtfQm9/84jX34zhwwsoZME86xXXgKAgBYuMvRStKzXGdZcd7YSOlmuT8lbISmtFL/SWwUGOEal1nLCUNeNA==", + "version": "8.5.26", + "license": "MIT", "dependencies": { - "@fluentui/set-version": "^8.2.11", - "@fluentui/style-utilities": "^8.9.16", - "@fluentui/utilities": "^8.13.18", + "@fluentui/set-version": "^8.2.12", + "@fluentui/style-utilities": "^8.9.19", + "@fluentui/utilities": "^8.13.20", "tslib": "^2.1.0" } }, "node_modules/@fluentui/foundation-legacy": { - "version": "8.2.43", - "resolved": "https://registry.npmjs.org/@fluentui/foundation-legacy/-/foundation-legacy-8.2.43.tgz", - "integrity": "sha512-rXr71KxNcWDH2LmTsFZbP75p8HssLlVLaFAqEdLE+sKf/LNKmqkDVTNhDbHZxzxy0QnguI4aNHcyGhMZUH3MPA==", - "dependencies": { - "@fluentui/merge-styles": "^8.5.12", - "@fluentui/set-version": "^8.2.11", - "@fluentui/style-utilities": "^8.9.16", - "@fluentui/utilities": "^8.13.18", + "version": "8.2.46", + "license": "MIT", + "dependencies": { + "@fluentui/merge-styles": "^8.5.13", + "@fluentui/set-version": "^8.2.12", + "@fluentui/style-utilities": "^8.9.19", + "@fluentui/utilities": "^8.13.20", "tslib": "^2.1.0" }, "peerDependencies": { @@ -785,39 +831,52 @@ } }, "node_modules/@fluentui/keyboard-key": { - "version": "0.4.11", - "resolved": "https://registry.npmjs.org/@fluentui/keyboard-key/-/keyboard-key-0.4.11.tgz", - "integrity": "sha512-TVB/EloWado9AVp1niChgcdDOQAHGP5B30Dinmtfe7zi8OnstwPoxwFP6dHJDdpLQ6ZEUTaEHViSzvewl7Chag==", + "version": "0.4.12", + "license": "MIT", "dependencies": { "tslib": "^2.1.0" } }, + "node_modules/@fluentui/keyboard-keys": { + "version": "9.0.8", + "resolved": "https://registry.npmjs.org/@fluentui/keyboard-keys/-/keyboard-keys-9.0.8.tgz", + "integrity": "sha512-iUSJUUHAyTosnXK8O2Ilbfxma+ZyZPMua5vB028Ys96z80v+LFwntoehlFsdH3rMuPsA8GaC1RE7LMezwPBPdw==", + "dependencies": { + "@swc/helpers": "^0.5.1" + } + }, "node_modules/@fluentui/merge-styles": { - "version": "8.5.12", - "resolved": "https://registry.npmjs.org/@fluentui/merge-styles/-/merge-styles-8.5.12.tgz", - "integrity": "sha512-ZnUo0YuMP7AYi68dkknFqVxopIAgbrUnqR/MZlemmRvBYyy1SMj1WQeHcoiLFA8mF8YKn7B+jxQgJbN2bfcrRw==", + "version": "8.5.13", + 
"license": "MIT", "dependencies": { - "@fluentui/set-version": "^8.2.11", + "@fluentui/set-version": "^8.2.12", "tslib": "^2.1.0" } }, + "node_modules/@fluentui/priority-overflow": { + "version": "9.1.14", + "resolved": "https://registry.npmjs.org/@fluentui/priority-overflow/-/priority-overflow-9.1.14.tgz", + "integrity": "sha512-tIH8EhvjZF4MhxSjqrWOyodrQQW+RlVZqxuNFQF5OWRdSqcIK8g+Z+UbC5fYHQooCgVsthk2mFurfGMKFtf9ug==", + "dependencies": { + "@swc/helpers": "^0.5.1" + } + }, "node_modules/@fluentui/react": { - "version": "8.110.7", - "resolved": "https://registry.npmjs.org/@fluentui/react/-/react-8.110.7.tgz", - "integrity": "sha512-3sn4HZL10jghiYFF+Ouc7pNDJ5pR2ueU6ZY1IdmVFgYXTJJ/IwQhVc37mXVf8VoUM7hF4vRcGE4z+loNTpTX0w==", - "dependencies": { - "@fluentui/date-time-utilities": "^8.5.13", - "@fluentui/font-icons-mdl2": "^8.5.23", - "@fluentui/foundation-legacy": "^8.2.43", - "@fluentui/merge-styles": "^8.5.12", - "@fluentui/react-focus": "^8.8.30", - "@fluentui/react-hooks": "^8.6.29", - "@fluentui/react-portal-compat-context": "^9.0.6", - "@fluentui/react-window-provider": "^2.2.15", - "@fluentui/set-version": "^8.2.11", - "@fluentui/style-utilities": "^8.9.16", - "@fluentui/theme": "^2.6.34", - "@fluentui/utilities": "^8.13.18", + "version": "8.112.5", + "license": "MIT", + "dependencies": { + "@fluentui/date-time-utilities": "^8.5.14", + "@fluentui/font-icons-mdl2": "^8.5.26", + "@fluentui/foundation-legacy": "^8.2.46", + "@fluentui/merge-styles": "^8.5.13", + "@fluentui/react-focus": "^8.8.33", + "@fluentui/react-hooks": "^8.6.32", + "@fluentui/react-portal-compat-context": "^9.0.9", + "@fluentui/react-window-provider": "^2.2.16", + "@fluentui/set-version": "^8.2.12", + "@fluentui/style-utilities": "^8.9.19", + "@fluentui/theme": "^2.6.37", + "@fluentui/utilities": "^8.13.20", "@microsoft/load-themed-styles": "^1.10.26", "tslib": "^2.1.0" }, @@ -828,2096 +887,5329 @@ "react-dom": ">=16.8.0 <19.0.0" } }, - "node_modules/@fluentui/react-focus": { - "version": "8.8.30", - "resolved": "https://registry.npmjs.org/@fluentui/react-focus/-/react-focus-8.8.30.tgz", - "integrity": "sha512-dKQQtNTZbQOE+u/Tmh7AbtJPSpzQNI0L8o55a22y4U7s33rizUd++CIiToXsB+bPvlotcmpZswZQ8V06zM4KIw==", - "dependencies": { - "@fluentui/keyboard-key": "^0.4.11", - "@fluentui/merge-styles": "^8.5.12", - "@fluentui/set-version": "^8.2.11", - "@fluentui/style-utilities": "^8.9.16", - "@fluentui/utilities": "^8.13.18", - "tslib": "^2.1.0" + "node_modules/@fluentui/react-accordion": { + "version": "9.5.8", + "resolved": "https://registry.npmjs.org/@fluentui/react-accordion/-/react-accordion-9.5.8.tgz", + "integrity": "sha512-tYkHFbNfJG1/qSzkdagSGZoL9LlRp1/ei0TwezDq9M41rGZWHz+qDRkPlw/f66YWT006tR1zR1voJYhshsJ21g==", + "dependencies": { + "@fluentui/react-aria": "^9.13.9", + "@fluentui/react-context-selector": "^9.1.69", + "@fluentui/react-icons": "^2.0.245", + "@fluentui/react-jsx-runtime": "^9.0.46", + "@fluentui/react-motion": "^9.6.1", + "@fluentui/react-motion-components-preview": "^0.3.0", + "@fluentui/react-shared-contexts": "^9.21.0", + "@fluentui/react-tabster": "^9.23.0", + "@fluentui/react-theme": "^9.1.22", + "@fluentui/react-utilities": "^9.18.17", + "@griffel/react": "^1.5.22", + "@swc/helpers": "^0.5.1" }, "peerDependencies": { - "@types/react": ">=16.8.0 <19.0.0", - "react": ">=16.8.0 <19.0.0" + "@types/react": ">=16.14.0 <19.0.0", + "@types/react-dom": ">=16.9.0 <19.0.0", + "react": ">=16.14.0 <19.0.0", + "react-dom": ">=16.14.0 <19.0.0" } }, - "node_modules/@fluentui/react-hooks": { - "version": "8.6.29", - 
"resolved": "https://registry.npmjs.org/@fluentui/react-hooks/-/react-hooks-8.6.29.tgz", - "integrity": "sha512-MeVevmGJtrYxdhoarrkVWE0Hs4XdzOc9A3tiOjMBIcwOvoOYOAoOELoHK/wuulPVwUn2R9Y+7JpJ6oCe4ImdJw==", + "node_modules/@fluentui/react-alert": { + "version": "9.0.0-beta.124", + "resolved": "https://registry.npmjs.org/@fluentui/react-alert/-/react-alert-9.0.0-beta.124.tgz", + "integrity": "sha512-yFBo3B5H9hnoaXxlkuz8wRz04DEyQ+ElYA/p5p+Vojf19Zuta8DmFZZ6JtWdtxcdnnQ4LvAfC5OYYlzdReozPA==", + "license": "MIT", "dependencies": { - "@fluentui/react-window-provider": "^2.2.15", - "@fluentui/set-version": "^8.2.11", - "@fluentui/utilities": "^8.13.18", - "tslib": "^2.1.0" + "@fluentui/react-avatar": "^9.6.29", + "@fluentui/react-button": "^9.3.83", + "@fluentui/react-icons": "^2.0.239", + "@fluentui/react-jsx-runtime": "^9.0.39", + "@fluentui/react-tabster": "^9.21.5", + "@fluentui/react-theme": "^9.1.19", + "@fluentui/react-utilities": "^9.18.10", + "@griffel/react": "^1.5.22", + "@swc/helpers": "^0.5.1" }, "peerDependencies": { - "@types/react": ">=16.8.0 <19.0.0", - "react": ">=16.8.0 <19.0.0" + "@types/react": ">=16.14.0 <19.0.0", + "@types/react-dom": ">=16.9.0 <19.0.0", + "react": ">=16.14.0 <19.0.0", + "react-dom": ">=16.14.0 <19.0.0" } }, - "node_modules/@fluentui/react-icons": { - "version": "2.0.206", - "resolved": "https://registry.npmjs.org/@fluentui/react-icons/-/react-icons-2.0.206.tgz", - "integrity": "sha512-Hq+QCAQbmIR8Pi7eihNQQgV3NmBcNP7j0Px1gBusgqK+RfttE0ubLym3SwQ3/Zz4neaDgAa9QKoArgEt3HorvA==", + "node_modules/@fluentui/react-aria": { + "version": "9.13.9", + "resolved": "https://registry.npmjs.org/@fluentui/react-aria/-/react-aria-9.13.9.tgz", + "integrity": "sha512-YURuZ2Nh7hz5VlCQ9NHLvzyqdiJhElm4aW/F4JRmXAoMdeDCfgG0UGL82DDPZL6eNYIjhQN8WpRXH2tfxJ80HA==", "dependencies": { - "@griffel/react": "^1.0.0", - "tslib": "^2.1.0" + "@fluentui/keyboard-keys": "^9.0.8", + "@fluentui/react-jsx-runtime": "^9.0.46", + "@fluentui/react-shared-contexts": "^9.21.0", + "@fluentui/react-tabster": "^9.23.0", + "@fluentui/react-utilities": "^9.18.17", + "@swc/helpers": "^0.5.1" }, "peerDependencies": { - "react": ">=16.8.0 <19.0.0" + "@types/react": ">=16.14.0 <19.0.0", + "@types/react-dom": ">=16.9.0 <19.0.0", + "react": ">=16.14.0 <19.0.0", + "react-dom": ">=16.14.0 <19.0.0" } }, - "node_modules/@fluentui/react-portal-compat-context": { - "version": "9.0.6", - "resolved": "https://registry.npmjs.org/@fluentui/react-portal-compat-context/-/react-portal-compat-context-9.0.6.tgz", - "integrity": "sha512-HUt0/YXKRB4chtzlGbZ+7y7FHFyqaI0CeMFAe/QBXVOiOwA01QOr2j4Uky+30vupspIt6mjodLanuw1jMybmqQ==", + "node_modules/@fluentui/react-avatar": { + "version": "9.6.43", + "resolved": "https://registry.npmjs.org/@fluentui/react-avatar/-/react-avatar-9.6.43.tgz", + "integrity": "sha512-N/bHM7ZriCrUupZ0jgK+cUHuOymIvs3JMxME6z/6711xwHH9PRM0vpu17O+oYsnwatELDaGsN5MWV4T6x1UDVA==", "dependencies": { - "@swc/helpers": "^0.4.14" + "@fluentui/react-badge": "^9.2.45", + "@fluentui/react-context-selector": "^9.1.69", + "@fluentui/react-icons": "^2.0.245", + "@fluentui/react-jsx-runtime": "^9.0.46", + "@fluentui/react-popover": "^9.9.25", + "@fluentui/react-shared-contexts": "^9.21.0", + "@fluentui/react-tabster": "^9.23.0", + "@fluentui/react-theme": "^9.1.22", + "@fluentui/react-tooltip": "^9.4.43", + "@fluentui/react-utilities": "^9.18.17", + "@griffel/react": "^1.5.22", + "@swc/helpers": "^0.5.1" }, "peerDependencies": { - "@types/react": ">=16.8.0 <19.0.0", - "react": ">=16.8.0 <19.0.0" + "@types/react": ">=16.14.0 
<19.0.0", + "@types/react-dom": ">=16.9.0 <19.0.0", + "react": ">=16.14.0 <19.0.0", + "react-dom": ">=16.14.0 <19.0.0" } }, - "node_modules/@fluentui/react-window-provider": { - "version": "2.2.15", - "resolved": "https://registry.npmjs.org/@fluentui/react-window-provider/-/react-window-provider-2.2.15.tgz", - "integrity": "sha512-RraWvRe7wakpPJRBX2tlCV/cybOKiqLJ1UBLPNf5xq7ZIs0T0g/hh3G3Zb5teOeipjuRnl6srkdDUT9Dy9wrBg==", + "node_modules/@fluentui/react-badge": { + "version": "9.2.45", + "resolved": "https://registry.npmjs.org/@fluentui/react-badge/-/react-badge-9.2.45.tgz", + "integrity": "sha512-X1dDCs0ZjQNx46VUAWYVvVfufARNtOQoXmcdldtd8kWnLDA4aAVI+/CX4bhZ/+qV9hiIowffuW/QPhNXWSozVQ==", "dependencies": { - "@fluentui/set-version": "^8.2.11", - "tslib": "^2.1.0" + "@fluentui/react-icons": "^2.0.245", + "@fluentui/react-jsx-runtime": "^9.0.46", + "@fluentui/react-shared-contexts": "^9.21.0", + "@fluentui/react-theme": "^9.1.22", + "@fluentui/react-utilities": "^9.18.17", + "@griffel/react": "^1.5.22", + "@swc/helpers": "^0.5.1" }, "peerDependencies": { - "@types/react": ">=16.8.0 <19.0.0", - "react": ">=16.8.0 <19.0.0" + "@types/react": ">=16.14.0 <19.0.0", + "@types/react-dom": ">=16.9.0 <19.0.0", + "react": ">=16.14.0 <19.0.0", + "react-dom": ">=16.14.0 <19.0.0" } }, - "node_modules/@fluentui/set-version": { - "version": "8.2.11", - "resolved": "https://registry.npmjs.org/@fluentui/set-version/-/set-version-8.2.11.tgz", - "integrity": "sha512-UI03tysau/adBO1a3q4uFZWQ3lfkiFcAWIFng4k5odWcCokfCm5IxA0urKqj5W5JRYdyoBUaq8QbcNGkFB4dCw==", + "node_modules/@fluentui/react-breadcrumb": { + "version": "9.0.43", + "resolved": "https://registry.npmjs.org/@fluentui/react-breadcrumb/-/react-breadcrumb-9.0.43.tgz", + "integrity": "sha512-kVve9azEzJn/6aZU1Hv2KVd3INkoSbX5kbIVUzDdsMZYeFpYp0V9Fz/akwa9jhSkONdqCpKpI/BbT8wRjWky9g==", "dependencies": { - "tslib": "^2.1.0" + "@fluentui/react-aria": "^9.13.9", + "@fluentui/react-button": "^9.3.95", + "@fluentui/react-icons": "^2.0.245", + "@fluentui/react-jsx-runtime": "^9.0.46", + "@fluentui/react-link": "^9.3.2", + "@fluentui/react-shared-contexts": "^9.21.0", + "@fluentui/react-tabster": "^9.23.0", + "@fluentui/react-theme": "^9.1.22", + "@fluentui/react-utilities": "^9.18.17", + "@griffel/react": "^1.5.22", + "@swc/helpers": "^0.5.1" + }, + "peerDependencies": { + "@types/react": ">=16.14.0 <19.0.0", + "@types/react-dom": ">=16.9.0 <19.0.0", + "react": ">=16.14.0 <19.0.0", + "react-dom": ">=16.14.0 <19.0.0" } }, - "node_modules/@fluentui/style-utilities": { - "version": "8.9.16", - "resolved": "https://registry.npmjs.org/@fluentui/style-utilities/-/style-utilities-8.9.16.tgz", - "integrity": "sha512-8hS5HscCFYvcWjAdk37frPZJZthr7f/cu5db7gjrPy+DEhf13WAZRHsropWm17+8GhJhvKt98BQf/Kzxtt34Eg==", - "dependencies": { - "@fluentui/merge-styles": "^8.5.12", - "@fluentui/set-version": "^8.2.11", - "@fluentui/theme": "^2.6.34", - "@fluentui/utilities": "^8.13.18", - "@microsoft/load-themed-styles": "^1.10.26", - "tslib": "^2.1.0" + "node_modules/@fluentui/react-button": { + "version": "9.3.95", + "resolved": "https://registry.npmjs.org/@fluentui/react-button/-/react-button-9.3.95.tgz", + "integrity": "sha512-kvwxBrCLXeFkgVy1+n01BZmRnEE/uPtapkUSInIXf8qQgOZzpLirLfrDqjBsTMd1Wosv9zgh27gqbiw92cqQSg==", + "dependencies": { + "@fluentui/keyboard-keys": "^9.0.8", + "@fluentui/react-aria": "^9.13.9", + "@fluentui/react-icons": "^2.0.245", + "@fluentui/react-jsx-runtime": "^9.0.46", + "@fluentui/react-shared-contexts": "^9.21.0", + "@fluentui/react-tabster": "^9.23.0", + 
"@fluentui/react-theme": "^9.1.22", + "@fluentui/react-utilities": "^9.18.17", + "@griffel/react": "^1.5.22", + "@swc/helpers": "^0.5.1" + }, + "peerDependencies": { + "@types/react": ">=16.14.0 <19.0.0", + "@types/react-dom": ">=16.9.0 <19.0.0", + "react": ">=16.14.0 <19.0.0", + "react-dom": ">=16.14.0 <19.0.0" } }, - "node_modules/@fluentui/theme": { - "version": "2.6.34", - "resolved": "https://registry.npmjs.org/@fluentui/theme/-/theme-2.6.34.tgz", - "integrity": "sha512-2Ssi3sX2snnbPJ4PmxbpCDCGePRE36tvGj2qKgdKiSh/fPVsg1b+Q50YlpFl9sXmbhl1uFmxjAx6WPsVGTl7vQ==", + "node_modules/@fluentui/react-card": { + "version": "9.0.97", + "resolved": "https://registry.npmjs.org/@fluentui/react-card/-/react-card-9.0.97.tgz", + "integrity": "sha512-E8Rjkn88muKdn3ACn+WzpTsQYX/ldgZvuRT42PTdrIXeFsQ9RAWJ6TkMf5/FURxKlR29ChT5kIyCH/EzZ+iB0g==", "dependencies": { - "@fluentui/merge-styles": "^8.5.12", - "@fluentui/set-version": "^8.2.11", - "@fluentui/utilities": "^8.13.18", - "tslib": "^2.1.0" + "@fluentui/keyboard-keys": "^9.0.8", + "@fluentui/react-jsx-runtime": "^9.0.46", + "@fluentui/react-tabster": "^9.23.0", + "@fluentui/react-text": "^9.4.27", + "@fluentui/react-theme": "^9.1.22", + "@fluentui/react-utilities": "^9.18.17", + "@griffel/react": "^1.5.22", + "@swc/helpers": "^0.5.1" }, "peerDependencies": { - "@types/react": ">=16.8.0 <19.0.0", - "react": ">=16.8.0 <19.0.0" + "@types/react": ">=16.14.0 <19.0.0", + "@types/react-dom": ">=16.9.0 <19.0.0", + "react": ">=16.14.0 <19.0.0", + "react-dom": ">=16.14.0 <19.0.0" } }, - "node_modules/@fluentui/utilities": { - "version": "8.13.18", - "resolved": "https://registry.npmjs.org/@fluentui/utilities/-/utilities-8.13.18.tgz", - "integrity": "sha512-/0rX9EzltLKwU1SS14VV7agWoOzruVTU3oagZq1QgFAvoj8qi7fNqvSX/VEeRy+0gmbsCkrEViUPkmC7drKzPg==", + "node_modules/@fluentui/react-carousel": { + "version": "9.3.1", + "resolved": "https://registry.npmjs.org/@fluentui/react-carousel/-/react-carousel-9.3.1.tgz", + "integrity": "sha512-nDUOVPAADNRlwg7/KtXgYEgALfll/Zcx7MAIqZkwxtroPzuOqm2CjeMVBwWoekEQzs75i+PgNgL1eXAQwgsAAQ==", "dependencies": { - "@fluentui/dom-utilities": "^2.2.11", - "@fluentui/merge-styles": "^8.5.12", - "@fluentui/set-version": "^8.2.11", - "tslib": "^2.1.0" + "@fluentui/react-aria": "^9.13.9", + "@fluentui/react-button": "^9.3.95", + "@fluentui/react-context-selector": "^9.1.69", + "@fluentui/react-icons": "^2.0.245", + "@fluentui/react-jsx-runtime": "^9.0.46", + "@fluentui/react-shared-contexts": "^9.21.0", + "@fluentui/react-tabster": "^9.23.0", + "@fluentui/react-theme": "^9.1.22", + "@fluentui/react-utilities": "^9.18.17", + "@griffel/react": "^1.5.22", + "@swc/helpers": "^0.5.1", + "embla-carousel": "^8.3.0", + "embla-carousel-autoplay": "^8.3.0", + "embla-carousel-fade": "^8.3.0" }, "peerDependencies": { - "@types/react": ">=16.8.0 <19.0.0", - "react": ">=16.8.0 <19.0.0" + "@types/react": ">=16.14.0 <19.0.0", + "@types/react-dom": ">=16.9.0 <19.0.0", + "react": ">=16.14.0 <19.0.0", + "react-dom": ">=16.14.0 <19.0.0" } }, - "node_modules/@griffel/core": { - "version": "1.10.0", - "resolved": "https://registry.npmjs.org/@griffel/core/-/core-1.10.0.tgz", - "integrity": "sha512-9yIBFswd6pcxtYsDVngplCHTyZ++cIk0htBOBVjxBKEoTkEmTgSvbIB2kKMiO3OJLrjzwoi9r+s3owugzIZe1w==", + "node_modules/@fluentui/react-checkbox": { + "version": "9.2.41", + "resolved": "https://registry.npmjs.org/@fluentui/react-checkbox/-/react-checkbox-9.2.41.tgz", + "integrity": "sha512-+vmoZIaAnN7Z9pxilXSleQJKyLoGksrU0d00huNLIOKFGIgkJHscJzrmAWDWHzFOg1MeGUtpfYYlE3L1N6ypBw==", 
"dependencies": { - "@emotion/hash": "^0.9.0", - "csstype": "^3.0.10", - "rtl-css-js": "^1.16.1", - "stylis": "^4.0.13", - "tslib": "^2.1.0" + "@fluentui/react-field": "^9.1.80", + "@fluentui/react-icons": "^2.0.245", + "@fluentui/react-jsx-runtime": "^9.0.46", + "@fluentui/react-label": "^9.1.78", + "@fluentui/react-shared-contexts": "^9.21.0", + "@fluentui/react-tabster": "^9.23.0", + "@fluentui/react-theme": "^9.1.22", + "@fluentui/react-utilities": "^9.18.17", + "@griffel/react": "^1.5.22", + "@swc/helpers": "^0.5.1" + }, + "peerDependencies": { + "@types/react": ">=16.14.0 <19.0.0", + "@types/react-dom": ">=16.9.0 <19.0.0", + "react": ">=16.14.0 <19.0.0", + "react-dom": ">=16.14.0 <19.0.0" } }, - "node_modules/@griffel/react": { - "version": "1.5.5", - "resolved": "https://registry.npmjs.org/@griffel/react/-/react-1.5.5.tgz", - "integrity": "sha512-MpAU0NEpBzNRWUGSlhgz3jzZRC+HbRI+P2lQIzyxoMFgzEB4QFtDnRDBwPLfi/Eoq55NlVmsxn2Pr3jJ/bjhRw==", + "node_modules/@fluentui/react-combobox": { + "version": "9.13.12", + "resolved": "https://registry.npmjs.org/@fluentui/react-combobox/-/react-combobox-9.13.12.tgz", + "integrity": "sha512-Y710laYoJHmMu09ynLx+13hwtCLhCGqUbVdLCCQmsMzd4hCVNCuhT+ED+sJBTMp/NnyVjMDECJ11Fk5iTkUd0g==", "dependencies": { - "@griffel/core": "^1.10.0", - "tslib": "^2.1.0" + "@fluentui/keyboard-keys": "^9.0.8", + "@fluentui/react-aria": "^9.13.9", + "@fluentui/react-context-selector": "^9.1.69", + "@fluentui/react-field": "^9.1.80", + "@fluentui/react-icons": "^2.0.245", + "@fluentui/react-jsx-runtime": "^9.0.46", + "@fluentui/react-portal": "^9.4.38", + "@fluentui/react-positioning": "^9.15.12", + "@fluentui/react-shared-contexts": "^9.21.0", + "@fluentui/react-tabster": "^9.23.0", + "@fluentui/react-theme": "^9.1.22", + "@fluentui/react-utilities": "^9.18.17", + "@griffel/react": "^1.5.22", + "@swc/helpers": "^0.5.1" }, "peerDependencies": { - "react": ">=16.8.0 <19.0.0" + "@types/react": ">=16.14.0 <19.0.0", + "@types/react-dom": ">=16.9.0 <19.0.0", + "react": ">=16.14.0 <19.0.0", + "react-dom": ">=16.14.0 <19.0.0" } }, - "node_modules/@jridgewell/gen-mapping": { - "version": "0.3.3", - "resolved": "https://registry.npmjs.org/@jridgewell/gen-mapping/-/gen-mapping-0.3.3.tgz", - "integrity": "sha512-HLhSWOLRi875zjjMG/r+Nv0oCW8umGb0BgEhyX3dDX3egwZtB8PqLnjz3yedt8R5StBrzcg4aBpnh8UA9D1BoQ==", - "dev": true, + "node_modules/@fluentui/react-components": { + "version": "9.56.2", + "resolved": "https://registry.npmjs.org/@fluentui/react-components/-/react-components-9.56.2.tgz", + "integrity": "sha512-WcxdvJGPK/xhS9FnmG8QaEM5/Es1Hbggmas5DCkuj2XGEexz4zWZ73tESb7QNYpMxhOKKprln0HfbSpg6c4xOw==", "dependencies": { - "@jridgewell/set-array": "^1.0.1", - "@jridgewell/sourcemap-codec": "^1.4.10", - "@jridgewell/trace-mapping": "^0.3.9" + "@fluentui/react-accordion": "^9.5.8", + "@fluentui/react-alert": "9.0.0-beta.124", + "@fluentui/react-aria": "^9.13.9", + "@fluentui/react-avatar": "^9.6.43", + "@fluentui/react-badge": "^9.2.45", + "@fluentui/react-breadcrumb": "^9.0.43", + "@fluentui/react-button": "^9.3.95", + "@fluentui/react-card": "^9.0.97", + "@fluentui/react-carousel": "^9.3.1", + "@fluentui/react-checkbox": "^9.2.41", + "@fluentui/react-combobox": "^9.13.12", + "@fluentui/react-dialog": "^9.11.21", + "@fluentui/react-divider": "^9.2.77", + "@fluentui/react-drawer": "^9.6.1", + "@fluentui/react-field": "^9.1.80", + "@fluentui/react-image": "^9.1.75", + "@fluentui/react-infobutton": "9.0.0-beta.102", + "@fluentui/react-infolabel": "^9.0.50", + "@fluentui/react-input": "^9.4.93", + 
"@fluentui/react-label": "^9.1.78", + "@fluentui/react-link": "^9.3.2", + "@fluentui/react-menu": "^9.14.20", + "@fluentui/react-message-bar": "^9.2.15", + "@fluentui/react-motion": "^9.6.1", + "@fluentui/react-overflow": "^9.2.1", + "@fluentui/react-persona": "^9.2.102", + "@fluentui/react-popover": "^9.9.25", + "@fluentui/react-portal": "^9.4.38", + "@fluentui/react-positioning": "^9.15.12", + "@fluentui/react-progress": "^9.1.91", + "@fluentui/react-provider": "^9.18.0", + "@fluentui/react-radio": "^9.2.36", + "@fluentui/react-rating": "^9.0.22", + "@fluentui/react-search": "^9.0.22", + "@fluentui/react-select": "^9.1.91", + "@fluentui/react-shared-contexts": "^9.21.0", + "@fluentui/react-skeleton": "^9.1.20", + "@fluentui/react-slider": "^9.2.0", + "@fluentui/react-spinbutton": "^9.2.92", + "@fluentui/react-spinner": "^9.5.2", + "@fluentui/react-swatch-picker": "^9.1.13", + "@fluentui/react-switch": "^9.1.98", + "@fluentui/react-table": "^9.15.22", + "@fluentui/react-tabs": "^9.6.2", + "@fluentui/react-tabster": "^9.23.0", + "@fluentui/react-tag-picker": "^9.3.9", + "@fluentui/react-tags": "^9.3.23", + "@fluentui/react-teaching-popover": "^9.1.22", + "@fluentui/react-text": "^9.4.27", + "@fluentui/react-textarea": "^9.3.92", + "@fluentui/react-theme": "^9.1.22", + "@fluentui/react-toast": "^9.3.59", + "@fluentui/react-toolbar": "^9.2.10", + "@fluentui/react-tooltip": "^9.4.43", + "@fluentui/react-tree": "^9.8.6", + "@fluentui/react-utilities": "^9.18.17", + "@fluentui/react-virtualizer": "9.0.0-alpha.87", + "@griffel/react": "^1.5.22", + "@swc/helpers": "^0.5.1" }, - "engines": { - "node": ">=6.0.0" + "peerDependencies": { + "@types/react": ">=16.14.0 <19.0.0", + "@types/react-dom": ">=16.9.0 <19.0.0", + "react": ">=16.14.0 <19.0.0", + "react-dom": ">=16.14.0 <19.0.0" } }, - "node_modules/@jridgewell/resolve-uri": { - "version": "3.1.0", - "resolved": "https://registry.npmjs.org/@jridgewell/resolve-uri/-/resolve-uri-3.1.0.tgz", - "integrity": "sha512-F2msla3tad+Mfht5cJq7LSXcdudKTWCVYUgw6pLFOOHSTtZlj6SWNYAp+AhuqLmWdBO2X5hPrLcu8cVP8fy28w==", - "dev": true, - "engines": { - "node": ">=6.0.0" + "node_modules/@fluentui/react-context-selector": { + "version": "9.1.69", + "resolved": "https://registry.npmjs.org/@fluentui/react-context-selector/-/react-context-selector-9.1.69.tgz", + "integrity": "sha512-g29PE3cya7vY85o1ZwYMhPtkUyb7Q14UdrBCeEUr7+KjTPKMbkF27GKh0fAwwFuh9talvmI6fEVkJ9odYI6Dog==", + "dependencies": { + "@fluentui/react-utilities": "^9.18.17", + "@swc/helpers": "^0.5.1" + }, + "peerDependencies": { + "@types/react": ">=16.14.0 <19.0.0", + "@types/react-dom": ">=16.9.0 <19.0.0", + "react": ">=16.14.0 <19.0.0", + "react-dom": ">=16.14.0 <19.0.0", + "scheduler": ">=0.19.0 <=0.23.0" } }, - "node_modules/@jridgewell/set-array": { - "version": "1.1.2", - "resolved": "https://registry.npmjs.org/@jridgewell/set-array/-/set-array-1.1.2.tgz", - "integrity": "sha512-xnkseuNADM0gt2bs+BvhO0p78Mk762YnZdsuzFV018NoG1Sj1SCQvpSqa7XUaTam5vAGasABV9qXASMKnFMwMw==", - "dev": true, - "engines": { - "node": ">=6.0.0" + "node_modules/@fluentui/react-dialog": { + "version": "9.11.21", + "resolved": "https://registry.npmjs.org/@fluentui/react-dialog/-/react-dialog-9.11.21.tgz", + "integrity": "sha512-zTBZKGG2z5gV3O9o00coN3p2wemMfiXfgTaiAb866I+htjN8/62BmzKSg32yygfVFaQnvlU1DhKAXd4SpfFAeg==", + "dependencies": { + "@fluentui/keyboard-keys": "^9.0.8", + "@fluentui/react-aria": "^9.13.9", + "@fluentui/react-context-selector": "^9.1.69", + "@fluentui/react-icons": "^2.0.245", + "@fluentui/react-jsx-runtime": 
"^9.0.46", + "@fluentui/react-motion": "^9.6.1", + "@fluentui/react-portal": "^9.4.38", + "@fluentui/react-shared-contexts": "^9.21.0", + "@fluentui/react-tabster": "^9.23.0", + "@fluentui/react-theme": "^9.1.22", + "@fluentui/react-utilities": "^9.18.17", + "@griffel/react": "^1.5.22", + "@swc/helpers": "^0.5.1" + }, + "peerDependencies": { + "@types/react": ">=16.14.0 <19.0.0", + "@types/react-dom": ">=16.9.0 <19.0.0", + "react": ">=16.14.0 <19.0.0", + "react-dom": ">=16.14.0 <19.0.0" } }, - "node_modules/@jridgewell/sourcemap-codec": { - "version": "1.4.15", - "resolved": "https://registry.npmjs.org/@jridgewell/sourcemap-codec/-/sourcemap-codec-1.4.15.tgz", - "integrity": "sha512-eF2rxCRulEKXHTRiDrDy6erMYWqNw4LPdQ8UQA4huuxaQsVeRPFl2oM8oDGxMFhJUWZf9McpLtJasDDZb/Bpeg==", - "dev": true - }, - "node_modules/@jridgewell/trace-mapping": { - "version": "0.3.18", - "resolved": "https://registry.npmjs.org/@jridgewell/trace-mapping/-/trace-mapping-0.3.18.tgz", - "integrity": "sha512-w+niJYzMHdd7USdiH2U6869nqhD2nbfZXND5Yp93qIbEmnDNk7PD48o+YchRVpzMU7M6jVCbenTR7PA1FLQ9pA==", - "dev": true, + "node_modules/@fluentui/react-divider": { + "version": "9.2.77", + "resolved": "https://registry.npmjs.org/@fluentui/react-divider/-/react-divider-9.2.77.tgz", + "integrity": "sha512-mo1ZhkD05p1PC8m5NnQjttIxCZnIy33wtV7w3zEtdlrpqtKvaHmOrbfJPMVVerVEZqX8SL2t5mhXX8AE/kjWyw==", "dependencies": { - "@jridgewell/resolve-uri": "3.1.0", - "@jridgewell/sourcemap-codec": "1.4.14" + "@fluentui/react-jsx-runtime": "^9.0.46", + "@fluentui/react-shared-contexts": "^9.21.0", + "@fluentui/react-theme": "^9.1.22", + "@fluentui/react-utilities": "^9.18.17", + "@griffel/react": "^1.5.22", + "@swc/helpers": "^0.5.1" + }, + "peerDependencies": { + "@types/react": ">=16.14.0 <19.0.0", + "@types/react-dom": ">=16.9.0 <19.0.0", + "react": ">=16.14.0 <19.0.0", + "react-dom": ">=16.14.0 <19.0.0" } }, - "node_modules/@jridgewell/trace-mapping/node_modules/@jridgewell/sourcemap-codec": { - "version": "1.4.14", - "resolved": "https://registry.npmjs.org/@jridgewell/sourcemap-codec/-/sourcemap-codec-1.4.14.tgz", - "integrity": "sha512-XPSJHWmi394fuUuzDnGz1wiKqWfo1yXecHQMRf2l6hztTO+nPru658AyDngaBe7isIxEkRsPR3FZh+s7iVa4Uw==", - "dev": true - }, - "node_modules/@microsoft/load-themed-styles": { - "version": "1.10.295", - "resolved": "https://registry.npmjs.org/@microsoft/load-themed-styles/-/load-themed-styles-1.10.295.tgz", - "integrity": "sha512-W+IzEBw8a6LOOfRJM02dTT7BDZijxm+Z7lhtOAz1+y9vQm1Kdz9jlAO+qCEKsfxtUOmKilW8DIRqFw2aUgKeGg==" - }, - "node_modules/@nicolo-ribaudo/semver-v6": { - "version": "6.3.3", - "resolved": "https://registry.npmjs.org/@nicolo-ribaudo/semver-v6/-/semver-v6-6.3.3.tgz", - "integrity": "sha512-3Yc1fUTs69MG/uZbJlLSI3JISMn2UV2rg+1D/vROUqZyh3l6iYHCs7GMp+M40ZD7yOdDbYjJcU1oTJhrc+dGKg==", - "dev": true, - "bin": { - "semver": "bin/semver.js" + "node_modules/@fluentui/react-drawer": { + "version": "9.6.1", + "resolved": "https://registry.npmjs.org/@fluentui/react-drawer/-/react-drawer-9.6.1.tgz", + "integrity": "sha512-KDVwTnY72rTq7st8bAIU8vfPM1e+q2wsYOdTaxnD6qVU7EcJc5QxT/FmM0jZ300zqrwhf8r4evGMCe7KZv+I6A==", + "dependencies": { + "@fluentui/react-dialog": "^9.11.21", + "@fluentui/react-jsx-runtime": "^9.0.46", + "@fluentui/react-motion": "^9.6.1", + "@fluentui/react-portal": "^9.4.38", + "@fluentui/react-shared-contexts": "^9.21.0", + "@fluentui/react-tabster": "^9.23.0", + "@fluentui/react-theme": "^9.1.22", + "@fluentui/react-utilities": "^9.18.17", + "@griffel/react": "^1.5.22", + "@swc/helpers": "^0.5.1" + }, + 
"peerDependencies": { + "@types/react": ">=16.14.0 <19.0.0", + "@types/react-dom": ">=16.9.0 <19.0.0", + "react": ">=16.14.0 <19.0.0", + "react-dom": ">=16.14.0 <19.0.0" } }, - "node_modules/@react-spring/animated": { - "version": "9.7.3", - "resolved": "https://registry.npmjs.org/@react-spring/animated/-/animated-9.7.3.tgz", - "integrity": "sha512-5CWeNJt9pNgyvuSzQH+uy2pvTg8Y4/OisoscZIR8/ZNLIOI+CatFBhGZpDGTF/OzdNFsAoGk3wiUYTwoJ0YIvw==", + "node_modules/@fluentui/react-field": { + "version": "9.1.80", + "resolved": "https://registry.npmjs.org/@fluentui/react-field/-/react-field-9.1.80.tgz", + "integrity": "sha512-e+rVWTq5NUV7bq+PkTx+nxEIQOgRdA1RGyr2GG70qxtfus/JQoEteYMFoOFPiK0oJ0I0BfJf4NQG1mwnov7X0w==", "dependencies": { - "@react-spring/shared": "~9.7.3", - "@react-spring/types": "~9.7.3" + "@fluentui/react-context-selector": "^9.1.69", + "@fluentui/react-icons": "^2.0.245", + "@fluentui/react-jsx-runtime": "^9.0.46", + "@fluentui/react-label": "^9.1.78", + "@fluentui/react-theme": "^9.1.22", + "@fluentui/react-utilities": "^9.18.17", + "@griffel/react": "^1.5.22", + "@swc/helpers": "^0.5.1" }, "peerDependencies": { - "react": "^16.8.0 || ^17.0.0 || ^18.0.0" + "@types/react": ">=16.14.0 <19.0.0", + "@types/react-dom": ">=16.9.0 <19.0.0", + "react": ">=16.14.0 <19.0.0", + "react-dom": ">=16.14.0 <19.0.0" } }, - "node_modules/@react-spring/core": { - "version": "9.7.3", - "resolved": "https://registry.npmjs.org/@react-spring/core/-/core-9.7.3.tgz", - "integrity": "sha512-IqFdPVf3ZOC1Cx7+M0cXf4odNLxDC+n7IN3MDcVCTIOSBfqEcBebSv+vlY5AhM0zw05PDbjKrNmBpzv/AqpjnQ==", + "node_modules/@fluentui/react-focus": { + "version": "8.8.33", + "license": "MIT", "dependencies": { - "@react-spring/animated": "~9.7.3", - "@react-spring/shared": "~9.7.3", - "@react-spring/types": "~9.7.3" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/react-spring/donate" + "@fluentui/keyboard-key": "^0.4.12", + "@fluentui/merge-styles": "^8.5.13", + "@fluentui/set-version": "^8.2.12", + "@fluentui/style-utilities": "^8.9.19", + "@fluentui/utilities": "^8.13.20", + "tslib": "^2.1.0" }, "peerDependencies": { - "react": "^16.8.0 || ^17.0.0 || ^18.0.0" + "@types/react": ">=16.8.0 <19.0.0", + "react": ">=16.8.0 <19.0.0" } }, - "node_modules/@react-spring/shared": { - "version": "9.7.3", - "resolved": "https://registry.npmjs.org/@react-spring/shared/-/shared-9.7.3.tgz", - "integrity": "sha512-NEopD+9S5xYyQ0pGtioacLhL2luflh6HACSSDUZOwLHoxA5eku1UPuqcJqjwSD6luKjjLfiLOspxo43FUHKKSA==", + "node_modules/@fluentui/react-hooks": { + "version": "8.6.32", + "license": "MIT", "dependencies": { - "@react-spring/types": "~9.7.3" + "@fluentui/react-window-provider": "^2.2.16", + "@fluentui/set-version": "^8.2.12", + "@fluentui/utilities": "^8.13.20", + "tslib": "^2.1.0" }, "peerDependencies": { - "react": "^16.8.0 || ^17.0.0 || ^18.0.0" + "@types/react": ">=16.8.0 <19.0.0", + "react": ">=16.8.0 <19.0.0" } }, - "node_modules/@react-spring/types": { - "version": "9.7.3", - "resolved": "https://registry.npmjs.org/@react-spring/types/-/types-9.7.3.tgz", - "integrity": "sha512-Kpx/fQ/ZFX31OtlqVEFfgaD1ACzul4NksrvIgYfIFq9JpDHFwQkMVZ10tbo0FU/grje4rcL4EIrjekl3kYwgWw==" - }, - "node_modules/@react-spring/web": { - "version": "9.7.3", - "resolved": "https://registry.npmjs.org/@react-spring/web/-/web-9.7.3.tgz", - "integrity": "sha512-BXt6BpS9aJL/QdVqEIX9YoUy8CE6TJrU0mNCqSoxdXlIeNcEBWOfIyE6B14ENNsyQKS3wOWkiJfco0tCr/9tUg==", + "node_modules/@fluentui/react-icons": { + "version": "2.0.265", + "resolved": 
"https://registry.npmjs.org/@fluentui/react-icons/-/react-icons-2.0.265.tgz", + "integrity": "sha512-bpiB4LGKv7LA6BsTHYLWuK6IH7CqqJYooHJfjaQ1i90OPfXpTmV1G/HB+6dIsmbAdKS14Z2bKM6Qb+yP3Ojuyg==", "dependencies": { - "@react-spring/animated": "~9.7.3", - "@react-spring/core": "~9.7.3", - "@react-spring/shared": "~9.7.3", - "@react-spring/types": "~9.7.3" + "@griffel/react": "^1.0.0", + "tslib": "^2.1.0" }, "peerDependencies": { - "react": "^16.8.0 || ^17.0.0 || ^18.0.0", - "react-dom": "^16.8.0 || ^17.0.0 || ^18.0.0" + "react": ">=16.8.0 <19.0.0" } }, - "node_modules/@remix-run/router": { - "version": "1.7.1", - "resolved": "https://registry.npmjs.org/@remix-run/router/-/router-1.7.1.tgz", - "integrity": "sha512-bgVQM4ZJ2u2CM8k1ey70o1ePFXsEzYVZoWghh6WjM8p59jQ7HxzbHW4SbnWFG7V9ig9chLawQxDTZ3xzOF8MkQ==", - "engines": { - "node": ">=14" + "node_modules/@fluentui/react-image": { + "version": "9.1.75", + "resolved": "https://registry.npmjs.org/@fluentui/react-image/-/react-image-9.1.75.tgz", + "integrity": "sha512-pw4vL+j5/Qc9jSivfKRZ2qocx7W7BsfIFu/h8l89dg2OSvcLjUygWLYT/1KBz9oXIE8eQy6aZV/mvI3swhEWqw==", + "dependencies": { + "@fluentui/react-jsx-runtime": "^9.0.46", + "@fluentui/react-shared-contexts": "^9.21.0", + "@fluentui/react-theme": "^9.1.22", + "@fluentui/react-utilities": "^9.18.17", + "@griffel/react": "^1.5.22", + "@swc/helpers": "^0.5.1" + }, + "peerDependencies": { + "@types/react": ">=16.14.0 <19.0.0", + "@types/react-dom": ">=16.9.0 <19.0.0", + "react": ">=16.14.0 <19.0.0", + "react-dom": ">=16.14.0 <19.0.0" } }, - "node_modules/@swc/helpers": { - "version": "0.4.14", - "resolved": "https://registry.npmjs.org/@swc/helpers/-/helpers-0.4.14.tgz", - "integrity": "sha512-4C7nX/dvpzB7za4Ql9K81xK3HPxCpHMgwTZVyf+9JQ6VUbn9jjZVN7/Nkdz/Ugzs2CSjqnL/UPXroiVBVHUWUw==", + "node_modules/@fluentui/react-infobutton": { + "version": "9.0.0-beta.102", + "resolved": "https://registry.npmjs.org/@fluentui/react-infobutton/-/react-infobutton-9.0.0-beta.102.tgz", + "integrity": "sha512-3kA4F0Vga8Ds6JGlBajLCCDOo/LmPuS786Wg7ui4ZTDYVIMzy1yp2XuVcZniifBFvEp0HQCUoDPWUV0VI3FfzQ==", + "license": "MIT", "dependencies": { - "tslib": "^2.4.0" + "@fluentui/react-icons": "^2.0.237", + "@fluentui/react-jsx-runtime": "^9.0.36", + "@fluentui/react-label": "^9.1.68", + "@fluentui/react-popover": "^9.9.6", + "@fluentui/react-tabster": "^9.21.0", + "@fluentui/react-theme": "^9.1.19", + "@fluentui/react-utilities": "^9.18.7", + "@griffel/react": "^1.5.14", + "@swc/helpers": "^0.5.1" + }, + "peerDependencies": { + "@types/react": ">=16.14.0 <19.0.0", + "@types/react-dom": ">=16.9.0 <19.0.0", + "react": ">=16.14.0 <19.0.0", + "react-dom": ">=16.14.0 <19.0.0" } }, - "node_modules/@types/dompurify": { - "version": "3.0.2", - "resolved": "https://registry.npmjs.org/@types/dompurify/-/dompurify-3.0.2.tgz", - "integrity": "sha512-YBL4ziFebbbfQfH5mlC+QTJsvh0oJUrWbmxKMyEdL7emlHJqGR2Qb34TEFKj+VCayBvjKy3xczMFNhugThUsfQ==", - "dev": true, + "node_modules/@fluentui/react-infolabel": { + "version": "9.0.50", + "resolved": "https://registry.npmjs.org/@fluentui/react-infolabel/-/react-infolabel-9.0.50.tgz", + "integrity": "sha512-NrEFOD5An+aD4SGx1q0sGdqnMT5eVURigEDW1tm1HPk+Hl0bgmwSlwQwLw9ejfaC5g5SoPwFaVVM2VKLfn9qzw==", "dependencies": { - "@types/trusted-types": "*" + "@fluentui/react-icons": "^2.0.245", + "@fluentui/react-jsx-runtime": "^9.0.46", + "@fluentui/react-label": "^9.1.78", + "@fluentui/react-popover": "^9.9.25", + "@fluentui/react-tabster": "^9.23.0", + "@fluentui/react-theme": "^9.1.22", + "@fluentui/react-utilities": "^9.18.17", + 
"@griffel/react": "^1.5.22", + "@swc/helpers": "^0.5.1" + }, + "peerDependencies": { + "@types/react": ">=16.8.0 <19.0.0", + "@types/react-dom": ">=16.8.0 <19.0.0", + "react": ">=16.14.0 <19.0.0", + "react-dom": ">=16.8.0 <19.0.0" } }, - "node_modules/@types/prop-types": { - "version": "15.7.5", - "resolved": "https://registry.npmjs.org/@types/prop-types/-/prop-types-15.7.5.tgz", - "integrity": "sha512-JCB8C6SnDoQf0cNycqd/35A7MjcnK+ZTqE7judS6o7utxUCg6imJg3QK2qzHKszlTjcj2cn+NwMB2i96ubpj7w==" - }, - "node_modules/@types/react": { - "version": "18.2.14", - "resolved": "https://registry.npmjs.org/@types/react/-/react-18.2.14.tgz", - "integrity": "sha512-A0zjq+QN/O0Kpe30hA1GidzyFjatVvrpIvWLxD+xv67Vt91TWWgco9IvrJBkeyHm1trGaFS/FSGqPlhyeZRm0g==", + "node_modules/@fluentui/react-input": { + "version": "9.4.93", + "resolved": "https://registry.npmjs.org/@fluentui/react-input/-/react-input-9.4.93.tgz", + "integrity": "sha512-lKxB2mWYzN5bAGlYS1BMUISdAoNqKtW4d+s6vUf8lJdMFyQK4iC7QtcbS4x9FTQnSDV6cfVogp5k8JvUWs1Hww==", "dependencies": { - "@types/prop-types": "*", - "@types/scheduler": "*", - "csstype": "^3.0.2" + "@fluentui/react-field": "^9.1.80", + "@fluentui/react-jsx-runtime": "^9.0.46", + "@fluentui/react-shared-contexts": "^9.21.0", + "@fluentui/react-theme": "^9.1.22", + "@fluentui/react-utilities": "^9.18.17", + "@griffel/react": "^1.5.22", + "@swc/helpers": "^0.5.1" + }, + "peerDependencies": { + "@types/react": ">=16.14.0 <19.0.0", + "@types/react-dom": ">=16.9.0 <19.0.0", + "react": ">=16.14.0 <19.0.0", + "react-dom": ">=16.14.0 <19.0.0" } }, - "node_modules/@types/react-dom": { - "version": "18.2.6", - "resolved": "https://registry.npmjs.org/@types/react-dom/-/react-dom-18.2.6.tgz", - "integrity": "sha512-2et4PDvg6PVCyS7fuTc4gPoksV58bW0RwSxWKcPRcHZf0PRUGq03TKcD/rUHe3azfV6/5/biUBJw+HhCQjaP0A==", + "node_modules/@fluentui/react-jsx-runtime": { + "version": "9.0.46", + "resolved": "https://registry.npmjs.org/@fluentui/react-jsx-runtime/-/react-jsx-runtime-9.0.46.tgz", + "integrity": "sha512-hdzwiRPnFQ8dqmqj/Xtep7SP2I+mx+OFsP5glzdDhTFL6au5yBbnUTgI6XEiSAbisBAhl2V2qsp0mJ55gxU+sg==", "dependencies": { - "@types/react": "*" + "@fluentui/react-utilities": "^9.18.17", + "@swc/helpers": "^0.5.1", + "react-is": "^17.0.2" + }, + "peerDependencies": { + "@types/react": ">=16.14.0 <19.0.0", + "react": ">=16.14.0 <19.0.0" } }, - "node_modules/@types/scheduler": { - "version": "0.16.2", - "resolved": "https://registry.npmjs.org/@types/scheduler/-/scheduler-0.16.2.tgz", - "integrity": "sha512-hppQEBDmlwhFAXKJX2KnWLYu5yMfi91yazPb2l+lbJiwW+wdo1gNeRA+3RgNSO39WYX2euey41KEwnqesU2Jew==" - }, - "node_modules/@types/trusted-types": { - "version": "2.0.3", - "resolved": "https://registry.npmjs.org/@types/trusted-types/-/trusted-types-2.0.3.tgz", - "integrity": "sha512-NfQ4gyz38SL8sDNrSixxU2Os1a5xcdFxipAFxYEuLUlvU2uDwS4NUpsImcf1//SlWItCVMMLiylsxbmNMToV/g==", - "dev": true - }, - "node_modules/@vitejs/plugin-react": { - "version": "4.0.2", - "resolved": "https://registry.npmjs.org/@vitejs/plugin-react/-/plugin-react-4.0.2.tgz", - "integrity": "sha512-zbnVp3Esfg33zDaoLrjxG+p/dPiOtpvJA+1oOEQwSxMMTRL9zi1eghIcd2WtLjkcKnPsa3S15LzS/OzDn2BOCA==", - "dev": true, + "node_modules/@fluentui/react-label": { + "version": "9.1.78", + "resolved": "https://registry.npmjs.org/@fluentui/react-label/-/react-label-9.1.78.tgz", + "integrity": "sha512-0Tv8Du78+lt17mjkAeoJRfsZgFVbfk2INiGVsQ2caN0n/r1IStbKQVqqWFSjyw//qpFdyw3FGOL9SalPmqIZMA==", "dependencies": { - "@babel/core": "^7.22.5", - "@babel/plugin-transform-react-jsx-self": "^7.22.5", - 
"@babel/plugin-transform-react-jsx-source": "^7.22.5", - "react-refresh": "^0.14.0" + "@fluentui/react-jsx-runtime": "^9.0.46", + "@fluentui/react-shared-contexts": "^9.21.0", + "@fluentui/react-theme": "^9.1.22", + "@fluentui/react-utilities": "^9.18.17", + "@griffel/react": "^1.5.22", + "@swc/helpers": "^0.5.1" }, - "engines": { - "node": "^14.18.0 || >=16.0.0" + "peerDependencies": { + "@types/react": ">=16.14.0 <19.0.0", + "@types/react-dom": ">=16.9.0 <19.0.0", + "react": ">=16.14.0 <19.0.0", + "react-dom": ">=16.14.0 <19.0.0" + } + }, + "node_modules/@fluentui/react-link": { + "version": "9.3.2", + "resolved": "https://registry.npmjs.org/@fluentui/react-link/-/react-link-9.3.2.tgz", + "integrity": "sha512-JIq2vhcqWug+GFw0EA5hVDXGzcRz4CBd/W/Mr9swlHIsA1BLMNxfHyIfZ6kZMT9IIQltWHK4CBFx2X/5co8DcA==", + "dependencies": { + "@fluentui/keyboard-keys": "^9.0.8", + "@fluentui/react-jsx-runtime": "^9.0.46", + "@fluentui/react-shared-contexts": "^9.21.0", + "@fluentui/react-tabster": "^9.23.0", + "@fluentui/react-theme": "^9.1.22", + "@fluentui/react-utilities": "^9.18.17", + "@griffel/react": "^1.5.22", + "@swc/helpers": "^0.5.1" }, "peerDependencies": { - "vite": "^4.2.0" + "@types/react": ">=16.14.0 <19.0.0", + "@types/react-dom": ">=16.9.0 <19.0.0", + "react": ">=16.14.0 <19.0.0", + "react-dom": ">=16.14.0 <19.0.0" } }, - "node_modules/ansi-styles": { - "version": "3.2.1", - "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-3.2.1.tgz", - "integrity": "sha512-VT0ZI6kZRdTh8YyJw3SMbYm/u+NqfsAxEpWO0Pf9sq8/e94WxxOpPKx9FR1FlyCtOVDNOQ+8ntlqFxiRc+r5qA==", - "dev": true, + "node_modules/@fluentui/react-menu": { + "version": "9.14.20", + "resolved": "https://registry.npmjs.org/@fluentui/react-menu/-/react-menu-9.14.20.tgz", + "integrity": "sha512-zinFHhQi2bwhv7GL8JXHwAfRYWw3hJhlUuWejLGQK1QbmwPlBHN6UCKhhIvF+RwEJbzeoyqvZcAusiHjmCp6rw==", "dependencies": { - "color-convert": "^1.9.0" + "@fluentui/keyboard-keys": "^9.0.8", + "@fluentui/react-aria": "^9.13.9", + "@fluentui/react-context-selector": "^9.1.69", + "@fluentui/react-icons": "^2.0.245", + "@fluentui/react-jsx-runtime": "^9.0.46", + "@fluentui/react-portal": "^9.4.38", + "@fluentui/react-positioning": "^9.15.12", + "@fluentui/react-shared-contexts": "^9.21.0", + "@fluentui/react-tabster": "^9.23.0", + "@fluentui/react-theme": "^9.1.22", + "@fluentui/react-utilities": "^9.18.17", + "@griffel/react": "^1.5.22", + "@swc/helpers": "^0.5.1" }, - "engines": { - "node": ">=4" + "peerDependencies": { + "@types/react": ">=16.14.0 <19.0.0", + "@types/react-dom": ">=16.9.0 <19.0.0", + "react": ">=16.14.0 <19.0.0", + "react-dom": ">=16.14.0 <19.0.0" } }, - "node_modules/browserslist": { - "version": "4.21.9", - "resolved": "https://registry.npmjs.org/browserslist/-/browserslist-4.21.9.tgz", - "integrity": "sha512-M0MFoZzbUrRU4KNfCrDLnvyE7gub+peetoTid3TBIqtunaDJyXlwhakT+/VkvSXcfIzFfK/nkCs4nmyTmxdNSg==", - "dev": true, - "funding": [ - { + "node_modules/@fluentui/react-message-bar": { + "version": "9.2.15", + "resolved": "https://registry.npmjs.org/@fluentui/react-message-bar/-/react-message-bar-9.2.15.tgz", + "integrity": "sha512-+FPH3ciNjTWVk9hGIeo/G8QGHf/q+tFLle4g9hXuOuDuzuaHNK6g7SkXTLm0fiZVrkB3xhFZV5ZnfehiN93S1w==", + "dependencies": { + "@fluentui/react-button": "^9.3.95", + "@fluentui/react-icons": "^2.0.245", + "@fluentui/react-jsx-runtime": "^9.0.46", + "@fluentui/react-link": "^9.3.2", + "@fluentui/react-shared-contexts": "^9.21.0", + "@fluentui/react-theme": "^9.1.22", + "@fluentui/react-utilities": "^9.18.17", + "@griffel/react": 
"^1.5.22", + "@swc/helpers": "^0.5.1", + "react-transition-group": "^4.4.1" + }, + "peerDependencies": { + "@types/react": ">=16.8.0 <19.0.0", + "@types/react-dom": ">=16.8.0 <19.0.0", + "react": ">=16.14.0 <19.0.0", + "react-dom": ">=16.8.0 <19.0.0" + } + }, + "node_modules/@fluentui/react-motion": { + "version": "9.6.1", + "resolved": "https://registry.npmjs.org/@fluentui/react-motion/-/react-motion-9.6.1.tgz", + "integrity": "sha512-P/ZPEAXG24pGU/XY3vY6VOXxNMEztiN7lvJxqUHGDFbpMkgQwCOmfsBuNU4S6RLQy3PosbWfSsU/4N8Ga2XudQ==", + "dependencies": { + "@fluentui/react-shared-contexts": "^9.21.0", + "@fluentui/react-utilities": "^9.18.17", + "@swc/helpers": "^0.5.1", + "react-is": "^17.0.2" + }, + "peerDependencies": { + "@types/react": ">=16.8.0 <19.0.0", + "@types/react-dom": ">=16.8.0 <19.0.0", + "react": ">=16.14.0 <19.0.0", + "react-dom": ">=16.8.0 <19.0.0" + } + }, + "node_modules/@fluentui/react-motion-components-preview": { + "version": "0.3.0", + "resolved": "https://registry.npmjs.org/@fluentui/react-motion-components-preview/-/react-motion-components-preview-0.3.0.tgz", + "integrity": "sha512-N888xO727bSogyH0WUSW2pkjQ2vXEpyDa0Ygj+4XQaTfHz8DecDiKfM83zUpQ7pZOhx8eQPUP76flijm+iVm8w==", + "dependencies": { + "@fluentui/react-motion": "*", + "@swc/helpers": "^0.5.1" + }, + "peerDependencies": { + "@types/react": ">=16.14.0 <19.0.0", + "@types/react-dom": ">=16.9.0 <19.0.0", + "react": ">=16.14.0 <19.0.0", + "react-dom": ">=16.14.0 <19.0.0" + } + }, + "node_modules/@fluentui/react-overflow": { + "version": "9.2.1", + "resolved": "https://registry.npmjs.org/@fluentui/react-overflow/-/react-overflow-9.2.1.tgz", + "integrity": "sha512-6u+bP9PV1RedOSDgL+cHs4o3GRRWlEpKTtjeDSgs+nI5fkfN6bF+J70Uk5QksWDUBydMbkSbsD4Ta5+U2G6yww==", + "dependencies": { + "@fluentui/priority-overflow": "^9.1.14", + "@fluentui/react-context-selector": "^9.1.69", + "@fluentui/react-theme": "^9.1.22", + "@fluentui/react-utilities": "^9.18.17", + "@griffel/react": "^1.5.22", + "@swc/helpers": "^0.5.1" + }, + "peerDependencies": { + "@types/react": ">=16.14.0 <19.0.0", + "@types/react-dom": ">=16.9.0 <19.0.0", + "react": ">=16.14.0 <19.0.0", + "react-dom": ">=16.14.0 <19.0.0" + } + }, + "node_modules/@fluentui/react-persona": { + "version": "9.2.102", + "resolved": "https://registry.npmjs.org/@fluentui/react-persona/-/react-persona-9.2.102.tgz", + "integrity": "sha512-sIoKr2A/zMkFudmeO1+asG6FIItn0+FbKOXezgApHuucbq6iU8oKV8+OEHhCr/mHPulDAV8JZQYkhNHFhzSjdA==", + "dependencies": { + "@fluentui/react-avatar": "^9.6.43", + "@fluentui/react-badge": "^9.2.45", + "@fluentui/react-jsx-runtime": "^9.0.46", + "@fluentui/react-shared-contexts": "^9.21.0", + "@fluentui/react-theme": "^9.1.22", + "@fluentui/react-utilities": "^9.18.17", + "@griffel/react": "^1.5.22", + "@swc/helpers": "^0.5.1" + }, + "peerDependencies": { + "@types/react": ">=16.14.0 <19.0.0", + "@types/react-dom": ">=16.9.0 <19.0.0", + "react": ">=16.14.0 <19.0.0", + "react-dom": ">=16.14.0 <19.0.0" + } + }, + "node_modules/@fluentui/react-popover": { + "version": "9.9.25", + "resolved": "https://registry.npmjs.org/@fluentui/react-popover/-/react-popover-9.9.25.tgz", + "integrity": "sha512-QPhbD6MTDU6JuYZl0221IwqKEF3TEoNaL6kdAGnrltLuXVGX2pLr4LerHdbBORolfZZFo/JkKX644ay5X7BnvQ==", + "dependencies": { + "@fluentui/keyboard-keys": "^9.0.8", + "@fluentui/react-aria": "^9.13.9", + "@fluentui/react-context-selector": "^9.1.69", + "@fluentui/react-jsx-runtime": "^9.0.46", + "@fluentui/react-portal": "^9.4.38", + "@fluentui/react-positioning": "^9.15.12", + 
"@fluentui/react-shared-contexts": "^9.21.0", + "@fluentui/react-tabster": "^9.23.0", + "@fluentui/react-theme": "^9.1.22", + "@fluentui/react-utilities": "^9.18.17", + "@griffel/react": "^1.5.22", + "@swc/helpers": "^0.5.1" + }, + "peerDependencies": { + "@types/react": ">=16.14.0 <19.0.0", + "@types/react-dom": ">=16.9.0 <19.0.0", + "react": ">=16.14.0 <19.0.0", + "react-dom": ">=16.14.0 <19.0.0" + } + }, + "node_modules/@fluentui/react-portal": { + "version": "9.4.38", + "resolved": "https://registry.npmjs.org/@fluentui/react-portal/-/react-portal-9.4.38.tgz", + "integrity": "sha512-V4lvnjlmKqMloNK6tRXx7lDWR1g41ppFLAGMy+0KAMZRwvwiCNpWrr9oFVGTHqnh+3EuICgs1z0WiNUcbpviuA==", + "dependencies": { + "@fluentui/react-shared-contexts": "^9.21.0", + "@fluentui/react-tabster": "^9.23.0", + "@fluentui/react-utilities": "^9.18.17", + "@griffel/react": "^1.5.22", + "@swc/helpers": "^0.5.1", + "use-disposable": "^1.0.1" + }, + "peerDependencies": { + "@types/react": ">=16.14.0 <19.0.0", + "@types/react-dom": ">=16.9.0 <19.0.0", + "react": ">=16.14.0 <19.0.0", + "react-dom": ">=16.14.0 <19.0.0" + } + }, + "node_modules/@fluentui/react-portal-compat-context": { + "version": "9.0.9", + "license": "MIT", + "dependencies": { + "@swc/helpers": "^0.5.1" + }, + "peerDependencies": { + "@types/react": ">=16.14.0 <19.0.0", + "react": ">=16.14.0 <19.0.0" + } + }, + "node_modules/@fluentui/react-positioning": { + "version": "9.15.12", + "resolved": "https://registry.npmjs.org/@fluentui/react-positioning/-/react-positioning-9.15.12.tgz", + "integrity": "sha512-FqopxQpf8KibdovNFLNqcDzckMgaMO2EAwXhpzH1us1l9vNofVE33k0sGHr1kU+M9TXCKeJ9x31TdS5XzBMPzQ==", + "dependencies": { + "@floating-ui/devtools": "0.2.1", + "@floating-ui/dom": "^1.2.0", + "@fluentui/react-shared-contexts": "^9.21.0", + "@fluentui/react-theme": "^9.1.22", + "@fluentui/react-utilities": "^9.18.17", + "@griffel/react": "^1.5.22", + "@swc/helpers": "^0.5.1" + }, + "peerDependencies": { + "@types/react": ">=16.14.0 <19.0.0", + "@types/react-dom": ">=16.9.0 <19.0.0", + "react": ">=16.14.0 <19.0.0", + "react-dom": ">=16.14.0 <19.0.0" + } + }, + "node_modules/@fluentui/react-progress": { + "version": "9.1.91", + "resolved": "https://registry.npmjs.org/@fluentui/react-progress/-/react-progress-9.1.91.tgz", + "integrity": "sha512-7+po8q+kR30g6QutHIpro91l8NTkmSoOZRMuoiPesuIblqeoFPoywlBanJFvLRMAAQefILi0QaTri8+PtHFZwQ==", + "dependencies": { + "@fluentui/react-field": "^9.1.80", + "@fluentui/react-jsx-runtime": "^9.0.46", + "@fluentui/react-shared-contexts": "^9.21.0", + "@fluentui/react-theme": "^9.1.22", + "@fluentui/react-utilities": "^9.18.17", + "@griffel/react": "^1.5.22", + "@swc/helpers": "^0.5.1" + }, + "peerDependencies": { + "@types/react": ">=16.14.0 <19.0.0", + "@types/react-dom": ">=16.9.0 <19.0.0", + "react": ">=16.14.0 <19.0.0", + "react-dom": ">=16.14.0 <19.0.0" + } + }, + "node_modules/@fluentui/react-provider": { + "version": "9.18.0", + "resolved": "https://registry.npmjs.org/@fluentui/react-provider/-/react-provider-9.18.0.tgz", + "integrity": "sha512-qJS2D/g3h2GwAiw2V1uWLePpAG2CKP0Pg8/iKy6vCdeNgToOGTt7ZinJSNzVzdN1y6kE2Na1glTkDLDwBj9IKg==", + "dependencies": { + "@fluentui/react-icons": "^2.0.245", + "@fluentui/react-jsx-runtime": "^9.0.46", + "@fluentui/react-shared-contexts": "^9.21.0", + "@fluentui/react-tabster": "^9.23.0", + "@fluentui/react-theme": "^9.1.22", + "@fluentui/react-utilities": "^9.18.17", + "@griffel/core": "^1.16.0", + "@griffel/react": "^1.5.22", + "@swc/helpers": "^0.5.1" + }, + "peerDependencies": { + "@types/react": 
">=16.14.0 <19.0.0", + "@types/react-dom": ">=16.9.0 <19.0.0", + "react": ">=16.14.0 <19.0.0", + "react-dom": ">=16.14.0 <19.0.0" + } + }, + "node_modules/@fluentui/react-radio": { + "version": "9.2.36", + "resolved": "https://registry.npmjs.org/@fluentui/react-radio/-/react-radio-9.2.36.tgz", + "integrity": "sha512-G6sYBcT6tEHmXELPvSqzOd/CJeNv6X/IAgnyg9dvXQUw4gBwG7qYuVDQQPDyG+vncA//845eSOf+o8mvBIRUfQ==", + "dependencies": { + "@fluentui/react-field": "^9.1.80", + "@fluentui/react-jsx-runtime": "^9.0.46", + "@fluentui/react-label": "^9.1.78", + "@fluentui/react-shared-contexts": "^9.21.0", + "@fluentui/react-tabster": "^9.23.0", + "@fluentui/react-theme": "^9.1.22", + "@fluentui/react-utilities": "^9.18.17", + "@griffel/react": "^1.5.22", + "@swc/helpers": "^0.5.1" + }, + "peerDependencies": { + "@types/react": ">=16.14.0 <19.0.0", + "@types/react-dom": ">=16.9.0 <19.0.0", + "react": ">=16.14.0 <19.0.0", + "react-dom": ">=16.14.0 <19.0.0" + } + }, + "node_modules/@fluentui/react-rating": { + "version": "9.0.22", + "resolved": "https://registry.npmjs.org/@fluentui/react-rating/-/react-rating-9.0.22.tgz", + "integrity": "sha512-0mlOL2LDt1IrGOq3yIiM5niOk8Nmrip/Xef1Rnc4Q/X6EM66qwBk2fS0ZYtk4BXFlCn2sdsHeGwCy+6Dj7wgsQ==", + "dependencies": { + "@fluentui/react-icons": "^2.0.245", + "@fluentui/react-jsx-runtime": "^9.0.46", + "@fluentui/react-tabster": "^9.23.0", + "@fluentui/react-theme": "^9.1.22", + "@fluentui/react-utilities": "^9.18.17", + "@griffel/react": "^1.5.22", + "@swc/helpers": "^0.5.1" + }, + "peerDependencies": { + "@types/react": ">=16.8.0 <19.0.0", + "@types/react-dom": ">=16.8.0 <19.0.0", + "react": ">=16.14.0 <19.0.0", + "react-dom": ">=16.8.0 <19.0.0" + } + }, + "node_modules/@fluentui/react-search": { + "version": "9.0.22", + "resolved": "https://registry.npmjs.org/@fluentui/react-search/-/react-search-9.0.22.tgz", + "integrity": "sha512-+ZerMQVdnX7PhodaUF92SQTxv/6YJfcLQ/o6uJ2ppsYpBj8DX2bgWnmX7Ia0T9MReHHvIodRQXVTAFpJSBA+Gg==", + "dependencies": { + "@fluentui/react-icons": "^2.0.245", + "@fluentui/react-input": "^9.4.93", + "@fluentui/react-jsx-runtime": "^9.0.46", + "@fluentui/react-theme": "^9.1.22", + "@fluentui/react-utilities": "^9.18.17", + "@griffel/react": "^1.5.22", + "@swc/helpers": "^0.5.1" + }, + "peerDependencies": { + "@types/react": ">=16.14.0 <19.0.0", + "@types/react-dom": ">=16.9.0 <19.0.0", + "react": ">=16.14.0 <19.0.0", + "react-dom": ">=16.14.0 <19.0.0" + } + }, + "node_modules/@fluentui/react-select": { + "version": "9.1.91", + "resolved": "https://registry.npmjs.org/@fluentui/react-select/-/react-select-9.1.91.tgz", + "integrity": "sha512-mrQORisf6xWKrooCX6F7qqvcgDT7ei4YMtH5KHVa+sCRyy5CC0jOAVD513rj7ysAVxLKv9TSuF/rdx/Cmc7Kzw==", + "dependencies": { + "@fluentui/react-field": "^9.1.80", + "@fluentui/react-icons": "^2.0.245", + "@fluentui/react-jsx-runtime": "^9.0.46", + "@fluentui/react-shared-contexts": "^9.21.0", + "@fluentui/react-theme": "^9.1.22", + "@fluentui/react-utilities": "^9.18.17", + "@griffel/react": "^1.5.22", + "@swc/helpers": "^0.5.1" + }, + "peerDependencies": { + "@types/react": ">=16.14.0 <19.0.0", + "@types/react-dom": ">=16.9.0 <19.0.0", + "react": ">=16.14.0 <19.0.0", + "react-dom": ">=16.14.0 <19.0.0" + } + }, + "node_modules/@fluentui/react-shared-contexts": { + "version": "9.21.0", + "resolved": "https://registry.npmjs.org/@fluentui/react-shared-contexts/-/react-shared-contexts-9.21.0.tgz", + "integrity": "sha512-GtP9zM7wpZtKXnq6qMd8ww0IN+5ZctPClVz83zDA602rJTJjihGwkmJ1ga8f/YphOTKcE12dnRQDl4iRL5vJ4A==", + "dependencies": { + 
"@fluentui/react-theme": "^9.1.22", + "@swc/helpers": "^0.5.1" + }, + "peerDependencies": { + "@types/react": ">=16.14.0 <19.0.0", + "react": ">=16.14.0 <19.0.0" + } + }, + "node_modules/@fluentui/react-skeleton": { + "version": "9.1.20", + "resolved": "https://registry.npmjs.org/@fluentui/react-skeleton/-/react-skeleton-9.1.20.tgz", + "integrity": "sha512-nK1rJGTriJdXR9y820NHmLNRJ6YAiJUVGAtVb7OIi7KoX7/IXt/qY/xx91jnECaWHOPGzlNO+S4hxYkLiU80iQ==", + "dependencies": { + "@fluentui/react-field": "^9.1.80", + "@fluentui/react-jsx-runtime": "^9.0.46", + "@fluentui/react-shared-contexts": "^9.21.0", + "@fluentui/react-theme": "^9.1.22", + "@fluentui/react-utilities": "^9.18.17", + "@griffel/react": "^1.5.22", + "@swc/helpers": "^0.5.1" + }, + "peerDependencies": { + "@types/react": ">=16.14.0 <19.0.0", + "@types/react-dom": ">=16.9.0 <19.0.0", + "react": ">=16.14.0 <19.0.0", + "react-dom": ">=16.14.0 <19.0.0" + } + }, + "node_modules/@fluentui/react-slider": { + "version": "9.2.0", + "resolved": "https://registry.npmjs.org/@fluentui/react-slider/-/react-slider-9.2.0.tgz", + "integrity": "sha512-96oT573BxYns4+dgGLQOT5j/4QfNIebXelvrw13AfBRBV2+WZlAApnpPujaTzv+DA86c8l+M3tqzAz11kznHzQ==", + "dependencies": { + "@fluentui/react-field": "^9.1.80", + "@fluentui/react-jsx-runtime": "^9.0.46", + "@fluentui/react-shared-contexts": "^9.21.0", + "@fluentui/react-tabster": "^9.23.0", + "@fluentui/react-theme": "^9.1.22", + "@fluentui/react-utilities": "^9.18.17", + "@griffel/react": "^1.5.22", + "@swc/helpers": "^0.5.1" + }, + "peerDependencies": { + "@types/react": ">=16.14.0 <19.0.0", + "@types/react-dom": ">=16.9.0 <19.0.0", + "react": ">=16.14.0 <19.0.0", + "react-dom": ">=16.14.0 <19.0.0" + } + }, + "node_modules/@fluentui/react-spinbutton": { + "version": "9.2.92", + "resolved": "https://registry.npmjs.org/@fluentui/react-spinbutton/-/react-spinbutton-9.2.92.tgz", + "integrity": "sha512-lDfjsN1sj4ol4DEnlt1JJ0vKb8lmSMWSEWil1zgPL+wQyVCP389UsROWZuzWpUqa4PxBY78Z4LaAUQx8DM7Y8Q==", + "dependencies": { + "@fluentui/keyboard-keys": "^9.0.8", + "@fluentui/react-field": "^9.1.80", + "@fluentui/react-icons": "^2.0.245", + "@fluentui/react-jsx-runtime": "^9.0.46", + "@fluentui/react-shared-contexts": "^9.21.0", + "@fluentui/react-theme": "^9.1.22", + "@fluentui/react-utilities": "^9.18.17", + "@griffel/react": "^1.5.22", + "@swc/helpers": "^0.5.1" + }, + "peerDependencies": { + "@types/react": ">=16.14.0 <19.0.0", + "@types/react-dom": ">=16.9.0 <19.0.0", + "react": ">=16.14.0 <19.0.0", + "react-dom": ">=16.14.0 <19.0.0" + } + }, + "node_modules/@fluentui/react-spinner": { + "version": "9.5.2", + "resolved": "https://registry.npmjs.org/@fluentui/react-spinner/-/react-spinner-9.5.2.tgz", + "integrity": "sha512-eY6/WgrzTWFgebae5oE9/KS0TA7xrz9LRUccTEwcFBJQgrUFVUHo2jDNdIEaxzpWUGq0usCMQW10PFepnsKEqg==", + "dependencies": { + "@fluentui/react-jsx-runtime": "^9.0.46", + "@fluentui/react-label": "^9.1.78", + "@fluentui/react-shared-contexts": "^9.21.0", + "@fluentui/react-theme": "^9.1.22", + "@fluentui/react-utilities": "^9.18.17", + "@griffel/react": "^1.5.22", + "@swc/helpers": "^0.5.1" + }, + "peerDependencies": { + "@types/react": ">=16.14.0 <19.0.0", + "@types/react-dom": ">=16.9.0 <19.0.0", + "react": ">=16.14.0 <19.0.0", + "react-dom": ">=16.14.0 <19.0.0" + } + }, + "node_modules/@fluentui/react-swatch-picker": { + "version": "9.1.13", + "resolved": "https://registry.npmjs.org/@fluentui/react-swatch-picker/-/react-swatch-picker-9.1.13.tgz", + "integrity": 
"sha512-gegZCrF+JpPPGPo0GHeJK5267LdIuBQ7sV4b0kLMmIbdzEPe9OFykb5M3PdtSpVCbwbwCX1dVcXG5cQZhAKfVA==", + "dependencies": { + "@fluentui/react-context-selector": "^9.1.69", + "@fluentui/react-icons": "^2.0.245", + "@fluentui/react-jsx-runtime": "^9.0.46", + "@fluentui/react-shared-contexts": "^9.21.0", + "@fluentui/react-tabster": "^9.23.0", + "@fluentui/react-theme": "^9.1.22", + "@fluentui/react-utilities": "^9.18.17", + "@griffel/react": "^1.5.22", + "@swc/helpers": "^0.5.1" + }, + "peerDependencies": { + "@types/react": ">=16.8.0 <19.0.0", + "@types/react-dom": ">=16.8.0 <19.0.0", + "react": ">=16.14.0 <19.0.0", + "react-dom": ">=16.8.0 <19.0.0" + } + }, + "node_modules/@fluentui/react-switch": { + "version": "9.1.98", + "resolved": "https://registry.npmjs.org/@fluentui/react-switch/-/react-switch-9.1.98.tgz", + "integrity": "sha512-vvU2XVU9BVlJb6GGiDOOIJ/7q3XsfxuuUx6sA4ROWhHxFd+oPq3a7S5g6BhPfBZapIRDn4XjlSSxAnKxZFi8SA==", + "dependencies": { + "@fluentui/react-field": "^9.1.80", + "@fluentui/react-icons": "^2.0.245", + "@fluentui/react-jsx-runtime": "^9.0.46", + "@fluentui/react-label": "^9.1.78", + "@fluentui/react-shared-contexts": "^9.21.0", + "@fluentui/react-tabster": "^9.23.0", + "@fluentui/react-theme": "^9.1.22", + "@fluentui/react-utilities": "^9.18.17", + "@griffel/react": "^1.5.22", + "@swc/helpers": "^0.5.1" + }, + "peerDependencies": { + "@types/react": ">=16.14.0 <19.0.0", + "@types/react-dom": ">=16.9.0 <19.0.0", + "react": ">=16.14.0 <19.0.0", + "react-dom": ">=16.14.0 <19.0.0" + } + }, + "node_modules/@fluentui/react-table": { + "version": "9.15.22", + "resolved": "https://registry.npmjs.org/@fluentui/react-table/-/react-table-9.15.22.tgz", + "integrity": "sha512-XQEmigbpWvDBHJQILcWMa9aJ4Nskt3D8t00GPuVeuSJP+1pW7aAz6MHYzDOeeVSDj1P8nk7sTSUss3TNd4VP5g==", + "dependencies": { + "@fluentui/keyboard-keys": "^9.0.8", + "@fluentui/react-aria": "^9.13.9", + "@fluentui/react-avatar": "^9.6.43", + "@fluentui/react-checkbox": "^9.2.41", + "@fluentui/react-context-selector": "^9.1.69", + "@fluentui/react-icons": "^2.0.245", + "@fluentui/react-jsx-runtime": "^9.0.46", + "@fluentui/react-radio": "^9.2.36", + "@fluentui/react-shared-contexts": "^9.21.0", + "@fluentui/react-tabster": "^9.23.0", + "@fluentui/react-theme": "^9.1.22", + "@fluentui/react-utilities": "^9.18.17", + "@griffel/react": "^1.5.22", + "@swc/helpers": "^0.5.1" + }, + "peerDependencies": { + "@types/react": ">=16.14.0 <19.0.0", + "@types/react-dom": ">=16.9.0 <19.0.0", + "react": ">=16.14.0 <19.0.0", + "react-dom": ">=16.14.0 <19.0.0" + } + }, + "node_modules/@fluentui/react-tabs": { + "version": "9.6.2", + "resolved": "https://registry.npmjs.org/@fluentui/react-tabs/-/react-tabs-9.6.2.tgz", + "integrity": "sha512-RjlKoF+QzfZ3FN7y+NIgcTcwPqecZYGxV7ij1HeWH05wkQcT+SFnu5GEeMfN05Snia/85zDdtiwSjHW4rllm4Q==", + "dependencies": { + "@fluentui/react-context-selector": "^9.1.69", + "@fluentui/react-jsx-runtime": "^9.0.46", + "@fluentui/react-shared-contexts": "^9.21.0", + "@fluentui/react-tabster": "^9.23.0", + "@fluentui/react-theme": "^9.1.22", + "@fluentui/react-utilities": "^9.18.17", + "@griffel/react": "^1.5.22", + "@swc/helpers": "^0.5.1" + }, + "peerDependencies": { + "@types/react": ">=16.14.0 <19.0.0", + "@types/react-dom": ">=16.9.0 <19.0.0", + "react": ">=16.14.0 <19.0.0", + "react-dom": ">=16.14.0 <19.0.0" + } + }, + "node_modules/@fluentui/react-tabster": { + "version": "9.23.0", + "resolved": "https://registry.npmjs.org/@fluentui/react-tabster/-/react-tabster-9.23.0.tgz", + "integrity": 
"sha512-YW9CcDDc4S2wV/fMex5VMZ+Nudxz0X67smSPo29sUFtCowEomZ+PRNbUhGkAgizrm7gTUCs+ITdvxm0vpl+bcQ==", + "dependencies": { + "@fluentui/react-shared-contexts": "^9.21.0", + "@fluentui/react-theme": "^9.1.22", + "@fluentui/react-utilities": "^9.18.17", + "@griffel/react": "^1.5.22", + "@swc/helpers": "^0.5.1", + "keyborg": "^2.6.0", + "tabster": "^8.2.0" + }, + "peerDependencies": { + "@types/react": ">=16.14.0 <19.0.0", + "@types/react-dom": ">=16.9.0 <19.0.0", + "react": ">=16.14.0 <19.0.0", + "react-dom": ">=16.14.0 <19.0.0" + } + }, + "node_modules/@fluentui/react-tag-picker": { + "version": "9.3.9", + "resolved": "https://registry.npmjs.org/@fluentui/react-tag-picker/-/react-tag-picker-9.3.9.tgz", + "integrity": "sha512-CX8+dbd3UX2Z2vy1guduBUPzqc9vVvEcyB4LSKkTjin8s2QH4+uip7oWA6ba6EpueFIocbE3X3+BYRiwoo01LA==", + "dependencies": { + "@fluentui/keyboard-keys": "^9.0.8", + "@fluentui/react-aria": "^9.13.9", + "@fluentui/react-combobox": "^9.13.12", + "@fluentui/react-context-selector": "^9.1.69", + "@fluentui/react-field": "^9.1.80", + "@fluentui/react-icons": "^2.0.245", + "@fluentui/react-jsx-runtime": "^9.0.46", + "@fluentui/react-portal": "^9.4.38", + "@fluentui/react-positioning": "^9.15.12", + "@fluentui/react-shared-contexts": "^9.21.0", + "@fluentui/react-tabster": "^9.23.0", + "@fluentui/react-tags": "^9.3.23", + "@fluentui/react-theme": "^9.1.22", + "@fluentui/react-utilities": "^9.18.17", + "@griffel/react": "^1.5.22", + "@swc/helpers": "^0.5.1" + }, + "peerDependencies": { + "@types/react": ">=16.14.0 <19.0.0", + "@types/react-dom": ">=16.9.0 <19.0.0", + "react": ">=16.14.0 <19.0.0", + "react-dom": ">=16.14.0 <19.0.0" + } + }, + "node_modules/@fluentui/react-tags": { + "version": "9.3.23", + "resolved": "https://registry.npmjs.org/@fluentui/react-tags/-/react-tags-9.3.23.tgz", + "integrity": "sha512-XX9NcAqBqkhTrbP2iYFp9LGA0NG5ZDf5X8FxtD+uUyDo+P9v6m6Tqqd0EHYtGB26aZLHTZWZTJpuq6klx/KdAQ==", + "dependencies": { + "@fluentui/keyboard-keys": "^9.0.8", + "@fluentui/react-aria": "^9.13.9", + "@fluentui/react-avatar": "^9.6.43", + "@fluentui/react-icons": "^2.0.245", + "@fluentui/react-jsx-runtime": "^9.0.46", + "@fluentui/react-shared-contexts": "^9.21.0", + "@fluentui/react-tabster": "^9.23.0", + "@fluentui/react-theme": "^9.1.22", + "@fluentui/react-utilities": "^9.18.17", + "@griffel/react": "^1.5.22", + "@swc/helpers": "^0.5.1" + }, + "peerDependencies": { + "@types/react": ">=16.14.0 <19.0.0", + "@types/react-dom": ">=16.9.0 <19.0.0", + "react": ">=16.14.0 <19.0.0", + "react-dom": ">=16.14.0 <19.0.0" + } + }, + "node_modules/@fluentui/react-teaching-popover": { + "version": "9.1.22", + "resolved": "https://registry.npmjs.org/@fluentui/react-teaching-popover/-/react-teaching-popover-9.1.22.tgz", + "integrity": "sha512-chzQ251KL19FPi1VRGiDMYLu/BnTUhMEyes2vaCyX8oZwcxvu37N/1PIQcbd9KCPN0kXX4TY3wVLZI8CFfporA==", + "dependencies": { + "@fluentui/react-aria": "^9.13.9", + "@fluentui/react-button": "^9.3.95", + "@fluentui/react-context-selector": "^9.1.69", + "@fluentui/react-icons": "^2.0.245", + "@fluentui/react-jsx-runtime": "^9.0.46", + "@fluentui/react-popover": "^9.9.25", + "@fluentui/react-shared-contexts": "^9.21.0", + "@fluentui/react-tabster": "^9.23.0", + "@fluentui/react-theme": "^9.1.22", + "@fluentui/react-utilities": "^9.18.17", + "@griffel/react": "^1.5.22", + "@swc/helpers": "^0.5.1", + "use-sync-external-store": "^1.2.0" + }, + "peerDependencies": { + "@types/react": ">=16.8.0 <19.0.0", + "@types/react-dom": ">=16.8.0 <19.0.0", + "react": ">=16.14.0 <19.0.0", + "react-dom": 
">=16.8.0 <19.0.0" + } + }, + "node_modules/@fluentui/react-text": { + "version": "9.4.27", + "resolved": "https://registry.npmjs.org/@fluentui/react-text/-/react-text-9.4.27.tgz", + "integrity": "sha512-/a1/eibyGYcWsc5M0i32vOAD/zf2gD5lDjaLXSiwoerF+e0j7GLgjbTi63ZK3K3Sh2repTrW/nsAHhqbeQhMyw==", + "dependencies": { + "@fluentui/react-jsx-runtime": "^9.0.46", + "@fluentui/react-shared-contexts": "^9.21.0", + "@fluentui/react-theme": "^9.1.22", + "@fluentui/react-utilities": "^9.18.17", + "@griffel/react": "^1.5.22", + "@swc/helpers": "^0.5.1" + }, + "peerDependencies": { + "@types/react": ">=16.14.0 <19.0.0", + "@types/react-dom": ">=16.9.0 <19.0.0", + "react": ">=16.14.0 <19.0.0", + "react-dom": ">=16.14.0 <19.0.0" + } + }, + "node_modules/@fluentui/react-textarea": { + "version": "9.3.92", + "resolved": "https://registry.npmjs.org/@fluentui/react-textarea/-/react-textarea-9.3.92.tgz", + "integrity": "sha512-Vmv0l8rGs34pjNSUDPKazZVN2yiWbda0PWy9PhOTIZsl9DdcLwyLcge3tKHnxHBvqEz6c1VzKxgK3+liLaSxpg==", + "dependencies": { + "@fluentui/react-field": "^9.1.80", + "@fluentui/react-jsx-runtime": "^9.0.46", + "@fluentui/react-shared-contexts": "^9.21.0", + "@fluentui/react-theme": "^9.1.22", + "@fluentui/react-utilities": "^9.18.17", + "@griffel/react": "^1.5.22", + "@swc/helpers": "^0.5.1" + }, + "peerDependencies": { + "@types/react": ">=16.14.0 <19.0.0", + "@types/react-dom": ">=16.9.0 <19.0.0", + "react": ">=16.14.0 <19.0.0", + "react-dom": ">=16.14.0 <19.0.0" + } + }, + "node_modules/@fluentui/react-theme": { + "version": "9.1.22", + "resolved": "https://registry.npmjs.org/@fluentui/react-theme/-/react-theme-9.1.22.tgz", + "integrity": "sha512-+orOyOsI0I7m6ovkU20soe8BUOS6eESfVAr3iZ+P9NsqtnCRNnrkOnfEmuOIh+UkNhljEkY9pVUSF1JPq+XHtg==", + "dependencies": { + "@fluentui/tokens": "1.0.0-alpha.19", + "@swc/helpers": "^0.5.1" + } + }, + "node_modules/@fluentui/react-toast": { + "version": "9.3.59", + "resolved": "https://registry.npmjs.org/@fluentui/react-toast/-/react-toast-9.3.59.tgz", + "integrity": "sha512-42+MBvjkwCmEj46pvwN0+8HABXJ0tbm1gSuAlaiQO5zIO+xWCZKLeqlGtbJ2DH6G6ZcOwBkiOXioOLyRS7t03A==", + "dependencies": { + "@fluentui/keyboard-keys": "^9.0.8", + "@fluentui/react-aria": "^9.13.9", + "@fluentui/react-icons": "^2.0.245", + "@fluentui/react-jsx-runtime": "^9.0.46", + "@fluentui/react-motion": "^9.6.1", + "@fluentui/react-portal": "^9.4.38", + "@fluentui/react-shared-contexts": "^9.21.0", + "@fluentui/react-tabster": "^9.23.0", + "@fluentui/react-theme": "^9.1.22", + "@fluentui/react-utilities": "^9.18.17", + "@griffel/react": "^1.5.22", + "@swc/helpers": "^0.5.1" + }, + "peerDependencies": { + "@types/react": ">=16.14.0 <19.0.0", + "@types/react-dom": ">=16.9.0 <19.0.0", + "react": ">=16.14.0 <19.0.0", + "react-dom": ">=16.14.0 <19.0.0" + } + }, + "node_modules/@fluentui/react-toolbar": { + "version": "9.2.10", + "resolved": "https://registry.npmjs.org/@fluentui/react-toolbar/-/react-toolbar-9.2.10.tgz", + "integrity": "sha512-lTix5YU3u85JnI/ISSraNIQDdj3FX6n2Xuzd27lGC6cebpI799NsZVfaprwNr5ywOwLlJ/B+kQXflQMZAJ4NxA==", + "dependencies": { + "@fluentui/react-button": "^9.3.95", + "@fluentui/react-context-selector": "^9.1.69", + "@fluentui/react-divider": "^9.2.77", + "@fluentui/react-jsx-runtime": "^9.0.46", + "@fluentui/react-radio": "^9.2.36", + "@fluentui/react-shared-contexts": "^9.21.0", + "@fluentui/react-tabster": "^9.23.0", + "@fluentui/react-theme": "^9.1.22", + "@fluentui/react-utilities": "^9.18.17", + "@griffel/react": "^1.5.22", + "@swc/helpers": "^0.5.1" + }, + "peerDependencies": { + 
"@types/react": ">=16.14.0 <19.0.0", + "@types/react-dom": ">=16.9.0 <19.0.0", + "react": ">=16.14.0 <19.0.0", + "react-dom": ">=16.14.0 <19.0.0" + } + }, + "node_modules/@fluentui/react-tooltip": { + "version": "9.4.43", + "resolved": "https://registry.npmjs.org/@fluentui/react-tooltip/-/react-tooltip-9.4.43.tgz", + "integrity": "sha512-KUIrs7uxjC916HT6XJgCfcxoxlbABi6TlriOzi/aELh0Gu5zH/9UPgvKw5BzWQUUyFLpjVOBKjogqI5SdsQGRg==", + "dependencies": { + "@fluentui/keyboard-keys": "^9.0.8", + "@fluentui/react-jsx-runtime": "^9.0.46", + "@fluentui/react-portal": "^9.4.38", + "@fluentui/react-positioning": "^9.15.12", + "@fluentui/react-shared-contexts": "^9.21.0", + "@fluentui/react-tabster": "^9.23.0", + "@fluentui/react-theme": "^9.1.22", + "@fluentui/react-utilities": "^9.18.17", + "@griffel/react": "^1.5.22", + "@swc/helpers": "^0.5.1" + }, + "peerDependencies": { + "@types/react": ">=16.14.0 <19.0.0", + "@types/react-dom": ">=16.9.0 <19.0.0", + "react": ">=16.14.0 <19.0.0", + "react-dom": ">=16.14.0 <19.0.0" + } + }, + "node_modules/@fluentui/react-tree": { + "version": "9.8.6", + "resolved": "https://registry.npmjs.org/@fluentui/react-tree/-/react-tree-9.8.6.tgz", + "integrity": "sha512-iqT7wRz3uz/zgUkuxCc7LeDBhtVNmv2fA2e5AoEgcFGJRck3b97G9l8bqiyaitqt/1MXLCKOf0LlTqLpe7mVbQ==", + "dependencies": { + "@fluentui/keyboard-keys": "^9.0.8", + "@fluentui/react-aria": "^9.13.9", + "@fluentui/react-avatar": "^9.6.43", + "@fluentui/react-button": "^9.3.95", + "@fluentui/react-checkbox": "^9.2.41", + "@fluentui/react-context-selector": "^9.1.69", + "@fluentui/react-icons": "^2.0.245", + "@fluentui/react-jsx-runtime": "^9.0.46", + "@fluentui/react-motion": "^9.6.1", + "@fluentui/react-motion-components-preview": "^0.3.0", + "@fluentui/react-radio": "^9.2.36", + "@fluentui/react-shared-contexts": "^9.21.0", + "@fluentui/react-tabster": "^9.23.0", + "@fluentui/react-theme": "^9.1.22", + "@fluentui/react-utilities": "^9.18.17", + "@griffel/react": "^1.5.22", + "@swc/helpers": "^0.5.1" + }, + "peerDependencies": { + "@types/react": ">=16.14.0 <19.0.0", + "@types/react-dom": ">=16.9.0 <19.0.0", + "react": ">=16.14.0 <19.0.0", + "react-dom": ">=16.14.0 <19.0.0" + } + }, + "node_modules/@fluentui/react-utilities": { + "version": "9.18.17", + "resolved": "https://registry.npmjs.org/@fluentui/react-utilities/-/react-utilities-9.18.17.tgz", + "integrity": "sha512-xW3e+sNd14njyXX1ovI2I8Sz/kjuieGzEbMbduNQONERp6Doc4JItPyxXUgv20qZ8eFYO6AykcI+xCTpHRkiBA==", + "dependencies": { + "@fluentui/keyboard-keys": "^9.0.8", + "@fluentui/react-shared-contexts": "^9.21.0", + "@swc/helpers": "^0.5.1" + }, + "peerDependencies": { + "@types/react": ">=16.14.0 <19.0.0", + "react": ">=16.14.0 <19.0.0" + } + }, + "node_modules/@fluentui/react-virtualizer": { + "version": "9.0.0-alpha.87", + "resolved": "https://registry.npmjs.org/@fluentui/react-virtualizer/-/react-virtualizer-9.0.0-alpha.87.tgz", + "integrity": "sha512-NbeZ9COirzepBqSnUjfAJzgep7b9Z718Rqrr66vMFkBSKC5pfkeS4qrQIXyansNndSy6AUz8i0SI/JLGS8wyNw==", + "dependencies": { + "@fluentui/react-jsx-runtime": "^9.0.46", + "@fluentui/react-shared-contexts": "^9.21.0", + "@fluentui/react-utilities": "^9.18.17", + "@griffel/react": "^1.5.22", + "@swc/helpers": "^0.5.1" + }, + "peerDependencies": { + "@types/react": ">=16.14.0 <19.0.0", + "@types/react-dom": ">=16.9.0 <19.0.0", + "react": ">=16.14.0 <19.0.0", + "react-dom": ">=16.14.0 <19.0.0" + } + }, + "node_modules/@fluentui/react-window-provider": { + "version": "2.2.16", + "license": "MIT", + "dependencies": { + 
"@fluentui/set-version": "^8.2.12", + "tslib": "^2.1.0" + }, + "peerDependencies": { + "@types/react": ">=16.8.0 <19.0.0", + "react": ">=16.8.0 <19.0.0" + } + }, + "node_modules/@fluentui/set-version": { + "version": "8.2.12", + "license": "MIT", + "dependencies": { + "tslib": "^2.1.0" + } + }, + "node_modules/@fluentui/style-utilities": { + "version": "8.9.19", + "license": "MIT", + "dependencies": { + "@fluentui/merge-styles": "^8.5.13", + "@fluentui/set-version": "^8.2.12", + "@fluentui/theme": "^2.6.37", + "@fluentui/utilities": "^8.13.20", + "@microsoft/load-themed-styles": "^1.10.26", + "tslib": "^2.1.0" + } + }, + "node_modules/@fluentui/theme": { + "version": "2.6.37", + "license": "MIT", + "dependencies": { + "@fluentui/merge-styles": "^8.5.13", + "@fluentui/set-version": "^8.2.12", + "@fluentui/utilities": "^8.13.20", + "tslib": "^2.1.0" + }, + "peerDependencies": { + "@types/react": ">=16.8.0 <19.0.0", + "react": ">=16.8.0 <19.0.0" + } + }, + "node_modules/@fluentui/tokens": { + "version": "1.0.0-alpha.19", + "resolved": "https://registry.npmjs.org/@fluentui/tokens/-/tokens-1.0.0-alpha.19.tgz", + "integrity": "sha512-Y1MI/d/SVhheFglzG/hyyNynbUk9vby7yU4oMLbIlqNRyQw03hPE3LhHb1k9/EHAuLxRioezEcEhRfOD8ej8dQ==", + "dependencies": { + "@swc/helpers": "^0.5.1" + } + }, + "node_modules/@fluentui/utilities": { + "version": "8.13.20", + "license": "MIT", + "dependencies": { + "@fluentui/dom-utilities": "^2.2.12", + "@fluentui/merge-styles": "^8.5.13", + "@fluentui/set-version": "^8.2.12", + "tslib": "^2.1.0" + }, + "peerDependencies": { + "@types/react": ">=16.8.0 <19.0.0", + "react": ">=16.8.0 <19.0.0" + } + }, + "node_modules/@griffel/core": { + "version": "1.18.0", + "resolved": "https://registry.npmjs.org/@griffel/core/-/core-1.18.0.tgz", + "integrity": "sha512-3Dkn6f7ULeSzJ1wLyLfN1vc+v3q5shuEejeMe4XymBozQo0l35WIfH8FWcwB+Xrgip4fLLOy1p3sYN85gFGZxw==", + "license": "MIT", + "dependencies": { + "@emotion/hash": "^0.9.0", + "@griffel/style-types": "^1.2.0", + "csstype": "^3.1.3", + "rtl-css-js": "^1.16.1", + "stylis": "^4.2.0", + "tslib": "^2.1.0" + } + }, + "node_modules/@griffel/react": { + "version": "1.5.25", + "resolved": "https://registry.npmjs.org/@griffel/react/-/react-1.5.25.tgz", + "integrity": "sha512-ZGiCdn71VIX56fd3AxM7ouCxgClPvunOFIpXxFKebGJ94/rdj4sIbahuI1QBUFuU4/bqUyD6QonjDEpFBl9ORw==", + "license": "MIT", + "dependencies": { + "@griffel/core": "^1.18.0", + "tslib": "^2.1.0" + }, + "peerDependencies": { + "react": ">=16.8.0 <19.0.0" + } + }, + "node_modules/@griffel/style-types": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/@griffel/style-types/-/style-types-1.2.0.tgz", + "integrity": "sha512-x166MNw0vWe5l5qhinfNT4eyWOaP48iFzPyFOfIB0/BVidKTWsEe5PmqRJDDtrJFS3VHhd/tE0oM6tkEMh2tsg==", + "license": "MIT", + "dependencies": { + "csstype": "^3.1.3" + } + }, + "node_modules/@jridgewell/gen-mapping": { + "version": "0.3.5", + "resolved": "https://registry.npmjs.org/@jridgewell/gen-mapping/-/gen-mapping-0.3.5.tgz", + "integrity": "sha512-IzL8ZoEDIBRWEzlCcRhOaCupYyN5gdIK+Q6fbFdPDg6HqX6jpkItn7DFIpW9LQzXG6Df9sA7+OKnq0qlz/GaQg==", + "dev": true, + "dependencies": { + "@jridgewell/set-array": "^1.2.1", + "@jridgewell/sourcemap-codec": "^1.4.10", + "@jridgewell/trace-mapping": "^0.3.24" + }, + "engines": { + "node": ">=6.0.0" + } + }, + "node_modules/@jridgewell/resolve-uri": { + "version": "3.1.2", + "resolved": "https://registry.npmjs.org/@jridgewell/resolve-uri/-/resolve-uri-3.1.2.tgz", + "integrity": 
"sha512-bRISgCIjP20/tbWSPWMEi54QVPRZExkuD9lJL+UIxUKtwVJA8wW1Trb1jMs1RFXo1CBTNZ/5hpC9QvmKWdopKw==", + "dev": true, + "engines": { + "node": ">=6.0.0" + } + }, + "node_modules/@jridgewell/set-array": { + "version": "1.2.1", + "resolved": "https://registry.npmjs.org/@jridgewell/set-array/-/set-array-1.2.1.tgz", + "integrity": "sha512-R8gLRTZeyp03ymzP/6Lil/28tGeGEzhx1q2k703KGWRAI1VdvPIXdG70VJc2pAMw3NA6JKL5hhFu1sJX0Mnn/A==", + "dev": true, + "engines": { + "node": ">=6.0.0" + } + }, + "node_modules/@jridgewell/sourcemap-codec": { + "version": "1.5.0", + "resolved": "https://registry.npmjs.org/@jridgewell/sourcemap-codec/-/sourcemap-codec-1.5.0.tgz", + "integrity": "sha512-gv3ZRaISU3fjPAgNsriBRqGWQL6quFx04YMPW/zD8XMLsU32mhCCbfbO6KZFLjvYpCZ8zyDEgqsgf+PwPaM7GQ==", + "dev": true + }, + "node_modules/@jridgewell/trace-mapping": { + "version": "0.3.25", + "resolved": "https://registry.npmjs.org/@jridgewell/trace-mapping/-/trace-mapping-0.3.25.tgz", + "integrity": "sha512-vNk6aEwybGtawWmy/PzwnGDOjCkLWSD2wqvjGGAgOAwCGWySYXfYoxt00IJkTF+8Lb57DwOb3Aa0o9CApepiYQ==", + "dev": true, + "dependencies": { + "@jridgewell/resolve-uri": "^3.1.0", + "@jridgewell/sourcemap-codec": "^1.4.14" + } + }, + "node_modules/@microsoft/load-themed-styles": { + "version": "1.10.295", + "license": "MIT" + }, + "node_modules/@react-spring/animated": { + "version": "9.7.5", + "resolved": "https://registry.npmjs.org/@react-spring/animated/-/animated-9.7.5.tgz", + "integrity": "sha512-Tqrwz7pIlsSDITzxoLS3n/v/YCUHQdOIKtOJf4yL6kYVSDTSmVK1LI1Q3M/uu2Sx4X3pIWF3xLUhlsA6SPNTNg==", + "dependencies": { + "@react-spring/shared": "~9.7.5", + "@react-spring/types": "~9.7.5" + }, + "peerDependencies": { + "react": "^16.8.0 || ^17.0.0 || ^18.0.0" + } + }, + "node_modules/@react-spring/core": { + "version": "9.7.5", + "resolved": "https://registry.npmjs.org/@react-spring/core/-/core-9.7.5.tgz", + "integrity": "sha512-rmEqcxRcu7dWh7MnCcMXLvrf6/SDlSokLaLTxiPlAYi11nN3B5oiCUAblO72o+9z/87j2uzxa2Inm8UbLjXA+w==", + "dependencies": { + "@react-spring/animated": "~9.7.5", + "@react-spring/shared": "~9.7.5", + "@react-spring/types": "~9.7.5" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/react-spring/donate" + }, + "peerDependencies": { + "react": "^16.8.0 || ^17.0.0 || ^18.0.0" + } + }, + "node_modules/@react-spring/rafz": { + "version": "9.7.5", + "resolved": "https://registry.npmjs.org/@react-spring/rafz/-/rafz-9.7.5.tgz", + "integrity": "sha512-5ZenDQMC48wjUzPAm1EtwQ5Ot3bLIAwwqP2w2owG5KoNdNHpEJV263nGhCeKKmuA3vG2zLLOdu3or6kuDjA6Aw==" + }, + "node_modules/@react-spring/shared": { + "version": "9.7.5", + "resolved": "https://registry.npmjs.org/@react-spring/shared/-/shared-9.7.5.tgz", + "integrity": "sha512-wdtoJrhUeeyD/PP/zo+np2s1Z820Ohr/BbuVYv+3dVLW7WctoiN7std8rISoYoHpUXtbkpesSKuPIw/6U1w1Pw==", + "dependencies": { + "@react-spring/rafz": "~9.7.5", + "@react-spring/types": "~9.7.5" + }, + "peerDependencies": { + "react": "^16.8.0 || ^17.0.0 || ^18.0.0" + } + }, + "node_modules/@react-spring/types": { + "version": "9.7.5", + "resolved": "https://registry.npmjs.org/@react-spring/types/-/types-9.7.5.tgz", + "integrity": "sha512-HVj7LrZ4ReHWBimBvu2SKND3cDVUPWKLqRTmWe/fNY6o1owGOX0cAHbdPDTMelgBlVbrTKrre6lFkhqGZErK/g==" + }, + "node_modules/@react-spring/web": { + "version": "9.7.5", + "resolved": "https://registry.npmjs.org/@react-spring/web/-/web-9.7.5.tgz", + "integrity": "sha512-lmvqGwpe+CSttsWNZVr+Dg62adtKhauGwLyGE/RRyZ8AAMLgb9x3NDMA5RMElXo+IMyTkPp7nxTB8ZQlmhb6JQ==", + "dependencies": { + "@react-spring/animated": 
"~9.7.5", + "@react-spring/core": "~9.7.5", + "@react-spring/shared": "~9.7.5", + "@react-spring/types": "~9.7.5" + }, + "peerDependencies": { + "react": "^16.8.0 || ^17.0.0 || ^18.0.0", + "react-dom": "^16.8.0 || ^17.0.0 || ^18.0.0" + } + }, + "node_modules/@remix-run/router": { + "version": "1.21.0", + "resolved": "https://registry.npmjs.org/@remix-run/router/-/router-1.21.0.tgz", + "integrity": "sha512-xfSkCAchbdG5PnbrKqFWwia4Bi61nH+wm8wLEqfHDyp7Y3dZzgqS2itV8i4gAq9pC2HsTpwyBC6Ds8VHZ96JlA==", + "engines": { + "node": ">=14.0.0" + } + }, + "node_modules/@rollup/rollup-android-arm-eabi": { + "version": "4.22.4", + "resolved": "https://registry.npmjs.org/@rollup/rollup-android-arm-eabi/-/rollup-android-arm-eabi-4.22.4.tgz", + "integrity": "sha512-Fxamp4aEZnfPOcGA8KSNEohV8hX7zVHOemC8jVBoBUHu5zpJK/Eu3uJwt6BMgy9fkvzxDaurgj96F/NiLukF2w==", + "cpu": [ + "arm" + ], + "dev": true, + "optional": true, + "os": [ + "android" + ] + }, + "node_modules/@rollup/rollup-android-arm64": { + "version": "4.22.4", + "resolved": "https://registry.npmjs.org/@rollup/rollup-android-arm64/-/rollup-android-arm64-4.22.4.tgz", + "integrity": "sha512-VXoK5UMrgECLYaMuGuVTOx5kcuap1Jm8g/M83RnCHBKOqvPPmROFJGQaZhGccnsFtfXQ3XYa4/jMCJvZnbJBdA==", + "cpu": [ + "arm64" + ], + "dev": true, + "optional": true, + "os": [ + "android" + ] + }, + "node_modules/@rollup/rollup-darwin-arm64": { + "version": "4.22.4", + "resolved": "https://registry.npmjs.org/@rollup/rollup-darwin-arm64/-/rollup-darwin-arm64-4.22.4.tgz", + "integrity": "sha512-xMM9ORBqu81jyMKCDP+SZDhnX2QEVQzTcC6G18KlTQEzWK8r/oNZtKuZaCcHhnsa6fEeOBionoyl5JsAbE/36Q==", + "cpu": [ + "arm64" + ], + "dev": true, + "optional": true, + "os": [ + "darwin" + ] + }, + "node_modules/@rollup/rollup-darwin-x64": { + "version": "4.22.4", + "resolved": "https://registry.npmjs.org/@rollup/rollup-darwin-x64/-/rollup-darwin-x64-4.22.4.tgz", + "integrity": "sha512-aJJyYKQwbHuhTUrjWjxEvGnNNBCnmpHDvrb8JFDbeSH3m2XdHcxDd3jthAzvmoI8w/kSjd2y0udT+4okADsZIw==", + "cpu": [ + "x64" + ], + "dev": true, + "optional": true, + "os": [ + "darwin" + ] + }, + "node_modules/@rollup/rollup-linux-arm-gnueabihf": { + "version": "4.22.4", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm-gnueabihf/-/rollup-linux-arm-gnueabihf-4.22.4.tgz", + "integrity": "sha512-j63YtCIRAzbO+gC2L9dWXRh5BFetsv0j0va0Wi9epXDgU/XUi5dJKo4USTttVyK7fGw2nPWK0PbAvyliz50SCQ==", + "cpu": [ + "arm" + ], + "dev": true, + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@rollup/rollup-linux-arm-musleabihf": { + "version": "4.22.4", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm-musleabihf/-/rollup-linux-arm-musleabihf-4.22.4.tgz", + "integrity": "sha512-dJnWUgwWBX1YBRsuKKMOlXCzh2Wu1mlHzv20TpqEsfdZLb3WoJW2kIEsGwLkroYf24IrPAvOT/ZQ2OYMV6vlrg==", + "cpu": [ + "arm" + ], + "dev": true, + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@rollup/rollup-linux-arm64-gnu": { + "version": "4.22.4", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm64-gnu/-/rollup-linux-arm64-gnu-4.22.4.tgz", + "integrity": "sha512-AdPRoNi3NKVLolCN/Sp4F4N1d98c4SBnHMKoLuiG6RXgoZ4sllseuGioszumnPGmPM2O7qaAX/IJdeDU8f26Aw==", + "cpu": [ + "arm64" + ], + "dev": true, + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@rollup/rollup-linux-arm64-musl": { + "version": "4.22.4", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm64-musl/-/rollup-linux-arm64-musl-4.22.4.tgz", + "integrity": 
"sha512-Gl0AxBtDg8uoAn5CCqQDMqAx22Wx22pjDOjBdmG0VIWX3qUBHzYmOKh8KXHL4UpogfJ14G4wk16EQogF+v8hmA==", + "cpu": [ + "arm64" + ], + "dev": true, + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@rollup/rollup-linux-powerpc64le-gnu": { + "version": "4.22.4", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-powerpc64le-gnu/-/rollup-linux-powerpc64le-gnu-4.22.4.tgz", + "integrity": "sha512-3aVCK9xfWW1oGQpTsYJJPF6bfpWfhbRnhdlyhak2ZiyFLDaayz0EP5j9V1RVLAAxlmWKTDfS9wyRyY3hvhPoOg==", + "cpu": [ + "ppc64" + ], + "dev": true, + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@rollup/rollup-linux-riscv64-gnu": { + "version": "4.22.4", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-riscv64-gnu/-/rollup-linux-riscv64-gnu-4.22.4.tgz", + "integrity": "sha512-ePYIir6VYnhgv2C5Xe9u+ico4t8sZWXschR6fMgoPUK31yQu7hTEJb7bCqivHECwIClJfKgE7zYsh1qTP3WHUA==", + "cpu": [ + "riscv64" + ], + "dev": true, + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@rollup/rollup-linux-s390x-gnu": { + "version": "4.22.4", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-s390x-gnu/-/rollup-linux-s390x-gnu-4.22.4.tgz", + "integrity": "sha512-GqFJ9wLlbB9daxhVlrTe61vJtEY99/xB3C8e4ULVsVfflcpmR6c8UZXjtkMA6FhNONhj2eA5Tk9uAVw5orEs4Q==", + "cpu": [ + "s390x" + ], + "dev": true, + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@rollup/rollup-linux-x64-gnu": { + "version": "4.22.4", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-x64-gnu/-/rollup-linux-x64-gnu-4.22.4.tgz", + "integrity": "sha512-87v0ol2sH9GE3cLQLNEy0K/R0pz1nvg76o8M5nhMR0+Q+BBGLnb35P0fVz4CQxHYXaAOhE8HhlkaZfsdUOlHwg==", + "cpu": [ + "x64" + ], + "dev": true, + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@rollup/rollup-linux-x64-musl": { + "version": "4.22.4", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-x64-musl/-/rollup-linux-x64-musl-4.22.4.tgz", + "integrity": "sha512-UV6FZMUgePDZrFjrNGIWzDo/vABebuXBhJEqrHxrGiU6HikPy0Z3LfdtciIttEUQfuDdCn8fqh7wiFJjCNwO+g==", + "cpu": [ + "x64" + ], + "dev": true, + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@rollup/rollup-win32-arm64-msvc": { + "version": "4.22.4", + "resolved": "https://registry.npmjs.org/@rollup/rollup-win32-arm64-msvc/-/rollup-win32-arm64-msvc-4.22.4.tgz", + "integrity": "sha512-BjI+NVVEGAXjGWYHz/vv0pBqfGoUH0IGZ0cICTn7kB9PyjrATSkX+8WkguNjWoj2qSr1im/+tTGRaY+4/PdcQw==", + "cpu": [ + "arm64" + ], + "dev": true, + "optional": true, + "os": [ + "win32" + ] + }, + "node_modules/@rollup/rollup-win32-ia32-msvc": { + "version": "4.22.4", + "resolved": "https://registry.npmjs.org/@rollup/rollup-win32-ia32-msvc/-/rollup-win32-ia32-msvc-4.22.4.tgz", + "integrity": "sha512-SiWG/1TuUdPvYmzmYnmd3IEifzR61Tragkbx9D3+R8mzQqDBz8v+BvZNDlkiTtI9T15KYZhP0ehn3Dld4n9J5g==", + "cpu": [ + "ia32" + ], + "dev": true, + "optional": true, + "os": [ + "win32" + ] + }, + "node_modules/@rollup/rollup-win32-x64-msvc": { + "version": "4.22.4", + "resolved": "https://registry.npmjs.org/@rollup/rollup-win32-x64-msvc/-/rollup-win32-x64-msvc-4.22.4.tgz", + "integrity": "sha512-j8pPKp53/lq9lMXN57S8cFz0MynJk8OWNuUnXct/9KCpKU7DgU3bYMJhwWmcqC0UU29p8Lr0/7KEVcaM6bf47Q==", + "cpu": [ + "x64" + ], + "dev": true, + "optional": true, + "os": [ + "win32" + ] + }, + "node_modules/@swc/helpers": { + "version": "0.5.3", + "license": "Apache-2.0", + "dependencies": { + "tslib": "^2.4.0" + } + }, + "node_modules/@types/babel__core": { + "version": "7.20.5", + "resolved": 
"https://registry.npmjs.org/@types/babel__core/-/babel__core-7.20.5.tgz", + "integrity": "sha512-qoQprZvz5wQFJwMDqeseRXWv3rqMvhgpbXFfVyWhbx9X47POIA6i/+dXefEmZKoAgOaTdaIgNSMqMIU61yRyzA==", + "dev": true, + "dependencies": { + "@babel/parser": "^7.20.7", + "@babel/types": "^7.20.7", + "@types/babel__generator": "*", + "@types/babel__template": "*", + "@types/babel__traverse": "*" + } + }, + "node_modules/@types/babel__generator": { + "version": "7.6.8", + "resolved": "https://registry.npmjs.org/@types/babel__generator/-/babel__generator-7.6.8.tgz", + "integrity": "sha512-ASsj+tpEDsEiFr1arWrlN6V3mdfjRMZt6LtK/Vp/kreFLnr5QH5+DhvD5nINYZXzwJvXeGq+05iUXcAzVrqWtw==", + "dev": true, + "dependencies": { + "@babel/types": "^7.0.0" + } + }, + "node_modules/@types/babel__template": { + "version": "7.4.4", + "resolved": "https://registry.npmjs.org/@types/babel__template/-/babel__template-7.4.4.tgz", + "integrity": "sha512-h/NUaSyG5EyxBIp8YRxo4RMe2/qQgvyowRwVMzhYhBCONbW8PUsg4lkFMrhgZhUe5z3L3MiLDuvyJ/CaPa2A8A==", + "dev": true, + "dependencies": { + "@babel/parser": "^7.1.0", + "@babel/types": "^7.0.0" + } + }, + "node_modules/@types/babel__traverse": { + "version": "7.20.6", + "resolved": "https://registry.npmjs.org/@types/babel__traverse/-/babel__traverse-7.20.6.tgz", + "integrity": "sha512-r1bzfrm0tomOI8g1SzvCaQHo6Lcv6zu0EA+W2kHrt8dyrHQxGzBBL4kdkzIS+jBMV+EYcMAEAqXqYaLJq5rOZg==", + "dev": true, + "dependencies": { + "@babel/types": "^7.20.7" + } + }, + "node_modules/@types/debug": { + "version": "4.1.12", + "resolved": "https://registry.npmjs.org/@types/debug/-/debug-4.1.12.tgz", + "integrity": "sha512-vIChWdVG3LG1SMxEvI/AK+FWJthlrqlTu7fbrlywTkkaONwk/UAGaULXRlf8vkzFBLVm0zkMdCquhL5aOjhXPQ==", + "license": "MIT", + "dependencies": { + "@types/ms": "*" + } + }, + "node_modules/@types/dom-speech-recognition": { + "version": "0.0.4", + "resolved": "https://registry.npmjs.org/@types/dom-speech-recognition/-/dom-speech-recognition-0.0.4.tgz", + "integrity": "sha512-zf2GwV/G6TdaLwpLDcGTIkHnXf8JEf/viMux+khqKQKDa8/8BAUtXXZS563GnvJ4Fg0PBLGAaFf2GekEVSZ6GQ==", + "dev": true + }, + "node_modules/@types/dompurify": { + "version": "3.0.5", + "resolved": "https://registry.npmjs.org/@types/dompurify/-/dompurify-3.0.5.tgz", + "integrity": "sha512-1Wg0g3BtQF7sSb27fJQAKck1HECM6zV1EB66j8JH9i3LCjYabJa0FSdiSgsD5K/RbrsR0SiraKacLB+T8ZVYAg==", + "dev": true, + "dependencies": { + "@types/trusted-types": "*" + } + }, + "node_modules/@types/estree": { + "version": "1.0.5", + "resolved": "https://registry.npmjs.org/@types/estree/-/estree-1.0.5.tgz", + "integrity": "sha512-/kYRxGDLWzHOB7q+wtSUQlFrtcdUccpfy+X+9iMBpHK8QLLhx2wIPYuS5DYtR9Wa/YlZAbIovy7qVdB1Aq6Lyw==", + "license": "MIT" + }, + "node_modules/@types/estree-jsx": { + "version": "1.0.5", + "resolved": "https://registry.npmjs.org/@types/estree-jsx/-/estree-jsx-1.0.5.tgz", + "integrity": "sha512-52CcUVNFyfb1A2ALocQw/Dd1BQFNmSdkuC3BkZ6iqhdMfQz7JWOFRuJFloOzjk+6WijU56m9oKXFAXc7o3Towg==", + "license": "MIT", + "dependencies": { + "@types/estree": "*" + } + }, + "node_modules/@types/hast": { + "version": "2.3.7", + "license": "MIT", + "dependencies": { + "@types/unist": "^2" + } + }, + "node_modules/@types/mdast": { + "version": "4.0.4", + "resolved": "https://registry.npmjs.org/@types/mdast/-/mdast-4.0.4.tgz", + "integrity": "sha512-kGaNbPh1k7AFzgpud/gMdvIm5xuECykRR+JnWKQno9TAXVa6WIVCGTPvYGekIDL4uwCZQSYbUxNBSb1aUo79oA==", + "license": "MIT", + "dependencies": { + "@types/unist": "*" + } + }, + "node_modules/@types/ms": { + "version": "0.7.34", + "resolved": 
"https://registry.npmjs.org/@types/ms/-/ms-0.7.34.tgz", + "integrity": "sha512-nG96G3Wp6acyAgJqGasjODb+acrI7KltPiRxzHPXnP3NgI28bpQDRv53olbqGXbfcgF5aiiHmO3xpwEpS5Ld9g==", + "license": "MIT" + }, + "node_modules/@types/prop-types": { + "version": "15.7.5", + "license": "MIT" + }, + "node_modules/@types/react": { + "version": "18.3.12", + "resolved": "https://registry.npmjs.org/@types/react/-/react-18.3.12.tgz", + "integrity": "sha512-D2wOSq/d6Agt28q7rSI3jhU7G6aiuzljDGZ2hTZHIkrTLUI+AF3WMeKkEZ9nN2fkBAlcktT6vcZjDFiIhMYEQw==", + "dependencies": { + "@types/prop-types": "*", + "csstype": "^3.0.2" + } + }, + "node_modules/@types/react-dom": { + "version": "18.3.1", + "resolved": "https://registry.npmjs.org/@types/react-dom/-/react-dom-18.3.1.tgz", + "integrity": "sha512-qW1Mfv8taImTthu4KoXgDfLuk4bydU6Q/TkADnDWWHwi4NX4BR+LWfTp2sVmTqRrsHvyDDTelgelxJ+SsejKKQ==", + "dependencies": { + "@types/react": "*" + } + }, + "node_modules/@types/react-syntax-highlighter": { + "version": "15.5.13", + "resolved": "https://registry.npmjs.org/@types/react-syntax-highlighter/-/react-syntax-highlighter-15.5.13.tgz", + "integrity": "sha512-uLGJ87j6Sz8UaBAooU0T6lWJ0dBmjZgN1PZTrj05TNql2/XpC6+4HhMT5syIdFUUt+FASfCeLLv4kBygNU+8qA==", + "dev": true, + "dependencies": { + "@types/react": "*" + } + }, + "node_modules/@types/trusted-types": { + "version": "2.0.7", + "resolved": "https://registry.npmjs.org/@types/trusted-types/-/trusted-types-2.0.7.tgz", + "integrity": "sha512-ScaPdn1dQczgbl0QFTeTOmVHFULt394XJgOQNoyVhZ6r2vLnMLJfBPd53SB52T/3G36VI1/g2MZaX0cwDuXsfw==", + "devOptional": true, + "license": "MIT" + }, + "node_modules/@types/unist": { + "version": "2.0.7", + "license": "MIT" + }, + "node_modules/@ungap/structured-clone": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/@ungap/structured-clone/-/structured-clone-1.2.0.tgz", + "integrity": "sha512-zuVdFrMJiuCDQUMCzQaD6KL28MjnqqN8XnAqiEq9PNm/hCPTSGfrXCOfwj1ow4LFb/tNymJPwsNbVePc1xFqrQ==", + "license": "ISC" + }, + "node_modules/@vitejs/plugin-react": { + "version": "4.3.3", + "resolved": "https://registry.npmjs.org/@vitejs/plugin-react/-/plugin-react-4.3.3.tgz", + "integrity": "sha512-NooDe9GpHGqNns1i8XDERg0Vsg5SSYRhRxxyTGogUdkdNt47jal+fbuYi+Yfq6pzRCKXyoPcWisfxE6RIM3GKA==", + "dev": true, + "dependencies": { + "@babel/core": "^7.25.2", + "@babel/plugin-transform-react-jsx-self": "^7.24.7", + "@babel/plugin-transform-react-jsx-source": "^7.24.7", + "@types/babel__core": "^7.20.5", + "react-refresh": "^0.14.2" + }, + "engines": { + "node": "^14.18.0 || >=16.0.0" + }, + "peerDependencies": { + "vite": "^4.2.0 || ^5.0.0" + } + }, + "node_modules/ansi-regex": { + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.1.tgz", + "integrity": "sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=8" + } + }, + "node_modules/ansi-styles": { + "version": "3.2.1", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-3.2.1.tgz", + "integrity": "sha512-VT0ZI6kZRdTh8YyJw3SMbYm/u+NqfsAxEpWO0Pf9sq8/e94WxxOpPKx9FR1FlyCtOVDNOQ+8ntlqFxiRc+r5qA==", + "dev": true, + "dependencies": { + "color-convert": "^1.9.0" + }, + "engines": { + "node": ">=4" + } + }, + "node_modules/bail": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/bail/-/bail-2.0.2.tgz", + "integrity": "sha512-0xO6mYd7JB2YesxDKplafRpsiOzPt9V02ddPCLbY1xYGPOX24NTyN50qnUxgCPcSoYMhKpAuBTjQoRZCAkUDRw==", + "license": "MIT", + "funding": { + "type": 
"github", + "url": "https://github.com/sponsors/wooorm" + } + }, + "node_modules/browserslist": { + "version": "4.24.0", + "resolved": "https://registry.npmjs.org/browserslist/-/browserslist-4.24.0.tgz", + "integrity": "sha512-Rmb62sR1Zpjql25eSanFGEhAxcFwfA1K0GuQcLoaJBAcENegrQut3hYdhXFF1obQfiDyqIW/cLM5HSJ/9k884A==", + "dev": true, + "funding": [ + { + "type": "opencollective", + "url": "https://opencollective.com/browserslist" + }, + { + "type": "tidelift", + "url": "https://tidelift.com/funding/github/npm/browserslist" + }, + { + "type": "github", + "url": "https://github.com/sponsors/ai" + } + ], + "dependencies": { + "caniuse-lite": "^1.0.30001663", + "electron-to-chromium": "^1.5.28", + "node-releases": "^2.0.18", + "update-browserslist-db": "^1.1.0" + }, + "bin": { + "browserslist": "cli.js" + }, + "engines": { + "node": "^6 || ^7 || ^8 || ^9 || ^10 || ^11 || ^12 || >=13.7" + } + }, + "node_modules/caniuse-lite": { + "version": "1.0.30001667", + "resolved": "https://registry.npmjs.org/caniuse-lite/-/caniuse-lite-1.0.30001667.tgz", + "integrity": "sha512-7LTwJjcRkzKFmtqGsibMeuXmvFDfZq/nzIjnmgCGzKKRVzjD72selLDK1oPF/Oxzmt4fNcPvTDvGqSDG4tCALw==", + "dev": true, + "funding": [ + { "type": "opencollective", "url": "https://opencollective.com/browserslist" }, { - "type": "tidelift", - "url": "https://tidelift.com/funding/github/npm/browserslist" + "type": "tidelift", + "url": "https://tidelift.com/funding/github/npm/caniuse-lite" + }, + { + "type": "github", + "url": "https://github.com/sponsors/ai" + } + ] + }, + "node_modules/ccount": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/ccount/-/ccount-2.0.1.tgz", + "integrity": "sha512-eyrF0jiFpY+3drT6383f1qhkbGsLSifNAjA61IUjZjmLCWjItY6LB9ft9YhoDgwfmclB2zhu51Lc7+95b8NRAg==", + "license": "MIT", + "funding": { + "type": "github", + "url": "https://github.com/sponsors/wooorm" + } + }, + "node_modules/chalk": { + "version": "2.4.2", + "resolved": "https://registry.npmjs.org/chalk/-/chalk-2.4.2.tgz", + "integrity": "sha512-Mti+f9lpJNcwF4tWV8/OrTTtF1gZi+f8FqlyAdouralcFWFQWF2+NgCHShjkCb+IFBLq9buZwE1xckQU4peSuQ==", + "dev": true, + "dependencies": { + "ansi-styles": "^3.2.1", + "escape-string-regexp": "^1.0.5", + "supports-color": "^5.3.0" + }, + "engines": { + "node": ">=4" + } + }, + "node_modules/character-entities": { + "version": "1.2.4", + "license": "MIT", + "funding": { + "type": "github", + "url": "https://github.com/sponsors/wooorm" + } + }, + "node_modules/character-entities-html4": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/character-entities-html4/-/character-entities-html4-2.1.0.tgz", + "integrity": "sha512-1v7fgQRj6hnSwFpq1Eu0ynr/CDEw0rXo2B61qXrLNdHZmPKgb7fqS1a2JwF0rISo9q77jDI8VMEHoApn8qDoZA==", + "license": "MIT", + "funding": { + "type": "github", + "url": "https://github.com/sponsors/wooorm" + } + }, + "node_modules/character-entities-legacy": { + "version": "1.1.4", + "license": "MIT", + "funding": { + "type": "github", + "url": "https://github.com/sponsors/wooorm" + } + }, + "node_modules/character-reference-invalid": { + "version": "1.1.4", + "license": "MIT", + "funding": { + "type": "github", + "url": "https://github.com/sponsors/wooorm" + } + }, + "node_modules/cliui": { + "version": "8.0.1", + "resolved": "https://registry.npmjs.org/cliui/-/cliui-8.0.1.tgz", + "integrity": "sha512-BSeNnyus75C4//NQ9gQt1/csTXyo/8Sb+afLAkzAptFuMsod9HFokGNudZpi/oQV73hnVK+sR+5PVRMd+Dr7YQ==", + "dev": true, + "license": "ISC", + "dependencies": { + "string-width": "^4.2.0", + "strip-ansi": "^6.0.1", + 
"wrap-ansi": "^7.0.0" + }, + "engines": { + "node": ">=12" + } + }, + "node_modules/color-convert": { + "version": "1.9.3", + "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-1.9.3.tgz", + "integrity": "sha512-QfAUtd+vFdAtFQcC8CCyYt1fYWxSqAiK2cSD6zDB8N3cpsEBAvRxp9zOGg6G/SHHJYAT88/az/IuDGALsNVbGg==", + "dev": true, + "dependencies": { + "color-name": "1.1.3" + } + }, + "node_modules/color-name": { + "version": "1.1.3", + "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.3.tgz", + "integrity": "sha512-72fSenhMw2HZMTVHeCA9KCmpEIbzWiQsjN+BHcBbS9vr1mtt+vJjPdksIBNUmKAW8TFUDPJK5SUU3QhE9NEXDw==", + "dev": true + }, + "node_modules/comma-separated-tokens": { + "version": "1.0.8", + "license": "MIT", + "funding": { + "type": "github", + "url": "https://github.com/sponsors/wooorm" + } + }, + "node_modules/convert-source-map": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/convert-source-map/-/convert-source-map-2.0.0.tgz", + "integrity": "sha512-Kvp459HrV2FEJ1CAsi1Ku+MY3kasH19TFykTz2xWmMeq6bk2NU3XXvfJ+Q61m0xktWwt+1HSYf3JZsTms3aRJg==", + "dev": true + }, + "node_modules/cross-fetch": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/cross-fetch/-/cross-fetch-4.0.0.tgz", + "integrity": "sha512-e4a5N8lVvuLgAWgnCrLr2PP0YyDOTHa9H/Rj54dirp61qXnNq46m82bRhNqIA5VccJtWBvPTFRV3TtvHUKPB1g==", + "license": "MIT", + "dependencies": { + "node-fetch": "^2.6.12" + } + }, + "node_modules/csstype": { + "version": "3.1.3", + "resolved": "https://registry.npmjs.org/csstype/-/csstype-3.1.3.tgz", + "integrity": "sha512-M1uQkMl8rQK/szD0LNhtqxIPLpimGm8sOBwU7lLnCpSbTyY3yeU1Vc7l4KT5zT4s/yOxHH5O7tIuuLOCnLADRw==", + "license": "MIT" + }, + "node_modules/debug": { + "version": "4.3.5", + "resolved": "https://registry.npmjs.org/debug/-/debug-4.3.5.tgz", + "integrity": "sha512-pt0bNEmneDIvdL1Xsd9oDQ/wrQRkXDT4AUWlNZNPKvW5x/jyO9VFXkJUP07vQ2upmw5PlaITaPKc31jK13V+jg==", + "dependencies": { + "ms": "2.1.2" + }, + "engines": { + "node": ">=6.0" + }, + "peerDependenciesMeta": { + "supports-color": { + "optional": true + } + } + }, + "node_modules/decode-named-character-reference": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/decode-named-character-reference/-/decode-named-character-reference-1.0.2.tgz", + "integrity": "sha512-O8x12RzrUF8xyVcY0KJowWsmaJxQbmy0/EtnNtHRpsOcT7dFk5W598coHqBVpmWo1oQQfsCqfCmkZN5DJrZVdg==", + "license": "MIT", + "dependencies": { + "character-entities": "^2.0.0" + }, + "funding": { + "type": "github", + "url": "https://github.com/sponsors/wooorm" + } + }, + "node_modules/decode-named-character-reference/node_modules/character-entities": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/character-entities/-/character-entities-2.0.2.tgz", + "integrity": "sha512-shx7oQ0Awen/BRIdkjkvz54PnEEI/EjwXDSIZp86/KKdbafHh1Df/RYGBhn4hbe2+uKC9FnT5UCEdyPz3ai9hQ==", + "license": "MIT", + "funding": { + "type": "github", + "url": "https://github.com/sponsors/wooorm" + } + }, + "node_modules/define-lazy-prop": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/define-lazy-prop/-/define-lazy-prop-2.0.0.tgz", + "integrity": "sha512-Ds09qNh8yw3khSjiJjiUInaGX9xlqZDY7JVryGxdxV7NPeuqQfplOpQ66yJFZut3jLa5zOwkXw1g9EI2uKh4Og==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=8" + } + }, + "node_modules/dequal": { + "version": "2.0.3", + "resolved": "https://registry.npmjs.org/dequal/-/dequal-2.0.3.tgz", + "integrity": 
"sha512-0je+qPKHEMohvfRTCEo3CrPG6cAzAYgmzKyxRiYSSDkS6eGJdyVJm7WaYA5ECaAD9wLB2T4EEeymA5aFVcYXCA==", + "license": "MIT", + "engines": { + "node": ">=6" + } + }, + "node_modules/devlop": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/devlop/-/devlop-1.1.0.tgz", + "integrity": "sha512-RWmIqhcFf1lRYBvNmr7qTNuyCt/7/ns2jbpp1+PalgE/rDQcBT0fioSMUpJ93irlUhC5hrg4cYqe6U+0ImW0rA==", + "license": "MIT", + "dependencies": { + "dequal": "^2.0.0" + }, + "funding": { + "type": "github", + "url": "https://github.com/sponsors/wooorm" + } + }, + "node_modules/dom-helpers": { + "version": "5.2.1", + "resolved": "https://registry.npmjs.org/dom-helpers/-/dom-helpers-5.2.1.tgz", + "integrity": "sha512-nRCa7CK3VTrM2NmGkIy4cbK7IZlgBE/PYMn55rrXefr5xXDP0LdtfPnblFDoVdcAfslJ7or6iqAUnx0CCGIWQA==", + "dependencies": { + "@babel/runtime": "^7.8.7", + "csstype": "^3.0.2" + } + }, + "node_modules/dompurify": { + "version": "3.2.4", + "resolved": "https://registry.npmjs.org/dompurify/-/dompurify-3.2.4.tgz", + "integrity": "sha512-ysFSFEDVduQpyhzAob/kkuJjf5zWkZD8/A9ywSp1byueyuCfHamrCBa14/Oc2iiB0e51B+NpxSl5gmzn+Ms/mg==", + "license": "(MPL-2.0 OR Apache-2.0)", + "optionalDependencies": { + "@types/trusted-types": "^2.0.7" + } + }, + "node_modules/electron-to-chromium": { + "version": "1.5.32", + "resolved": "https://registry.npmjs.org/electron-to-chromium/-/electron-to-chromium-1.5.32.tgz", + "integrity": "sha512-M+7ph0VGBQqqpTT2YrabjNKSQ2fEl9PVx6AK3N558gDH9NO8O6XN9SXXFWRo9u9PbEg/bWq+tjXQr+eXmxubCw==", + "dev": true + }, + "node_modules/embla-carousel": { + "version": "8.4.0", + "resolved": "https://registry.npmjs.org/embla-carousel/-/embla-carousel-8.4.0.tgz", + "integrity": "sha512-sUzm4DGGsdZCom7LEO38Uu6C7oQoFfPorKDf/f7j2EeRCMhHSOt3CvF+pHCaI6N+x5Y8/tfLueJ0WZlgUREnew==" + }, + "node_modules/embla-carousel-autoplay": { + "version": "8.4.0", + "resolved": "https://registry.npmjs.org/embla-carousel-autoplay/-/embla-carousel-autoplay-8.4.0.tgz", + "integrity": "sha512-AJHXrnaY+Tf4tb/+oItmJSpz4P0WvS62GrW5Z4iFY3zsH0mkKcijzd04LIkj0P4DkTazIBEuXple+nUVmuMsrQ==", + "peerDependencies": { + "embla-carousel": "8.4.0" + } + }, + "node_modules/embla-carousel-fade": { + "version": "8.4.0", + "resolved": "https://registry.npmjs.org/embla-carousel-fade/-/embla-carousel-fade-8.4.0.tgz", + "integrity": "sha512-d2/Pk/gHnlLCwE0MuwjLxLn22ngTf1rS17KT+TsYctVCApvDvxwgn5bDrwSpwg4BZhO4+xIrWw293rAHjCDewQ==", + "peerDependencies": { + "embla-carousel": "8.4.0" + } + }, + "node_modules/emoji-regex": { + "version": "8.0.0", + "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-8.0.0.tgz", + "integrity": "sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A==", + "dev": true, + "license": "MIT" + }, + "node_modules/entities": { + "version": "4.5.0", + "resolved": "https://registry.npmjs.org/entities/-/entities-4.5.0.tgz", + "integrity": "sha512-V0hjH4dGPh9Ao5p0MoRY6BVqtwCjhz6vI5LT8AJ55H+4g9/4vbHx1I54fS0XuclLhDHArPQCiMjDxjaL8fPxhw==", + "license": "BSD-2-Clause", + "engines": { + "node": ">=0.12" + }, + "funding": { + "url": "https://github.com/fb55/entities?sponsor=1" + } + }, + "node_modules/esbuild": { + "version": "0.21.5", + "resolved": "https://registry.npmjs.org/esbuild/-/esbuild-0.21.5.tgz", + "integrity": "sha512-mg3OPMV4hXywwpoDxu3Qda5xCKQi+vCTZq8S9J/EpkhB2HzKXq4SNFZE3+NK93JYxc8VMSep+lOUSC/RVKaBqw==", + "dev": true, + "hasInstallScript": true, + "bin": { + "esbuild": "bin/esbuild" + }, + "engines": { + "node": ">=12" + }, + "optionalDependencies": { + "@esbuild/aix-ppc64": 
"0.21.5", + "@esbuild/android-arm": "0.21.5", + "@esbuild/android-arm64": "0.21.5", + "@esbuild/android-x64": "0.21.5", + "@esbuild/darwin-arm64": "0.21.5", + "@esbuild/darwin-x64": "0.21.5", + "@esbuild/freebsd-arm64": "0.21.5", + "@esbuild/freebsd-x64": "0.21.5", + "@esbuild/linux-arm": "0.21.5", + "@esbuild/linux-arm64": "0.21.5", + "@esbuild/linux-ia32": "0.21.5", + "@esbuild/linux-loong64": "0.21.5", + "@esbuild/linux-mips64el": "0.21.5", + "@esbuild/linux-ppc64": "0.21.5", + "@esbuild/linux-riscv64": "0.21.5", + "@esbuild/linux-s390x": "0.21.5", + "@esbuild/linux-x64": "0.21.5", + "@esbuild/netbsd-x64": "0.21.5", + "@esbuild/openbsd-x64": "0.21.5", + "@esbuild/sunos-x64": "0.21.5", + "@esbuild/win32-arm64": "0.21.5", + "@esbuild/win32-ia32": "0.21.5", + "@esbuild/win32-x64": "0.21.5" + } + }, + "node_modules/escalade": { + "version": "3.2.0", + "resolved": "https://registry.npmjs.org/escalade/-/escalade-3.2.0.tgz", + "integrity": "sha512-WUj2qlxaQtO4g6Pq5c29GTcWGDyd8itL8zTlipgECz3JesAiiOKotd8JU6otB3PACgG6xkJUyVhboMS+bje/jA==", + "dev": true, + "engines": { + "node": ">=6" + } + }, + "node_modules/escape-string-regexp": { + "version": "1.0.5", + "resolved": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-1.0.5.tgz", + "integrity": "sha512-vbRorB5FUQWvla16U8R/qgaFIya2qGzwDrNmCZuYKrbdSUMG6I1ZCGQRefkRVhuOkIGVne7BQ35DSfo1qvJqFg==", + "dev": true, + "engines": { + "node": ">=0.8.0" + } + }, + "node_modules/estree-util-is-identifier-name": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/estree-util-is-identifier-name/-/estree-util-is-identifier-name-3.0.0.tgz", + "integrity": "sha512-hFtqIDZTIUZ9BXLb8y4pYGyk6+wekIivNVTcmvk8NoOh+VeRn5y6cEHzbURrWbfp1fIqdVipilzj+lfaadNZmg==", + "license": "MIT", + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/extend": { + "version": "3.0.2", + "resolved": "https://registry.npmjs.org/extend/-/extend-3.0.2.tgz", + "integrity": "sha512-fjquC59cD7CyW6urNXK0FBufkZcoiGG80wTuPujX590cB5Ttln20E2UB4S/WARVqhXffZl2LNgS+gQdPIIim/g==", + "license": "MIT" + }, + "node_modules/fault": { + "version": "1.0.4", + "license": "MIT", + "dependencies": { + "format": "^0.2.0" + }, + "funding": { + "type": "github", + "url": "https://github.com/sponsors/wooorm" + } + }, + "node_modules/format": { + "version": "0.2.2", + "engines": { + "node": ">=0.4.x" + } + }, + "node_modules/fsevents": { + "version": "2.3.3", + "resolved": "https://registry.npmjs.org/fsevents/-/fsevents-2.3.3.tgz", + "integrity": "sha512-5xoDfX+fL7faATnagmWPpbFtwh/R77WmMMqqHGS65C3vvB0YHrgF+B1YmZ3441tMj5n63k0212XNoJwzlhffQw==", + "dev": true, + "hasInstallScript": true, + "optional": true, + "os": [ + "darwin" + ], + "engines": { + "node": "^8.16.0 || ^10.6.0 || >=11.0.0" + } + }, + "node_modules/gensync": { + "version": "1.0.0-beta.2", + "resolved": "https://registry.npmjs.org/gensync/-/gensync-1.0.0-beta.2.tgz", + "integrity": "sha512-3hN7NaskYvMDLQY55gnW3NQ+mesEAepTqlg+VEbj7zzqEMBVNhzcGYYeqFo/TlYz6eQiFcp1HcsCZO+nGgS8zg==", + "dev": true, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/get-caller-file": { + "version": "2.0.5", + "resolved": "https://registry.npmjs.org/get-caller-file/-/get-caller-file-2.0.5.tgz", + "integrity": "sha512-DyFP3BM/3YHTQOCUL/w0OZHR0lpKeGrxotcHWcqNEdnltqFwXVfhEBQ94eIo34AfQpo0rGki4cyIiftY06h2Fg==", + "dev": true, + "license": "ISC", + "engines": { + "node": "6.* || 8.* || >= 10.*" + } + }, + "node_modules/globals": { + "version": "11.12.0", + "resolved": 
"https://registry.npmjs.org/globals/-/globals-11.12.0.tgz", + "integrity": "sha512-WOBp/EEGUiIsJSp7wcv/y6MO+lV9UoncWqxuFfm8eBwzWNgyfBd6Gz+IeKQ9jCmyhoH99g15M3T+QaVHFjizVA==", + "dev": true, + "engines": { + "node": ">=4" + } + }, + "node_modules/has-flag": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-3.0.0.tgz", + "integrity": "sha512-sKJf1+ceQBr4SMkvQnBDNDtf4TXpVhVGateu0t918bl30FnbE2m4vNLX+VWe/dpjlb+HugGYzW7uQXH98HPEYw==", + "dev": true, + "engines": { + "node": ">=4" + } + }, + "node_modules/hast-util-from-parse5": { + "version": "8.0.1", + "resolved": "https://registry.npmjs.org/hast-util-from-parse5/-/hast-util-from-parse5-8.0.1.tgz", + "integrity": "sha512-Er/Iixbc7IEa7r/XLtuG52zoqn/b3Xng/w6aZQ0xGVxzhw5xUFxcRqdPzP6yFi/4HBYRaifaI5fQ1RH8n0ZeOQ==", + "license": "MIT", + "dependencies": { + "@types/hast": "^3.0.0", + "@types/unist": "^3.0.0", + "devlop": "^1.0.0", + "hastscript": "^8.0.0", + "property-information": "^6.0.0", + "vfile": "^6.0.0", + "vfile-location": "^5.0.0", + "web-namespaces": "^2.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/hast-util-from-parse5/node_modules/@types/hast": { + "version": "3.0.4", + "resolved": "https://registry.npmjs.org/@types/hast/-/hast-3.0.4.tgz", + "integrity": "sha512-WPs+bbQw5aCj+x6laNGWLH3wviHtoCv/P3+otBhbOhJgG8qtpdAMlTCxLtsTWA7LH1Oh/bFCHsBn0TPS5m30EQ==", + "license": "MIT", + "dependencies": { + "@types/unist": "*" + } + }, + "node_modules/hast-util-from-parse5/node_modules/@types/unist": { + "version": "3.0.3", + "resolved": "https://registry.npmjs.org/@types/unist/-/unist-3.0.3.tgz", + "integrity": "sha512-ko/gIFJRv177XgZsZcBwnqJN5x/Gien8qNOn0D5bQU/zAzVf9Zt3BlcUiLqhV9y4ARk0GbT3tnUiPNgnTXzc/Q==", + "license": "MIT" + }, + "node_modules/hast-util-from-parse5/node_modules/comma-separated-tokens": { + "version": "2.0.3", + "resolved": "https://registry.npmjs.org/comma-separated-tokens/-/comma-separated-tokens-2.0.3.tgz", + "integrity": "sha512-Fu4hJdvzeylCfQPp9SGWidpzrMs7tTrlu6Vb8XGaRGck8QSNZJJp538Wrb60Lax4fPwR64ViY468OIUTbRlGZg==", + "license": "MIT", + "funding": { + "type": "github", + "url": "https://github.com/sponsors/wooorm" + } + }, + "node_modules/hast-util-from-parse5/node_modules/hast-util-parse-selector": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/hast-util-parse-selector/-/hast-util-parse-selector-4.0.0.tgz", + "integrity": "sha512-wkQCkSYoOGCRKERFWcxMVMOcYE2K1AaNLU8DXS9arxnLOUEWbOXKXiJUNzEpqZ3JOKpnha3jkFrumEjVliDe7A==", + "license": "MIT", + "dependencies": { + "@types/hast": "^3.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/hast-util-from-parse5/node_modules/hastscript": { + "version": "8.0.0", + "resolved": "https://registry.npmjs.org/hastscript/-/hastscript-8.0.0.tgz", + "integrity": "sha512-dMOtzCEd3ABUeSIISmrETiKuyydk1w0pa+gE/uormcTpSYuaNJPbX1NU3JLyscSLjwAQM8bWMhhIlnCqnRvDTw==", + "license": "MIT", + "dependencies": { + "@types/hast": "^3.0.0", + "comma-separated-tokens": "^2.0.0", + "hast-util-parse-selector": "^4.0.0", + "property-information": "^6.0.0", + "space-separated-tokens": "^2.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/hast-util-from-parse5/node_modules/property-information": { + "version": "6.5.0", + "resolved": "https://registry.npmjs.org/property-information/-/property-information-6.5.0.tgz", + "integrity": 
"sha512-PgTgs/BlvHxOu8QuEN7wi5A0OmXaBcHpmCSTehcs6Uuu9IkDIEo13Hy7n898RHfrQ49vKCoGeWZSaAK01nwVig==", + "license": "MIT", + "funding": { + "type": "github", + "url": "https://github.com/sponsors/wooorm" + } + }, + "node_modules/hast-util-from-parse5/node_modules/space-separated-tokens": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/space-separated-tokens/-/space-separated-tokens-2.0.2.tgz", + "integrity": "sha512-PEGlAwrG8yXGXRjW32fGbg66JAlOAwbObuqVoJpv/mRgoWDQfgH1wDPvtzWyUSNAXBGSk8h755YDbbcEy3SH2Q==", + "license": "MIT", + "funding": { + "type": "github", + "url": "https://github.com/sponsors/wooorm" + } + }, + "node_modules/hast-util-parse-selector": { + "version": "2.2.5", + "license": "MIT", + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/hast-util-raw": { + "version": "9.0.4", + "resolved": "https://registry.npmjs.org/hast-util-raw/-/hast-util-raw-9.0.4.tgz", + "integrity": "sha512-LHE65TD2YiNsHD3YuXcKPHXPLuYh/gjp12mOfU8jxSrm1f/yJpsb0F/KKljS6U9LJoP0Ux+tCe8iJ2AsPzTdgA==", + "license": "MIT", + "dependencies": { + "@types/hast": "^3.0.0", + "@types/unist": "^3.0.0", + "@ungap/structured-clone": "^1.0.0", + "hast-util-from-parse5": "^8.0.0", + "hast-util-to-parse5": "^8.0.0", + "html-void-elements": "^3.0.0", + "mdast-util-to-hast": "^13.0.0", + "parse5": "^7.0.0", + "unist-util-position": "^5.0.0", + "unist-util-visit": "^5.0.0", + "vfile": "^6.0.0", + "web-namespaces": "^2.0.0", + "zwitch": "^2.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/hast-util-raw/node_modules/@types/hast": { + "version": "3.0.4", + "resolved": "https://registry.npmjs.org/@types/hast/-/hast-3.0.4.tgz", + "integrity": "sha512-WPs+bbQw5aCj+x6laNGWLH3wviHtoCv/P3+otBhbOhJgG8qtpdAMlTCxLtsTWA7LH1Oh/bFCHsBn0TPS5m30EQ==", + "license": "MIT", + "dependencies": { + "@types/unist": "*" + } + }, + "node_modules/hast-util-raw/node_modules/@types/unist": { + "version": "3.0.3", + "resolved": "https://registry.npmjs.org/@types/unist/-/unist-3.0.3.tgz", + "integrity": "sha512-ko/gIFJRv177XgZsZcBwnqJN5x/Gien8qNOn0D5bQU/zAzVf9Zt3BlcUiLqhV9y4ARk0GbT3tnUiPNgnTXzc/Q==", + "license": "MIT" + }, + "node_modules/hast-util-to-jsx-runtime": { + "version": "2.3.0", + "resolved": "https://registry.npmjs.org/hast-util-to-jsx-runtime/-/hast-util-to-jsx-runtime-2.3.0.tgz", + "integrity": "sha512-H/y0+IWPdsLLS738P8tDnrQ8Z+dj12zQQ6WC11TIM21C8WFVoIxcqWXf2H3hiTVZjF1AWqoimGwrTWecWrnmRQ==", + "license": "MIT", + "dependencies": { + "@types/estree": "^1.0.0", + "@types/hast": "^3.0.0", + "@types/unist": "^3.0.0", + "comma-separated-tokens": "^2.0.0", + "devlop": "^1.0.0", + "estree-util-is-identifier-name": "^3.0.0", + "hast-util-whitespace": "^3.0.0", + "mdast-util-mdx-expression": "^2.0.0", + "mdast-util-mdx-jsx": "^3.0.0", + "mdast-util-mdxjs-esm": "^2.0.0", + "property-information": "^6.0.0", + "space-separated-tokens": "^2.0.0", + "style-to-object": "^1.0.0", + "unist-util-position": "^5.0.0", + "vfile-message": "^4.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/hast-util-to-jsx-runtime/node_modules/@types/hast": { + "version": "3.0.4", + "resolved": "https://registry.npmjs.org/@types/hast/-/hast-3.0.4.tgz", + "integrity": "sha512-WPs+bbQw5aCj+x6laNGWLH3wviHtoCv/P3+otBhbOhJgG8qtpdAMlTCxLtsTWA7LH1Oh/bFCHsBn0TPS5m30EQ==", + "license": "MIT", + "dependencies": { + "@types/unist": "*" + } + }, + 
"node_modules/hast-util-to-jsx-runtime/node_modules/@types/unist": { + "version": "3.0.3", + "resolved": "https://registry.npmjs.org/@types/unist/-/unist-3.0.3.tgz", + "integrity": "sha512-ko/gIFJRv177XgZsZcBwnqJN5x/Gien8qNOn0D5bQU/zAzVf9Zt3BlcUiLqhV9y4ARk0GbT3tnUiPNgnTXzc/Q==", + "license": "MIT" + }, + "node_modules/hast-util-to-jsx-runtime/node_modules/comma-separated-tokens": { + "version": "2.0.3", + "resolved": "https://registry.npmjs.org/comma-separated-tokens/-/comma-separated-tokens-2.0.3.tgz", + "integrity": "sha512-Fu4hJdvzeylCfQPp9SGWidpzrMs7tTrlu6Vb8XGaRGck8QSNZJJp538Wrb60Lax4fPwR64ViY468OIUTbRlGZg==", + "license": "MIT", + "funding": { + "type": "github", + "url": "https://github.com/sponsors/wooorm" + } + }, + "node_modules/hast-util-to-jsx-runtime/node_modules/property-information": { + "version": "6.5.0", + "resolved": "https://registry.npmjs.org/property-information/-/property-information-6.5.0.tgz", + "integrity": "sha512-PgTgs/BlvHxOu8QuEN7wi5A0OmXaBcHpmCSTehcs6Uuu9IkDIEo13Hy7n898RHfrQ49vKCoGeWZSaAK01nwVig==", + "license": "MIT", + "funding": { + "type": "github", + "url": "https://github.com/sponsors/wooorm" + } + }, + "node_modules/hast-util-to-jsx-runtime/node_modules/space-separated-tokens": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/space-separated-tokens/-/space-separated-tokens-2.0.2.tgz", + "integrity": "sha512-PEGlAwrG8yXGXRjW32fGbg66JAlOAwbObuqVoJpv/mRgoWDQfgH1wDPvtzWyUSNAXBGSk8h755YDbbcEy3SH2Q==", + "license": "MIT", + "funding": { + "type": "github", + "url": "https://github.com/sponsors/wooorm" + } + }, + "node_modules/hast-util-to-parse5": { + "version": "8.0.0", + "resolved": "https://registry.npmjs.org/hast-util-to-parse5/-/hast-util-to-parse5-8.0.0.tgz", + "integrity": "sha512-3KKrV5ZVI8if87DVSi1vDeByYrkGzg4mEfeu4alwgmmIeARiBLKCZS2uw5Gb6nU9x9Yufyj3iudm6i7nl52PFw==", + "license": "MIT", + "dependencies": { + "@types/hast": "^3.0.0", + "comma-separated-tokens": "^2.0.0", + "devlop": "^1.0.0", + "property-information": "^6.0.0", + "space-separated-tokens": "^2.0.0", + "web-namespaces": "^2.0.0", + "zwitch": "^2.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/hast-util-to-parse5/node_modules/@types/hast": { + "version": "3.0.4", + "resolved": "https://registry.npmjs.org/@types/hast/-/hast-3.0.4.tgz", + "integrity": "sha512-WPs+bbQw5aCj+x6laNGWLH3wviHtoCv/P3+otBhbOhJgG8qtpdAMlTCxLtsTWA7LH1Oh/bFCHsBn0TPS5m30EQ==", + "license": "MIT", + "dependencies": { + "@types/unist": "*" + } + }, + "node_modules/hast-util-to-parse5/node_modules/comma-separated-tokens": { + "version": "2.0.3", + "resolved": "https://registry.npmjs.org/comma-separated-tokens/-/comma-separated-tokens-2.0.3.tgz", + "integrity": "sha512-Fu4hJdvzeylCfQPp9SGWidpzrMs7tTrlu6Vb8XGaRGck8QSNZJJp538Wrb60Lax4fPwR64ViY468OIUTbRlGZg==", + "license": "MIT", + "funding": { + "type": "github", + "url": "https://github.com/sponsors/wooorm" + } + }, + "node_modules/hast-util-to-parse5/node_modules/property-information": { + "version": "6.5.0", + "resolved": "https://registry.npmjs.org/property-information/-/property-information-6.5.0.tgz", + "integrity": "sha512-PgTgs/BlvHxOu8QuEN7wi5A0OmXaBcHpmCSTehcs6Uuu9IkDIEo13Hy7n898RHfrQ49vKCoGeWZSaAK01nwVig==", + "license": "MIT", + "funding": { + "type": "github", + "url": "https://github.com/sponsors/wooorm" + } + }, + "node_modules/hast-util-to-parse5/node_modules/space-separated-tokens": { + "version": "2.0.2", + "resolved": 
"https://registry.npmjs.org/space-separated-tokens/-/space-separated-tokens-2.0.2.tgz", + "integrity": "sha512-PEGlAwrG8yXGXRjW32fGbg66JAlOAwbObuqVoJpv/mRgoWDQfgH1wDPvtzWyUSNAXBGSk8h755YDbbcEy3SH2Q==", + "license": "MIT", + "funding": { + "type": "github", + "url": "https://github.com/sponsors/wooorm" + } + }, + "node_modules/hast-util-whitespace": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/hast-util-whitespace/-/hast-util-whitespace-3.0.0.tgz", + "integrity": "sha512-88JUN06ipLwsnv+dVn+OIYOvAuvBMy/Qoi6O7mQHxdPXpjy+Cd6xRkWwux7DKO+4sYILtLBRIKgsdpS2gQc7qw==", + "license": "MIT", + "dependencies": { + "@types/hast": "^3.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/hast-util-whitespace/node_modules/@types/hast": { + "version": "3.0.4", + "resolved": "https://registry.npmjs.org/@types/hast/-/hast-3.0.4.tgz", + "integrity": "sha512-WPs+bbQw5aCj+x6laNGWLH3wviHtoCv/P3+otBhbOhJgG8qtpdAMlTCxLtsTWA7LH1Oh/bFCHsBn0TPS5m30EQ==", + "license": "MIT", + "dependencies": { + "@types/unist": "*" + } + }, + "node_modules/hastscript": { + "version": "6.0.0", + "license": "MIT", + "dependencies": { + "@types/hast": "^2.0.0", + "comma-separated-tokens": "^1.0.0", + "hast-util-parse-selector": "^2.0.0", + "property-information": "^5.0.0", + "space-separated-tokens": "^1.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/highlight.js": { + "version": "10.7.3", + "license": "BSD-3-Clause", + "engines": { + "node": "*" + } + }, + "node_modules/highlightjs-vue": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/highlightjs-vue/-/highlightjs-vue-1.0.0.tgz", + "integrity": "sha512-PDEfEF102G23vHmPhLyPboFCD+BkMGu+GuJe2d9/eH4FsCwvgBpnc9n0pGE+ffKdph38s6foEZiEjdgHdzp+IA==" + }, + "node_modules/html-parse-stringify": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/html-parse-stringify/-/html-parse-stringify-3.0.1.tgz", + "integrity": "sha512-KknJ50kTInJ7qIScF3jeaFRpMpE8/lfiTdzf/twXyPBLAGrLRTmkz3AdTnKeh40X8k9L2fdYwEp/42WGXIRGcg==", + "license": "MIT", + "dependencies": { + "void-elements": "3.1.0" + } + }, + "node_modules/html-url-attributes": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/html-url-attributes/-/html-url-attributes-3.0.0.tgz", + "integrity": "sha512-/sXbVCWayk6GDVg3ctOX6nxaVj7So40FcFAnWlWGNAB1LpYKcV5Cd10APjPjW80O7zYW2MsjBV4zZ7IZO5fVow==", + "license": "MIT", + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/html-void-elements": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/html-void-elements/-/html-void-elements-3.0.0.tgz", + "integrity": "sha512-bEqo66MRXsUGxWHV5IP0PUiAWwoEjba4VCzg0LjFJBpchPaTfyfCKTG6bc5F8ucKec3q5y6qOdGyYTSBEvhCrg==", + "license": "MIT", + "funding": { + "type": "github", + "url": "https://github.com/sponsors/wooorm" + } + }, + "node_modules/i18next": { + "version": "24.2.0", + "resolved": "https://registry.npmjs.org/i18next/-/i18next-24.2.0.tgz", + "integrity": "sha512-ArJJTS1lV6lgKH7yEf4EpgNZ7+THl7bsGxxougPYiXRTJ/Fe1j08/TBpV9QsXCIYVfdE/HWG/xLezJ5DOlfBOA==", + "funding": [ + { + "type": "individual", + "url": "https://locize.com" + }, + { + "type": "individual", + "url": "https://locize.com/i18next.html" }, { - "type": "github", - "url": "https://github.com/sponsors/ai" + "type": "individual", + "url": 
"https://www.i18next.com/how-to/faq#i18next-is-awesome.-how-can-i-support-the-project" } ], "dependencies": { - "caniuse-lite": "^1.0.30001503", - "electron-to-chromium": "^1.4.431", - "node-releases": "^2.0.12", - "update-browserslist-db": "^1.0.11" + "@babel/runtime": "^7.23.2" + }, + "peerDependencies": { + "typescript": "^5" + }, + "peerDependenciesMeta": { + "typescript": { + "optional": true + } + } + }, + "node_modules/i18next-browser-languagedetector": { + "version": "8.0.2", + "resolved": "https://registry.npmjs.org/i18next-browser-languagedetector/-/i18next-browser-languagedetector-8.0.2.tgz", + "integrity": "sha512-shBvPmnIyZeD2VU5jVGIOWP7u9qNG3Lj7mpaiPFpbJ3LVfHZJvVzKR4v1Cb91wAOFpNw442N+LGPzHOHsten2g==", + "dependencies": { + "@babel/runtime": "^7.23.2" + } + }, + "node_modules/i18next-http-backend": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/i18next-http-backend/-/i18next-http-backend-3.0.1.tgz", + "integrity": "sha512-XT2lYSkbAtDE55c6m7CtKxxrsfuRQO3rUfHzj8ZyRtY9CkIX3aRGwXGTkUhpGWce+J8n7sfu3J0f2wTzo7Lw0A==", + "dependencies": { + "cross-fetch": "4.0.0" + } + }, + "node_modules/idb": { + "version": "8.0.0", + "resolved": "https://registry.npmjs.org/idb/-/idb-8.0.0.tgz", + "integrity": "sha512-l//qvlAKGmQO31Qn7xdzagVPPaHTxXx199MhrAFuVBTPqydcPYBWjkrbv4Y0ktB+GmWOiwHl237UUOrLmQxLvw==" + }, + "node_modules/inline-style-parser": { + "version": "0.2.3", + "resolved": "https://registry.npmjs.org/inline-style-parser/-/inline-style-parser-0.2.3.tgz", + "integrity": "sha512-qlD8YNDqyTKTyuITrDOffsl6Tdhv+UC4hcdAVuQsK4IMQ99nSgd1MIA/Q+jQYoh9r3hVUXhYh7urSRmXPkW04g==", + "license": "MIT" + }, + "node_modules/invariant": { + "version": "2.2.4", + "resolved": "https://registry.npmjs.org/invariant/-/invariant-2.2.4.tgz", + "integrity": "sha512-phJfQVBuaJM5raOpJjSfkiD6BpbCE4Ns//LaXl6wGYtUBY83nWS6Rf9tXm2e8VaK60JEjYldbPif/A2B1C2gNA==", + "license": "MIT", + "dependencies": { + "loose-envify": "^1.0.0" + } + }, + "node_modules/is-alphabetical": { + "version": "1.0.4", + "license": "MIT", + "funding": { + "type": "github", + "url": "https://github.com/sponsors/wooorm" + } + }, + "node_modules/is-alphanumerical": { + "version": "1.0.4", + "license": "MIT", + "dependencies": { + "is-alphabetical": "^1.0.0", + "is-decimal": "^1.0.0" + }, + "funding": { + "type": "github", + "url": "https://github.com/sponsors/wooorm" + } + }, + "node_modules/is-decimal": { + "version": "1.0.4", + "license": "MIT", + "funding": { + "type": "github", + "url": "https://github.com/sponsors/wooorm" + } + }, + "node_modules/is-docker": { + "version": "2.2.1", + "resolved": "https://registry.npmjs.org/is-docker/-/is-docker-2.2.1.tgz", + "integrity": "sha512-F+i2BKsFrH66iaUFc0woD8sLy8getkwTwtOBjvs56Cx4CgJDeKQeqfz8wAYiSb8JOprWhHH5p77PbmYCvvUuXQ==", + "dev": true, + "license": "MIT", + "bin": { + "is-docker": "cli.js" + }, + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/is-fullwidth-code-point": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-3.0.0.tgz", + "integrity": "sha512-zymm5+u+sCsSWyD9qNaejV3DFvhCKclKdizYaJUuHA83RLjb7nSuGnddCHGv0hk+KY7BMAlsWeK4Ueg6EV6XQg==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=8" + } + }, + "node_modules/is-hexadecimal": { + "version": "1.0.4", + "license": "MIT", + "funding": { + "type": "github", + "url": "https://github.com/sponsors/wooorm" + } + }, + "node_modules/is-plain-obj": { + "version": "4.1.0", + 
"resolved": "https://registry.npmjs.org/is-plain-obj/-/is-plain-obj-4.1.0.tgz", + "integrity": "sha512-+Pgi+vMuUNkJyExiMBt5IlFoMyKnr5zhJ4Uspz58WOhBF5QoIZkFyNHIbBAtHwzVAgk5RtndVNsDRN61/mmDqg==", + "license": "MIT", + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/is-wsl": { + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/is-wsl/-/is-wsl-2.2.0.tgz", + "integrity": "sha512-fKzAra0rGJUUBwGBgNkHZuToZcn+TtXHpeCgmkMJMMYx1sQDYaCSyjJBSCa2nH1DGm7s3n1oBnohoVTBaN7Lww==", + "dev": true, + "license": "MIT", + "dependencies": { + "is-docker": "^2.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/js-tokens": { + "version": "4.0.0", + "license": "MIT" + }, + "node_modules/jsesc": { + "version": "3.0.2", + "resolved": "https://registry.npmjs.org/jsesc/-/jsesc-3.0.2.tgz", + "integrity": "sha512-xKqzzWXDttJuOcawBt4KnKHHIf5oQ/Cxax+0PWFG+DFDgHNAdi+TXECADI+RYiFUMmx8792xsMbbgXj4CwnP4g==", + "dev": true, + "bin": { + "jsesc": "bin/jsesc" + }, + "engines": { + "node": ">=6" + } + }, + "node_modules/json5": { + "version": "2.2.3", + "resolved": "https://registry.npmjs.org/json5/-/json5-2.2.3.tgz", + "integrity": "sha512-XmOWe7eyHYH14cLdVPoyg+GOH3rYX++KpzrylJwSW98t3Nk+U8XOl8FWKOgwtzdb8lXGf6zYwDUzeHMWfxasyg==", + "dev": true, + "bin": { + "json5": "lib/cli.js" + }, + "engines": { + "node": ">=6" + } + }, + "node_modules/keyborg": { + "version": "2.6.0", + "resolved": "https://registry.npmjs.org/keyborg/-/keyborg-2.6.0.tgz", + "integrity": "sha512-o5kvLbuTF+o326CMVYpjlaykxqYP9DphFQZ2ZpgrvBouyvOxyEB7oqe8nOLFpiV5VCtz0D3pt8gXQYWpLpBnmA==" + }, + "node_modules/longest-streak": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/longest-streak/-/longest-streak-3.1.0.tgz", + "integrity": "sha512-9Ri+o0JYgehTaVBBDoMqIl8GXtbWg711O3srftcHhZ0dqnETqLaoIK0x17fUw9rFSlK/0NlsKe0Ahhyl5pXE2g==", + "license": "MIT", + "funding": { + "type": "github", + "url": "https://github.com/sponsors/wooorm" + } + }, + "node_modules/loose-envify": { + "version": "1.4.0", + "license": "MIT", + "dependencies": { + "js-tokens": "^3.0.0 || ^4.0.0" }, "bin": { - "browserslist": "cli.js" + "loose-envify": "cli.js" + } + }, + "node_modules/lowlight": { + "version": "1.20.0", + "license": "MIT", + "dependencies": { + "fault": "^1.0.0", + "highlight.js": "~10.7.0" }, - "engines": { - "node": "^6 || ^7 || ^8 || ^9 || ^10 || ^11 || ^12 || >=13.7" + "funding": { + "type": "github", + "url": "https://github.com/sponsors/wooorm" } }, - "node_modules/caniuse-lite": { - "version": "1.0.30001513", - "resolved": "https://registry.npmjs.org/caniuse-lite/-/caniuse-lite-1.0.30001513.tgz", - "integrity": "sha512-pnjGJo7SOOjAGytZZ203Em95MRM8Cr6jhCXNF/FAXTpCTRTECnqQWLpiTRqrFtdYcth8hf4WECUpkezuYsMVww==", + "node_modules/lru-cache": { + "version": "5.1.1", + "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-5.1.1.tgz", + "integrity": "sha512-KpNARQA3Iwv+jTA0utUVVbrh+Jlrr1Fv0e56GGzAFOXN7dk/FviaDW8LHmK52DlcH4WP2n6gI8vN1aesBFgo9w==", "dev": true, - "funding": [ - { - "type": "opencollective", - "url": "https://opencollective.com/browserslist" - }, - { - "type": "tidelift", - "url": "https://tidelift.com/funding/github/npm/caniuse-lite" - }, - { - "type": "github", - "url": "https://github.com/sponsors/ai" - } - ] + "dependencies": { + "yallist": "^3.0.2" + } }, - "node_modules/chalk": { - "version": "2.4.2", - "resolved": "https://registry.npmjs.org/chalk/-/chalk-2.4.2.tgz", - "integrity": 
"sha512-Mti+f9lpJNcwF4tWV8/OrTTtF1gZi+f8FqlyAdouralcFWFQWF2+NgCHShjkCb+IFBLq9buZwE1xckQU4peSuQ==", - "dev": true, + "node_modules/markdown-table": { + "version": "3.0.3", + "resolved": "https://registry.npmjs.org/markdown-table/-/markdown-table-3.0.3.tgz", + "integrity": "sha512-Z1NL3Tb1M9wH4XESsCDEksWoKTdlUafKc4pt0GRwjUyXaCFZ+dc3g2erqB6zm3szA2IUSi7VnPI+o/9jnxh9hw==", + "license": "MIT", + "funding": { + "type": "github", + "url": "https://github.com/sponsors/wooorm" + } + }, + "node_modules/mdast-util-find-and-replace": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/mdast-util-find-and-replace/-/mdast-util-find-and-replace-3.0.1.tgz", + "integrity": "sha512-SG21kZHGC3XRTSUhtofZkBzZTJNM5ecCi0SK2IMKmSXR8vO3peL+kb1O0z7Zl83jKtutG4k5Wv/W7V3/YHvzPA==", + "license": "MIT", "dependencies": { - "ansi-styles": "^3.2.1", - "escape-string-regexp": "^1.0.5", - "supports-color": "^5.3.0" + "@types/mdast": "^4.0.0", + "escape-string-regexp": "^5.0.0", + "unist-util-is": "^6.0.0", + "unist-util-visit-parents": "^6.0.0" }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/mdast-util-find-and-replace/node_modules/escape-string-regexp": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-5.0.0.tgz", + "integrity": "sha512-/veY75JbMK4j1yjvuUxuVsiS/hr/4iHs9FTT6cgTexxdE0Ly/glccBAkloH/DofkjRbZU3bnoj38mOmhkZ0lHw==", + "license": "MIT", "engines": { - "node": ">=4" + "node": ">=12" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" } }, - "node_modules/color-convert": { - "version": "1.9.3", - "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-1.9.3.tgz", - "integrity": "sha512-QfAUtd+vFdAtFQcC8CCyYt1fYWxSqAiK2cSD6zDB8N3cpsEBAvRxp9zOGg6G/SHHJYAT88/az/IuDGALsNVbGg==", - "dev": true, + "node_modules/mdast-util-from-markdown": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/mdast-util-from-markdown/-/mdast-util-from-markdown-2.0.1.tgz", + "integrity": "sha512-aJEUyzZ6TzlsX2s5B4Of7lN7EQtAxvtradMMglCQDyaTFgse6CmtmdJ15ElnVRlCg1vpNyVtbem0PWzlNieZsA==", + "license": "MIT", "dependencies": { - "color-name": "1.1.3" + "@types/mdast": "^4.0.0", + "@types/unist": "^3.0.0", + "decode-named-character-reference": "^1.0.0", + "devlop": "^1.0.0", + "mdast-util-to-string": "^4.0.0", + "micromark": "^4.0.0", + "micromark-util-decode-numeric-character-reference": "^2.0.0", + "micromark-util-decode-string": "^2.0.0", + "micromark-util-normalize-identifier": "^2.0.0", + "micromark-util-symbol": "^2.0.0", + "micromark-util-types": "^2.0.0", + "unist-util-stringify-position": "^4.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" } }, - "node_modules/color-name": { - "version": "1.1.3", - "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.3.tgz", - "integrity": "sha512-72fSenhMw2HZMTVHeCA9KCmpEIbzWiQsjN+BHcBbS9vr1mtt+vJjPdksIBNUmKAW8TFUDPJK5SUU3QhE9NEXDw==", - "dev": true + "node_modules/mdast-util-from-markdown/node_modules/@types/unist": { + "version": "3.0.3", + "resolved": "https://registry.npmjs.org/@types/unist/-/unist-3.0.3.tgz", + "integrity": "sha512-ko/gIFJRv177XgZsZcBwnqJN5x/Gien8qNOn0D5bQU/zAzVf9Zt3BlcUiLqhV9y4ARk0GbT3tnUiPNgnTXzc/Q==", + "license": "MIT" }, - "node_modules/convert-source-map": { - "version": "1.9.0", - "resolved": "https://registry.npmjs.org/convert-source-map/-/convert-source-map-1.9.0.tgz", - "integrity": 
"sha512-ASFBup0Mz1uyiIjANan1jzLQami9z1PoYSZCiiYW2FczPbenXc45FZdBZLzOT+r6+iciuEModtmCti+hjaAk0A==", - "dev": true + "node_modules/mdast-util-gfm": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/mdast-util-gfm/-/mdast-util-gfm-3.0.0.tgz", + "integrity": "sha512-dgQEX5Amaq+DuUqf26jJqSK9qgixgd6rYDHAv4aTBuA92cTknZlKpPfa86Z/s8Dj8xsAQpFfBmPUHWJBWqS4Bw==", + "license": "MIT", + "dependencies": { + "mdast-util-from-markdown": "^2.0.0", + "mdast-util-gfm-autolink-literal": "^2.0.0", + "mdast-util-gfm-footnote": "^2.0.0", + "mdast-util-gfm-strikethrough": "^2.0.0", + "mdast-util-gfm-table": "^2.0.0", + "mdast-util-gfm-task-list-item": "^2.0.0", + "mdast-util-to-markdown": "^2.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } }, - "node_modules/csstype": { - "version": "3.1.1", - "resolved": "https://registry.npmjs.org/csstype/-/csstype-3.1.1.tgz", - "integrity": "sha512-DJR/VvkAvSZW9bTouZue2sSxDwdTN92uHjqeKVm+0dAqdfNykRzQ95tay8aXMBAAPpUiq4Qcug2L7neoRh2Egw==" + "node_modules/mdast-util-gfm-autolink-literal": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/mdast-util-gfm-autolink-literal/-/mdast-util-gfm-autolink-literal-2.0.1.tgz", + "integrity": "sha512-5HVP2MKaP6L+G6YaxPNjuL0BPrq9orG3TsrZ9YXbA3vDw/ACI4MEsnoDpn6ZNm7GnZgtAcONJyPhOP8tNJQavQ==", + "license": "MIT", + "dependencies": { + "@types/mdast": "^4.0.0", + "ccount": "^2.0.0", + "devlop": "^1.0.0", + "mdast-util-find-and-replace": "^3.0.0", + "micromark-util-character": "^2.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } }, - "node_modules/debug": { - "version": "4.3.4", - "resolved": "https://registry.npmjs.org/debug/-/debug-4.3.4.tgz", - "integrity": "sha512-PRWFHuSU3eDtQJPvnNY7Jcket1j0t5OuOsFzPPzsekD52Zl8qUfFIPEiswXqIvHWGVHOgX+7G/vCNNhehwxfkQ==", - "dev": true, + "node_modules/mdast-util-gfm-footnote": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/mdast-util-gfm-footnote/-/mdast-util-gfm-footnote-2.0.0.tgz", + "integrity": "sha512-5jOT2boTSVkMnQ7LTrd6n/18kqwjmuYqo7JUPe+tRCY6O7dAuTFMtTPauYYrMPpox9hlN0uOx/FL8XvEfG9/mQ==", + "license": "MIT", "dependencies": { - "ms": "2.1.2" + "@types/mdast": "^4.0.0", + "devlop": "^1.1.0", + "mdast-util-from-markdown": "^2.0.0", + "mdast-util-to-markdown": "^2.0.0", + "micromark-util-normalize-identifier": "^2.0.0" }, - "engines": { - "node": ">=6.0" + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/mdast-util-gfm-strikethrough": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/mdast-util-gfm-strikethrough/-/mdast-util-gfm-strikethrough-2.0.0.tgz", + "integrity": "sha512-mKKb915TF+OC5ptj5bJ7WFRPdYtuHv0yTRxK2tJvi+BDqbkiG7h7u/9SI89nRAYcmap2xHQL9D+QG/6wSrTtXg==", + "license": "MIT", + "dependencies": { + "@types/mdast": "^4.0.0", + "mdast-util-from-markdown": "^2.0.0", + "mdast-util-to-markdown": "^2.0.0" }, - "peerDependenciesMeta": { - "supports-color": { - "optional": true - } + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" } }, - "node_modules/dompurify": { + "node_modules/mdast-util-gfm-table": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/mdast-util-gfm-table/-/mdast-util-gfm-table-2.0.0.tgz", + "integrity": "sha512-78UEvebzz/rJIxLvE7ZtDd/vIQ0RHv+3Mh5DR96p7cS7HsBhYIICDBCu8csTNWNO6tBWfqXPWekRuj2FNOGOZg==", + "license": "MIT", + "dependencies": { + "@types/mdast": "^4.0.0", + "devlop": "^1.0.0", + 
"markdown-table": "^3.0.0", + "mdast-util-from-markdown": "^2.0.0", + "mdast-util-to-markdown": "^2.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/mdast-util-gfm-task-list-item": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/mdast-util-gfm-task-list-item/-/mdast-util-gfm-task-list-item-2.0.0.tgz", + "integrity": "sha512-IrtvNvjxC1o06taBAVJznEnkiHxLFTzgonUdy8hzFVeDun0uTjxxrRGVaNFqkU1wJR3RBPEfsxmU6jDWPofrTQ==", + "license": "MIT", + "dependencies": { + "@types/mdast": "^4.0.0", + "devlop": "^1.0.0", + "mdast-util-from-markdown": "^2.0.0", + "mdast-util-to-markdown": "^2.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/mdast-util-mdx-expression": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/mdast-util-mdx-expression/-/mdast-util-mdx-expression-2.0.0.tgz", + "integrity": "sha512-fGCu8eWdKUKNu5mohVGkhBXCXGnOTLuFqOvGMvdikr+J1w7lDJgxThOKpwRWzzbyXAU2hhSwsmssOY4yTokluw==", + "license": "MIT", + "dependencies": { + "@types/estree-jsx": "^1.0.0", + "@types/hast": "^3.0.0", + "@types/mdast": "^4.0.0", + "devlop": "^1.0.0", + "mdast-util-from-markdown": "^2.0.0", + "mdast-util-to-markdown": "^2.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/mdast-util-mdx-expression/node_modules/@types/hast": { "version": "3.0.4", - "resolved": "https://registry.npmjs.org/dompurify/-/dompurify-3.0.4.tgz", - "integrity": "sha512-ae0mA+Qiqp6C29pqZX3fQgK+F91+F7wobM/v8DRzDqJdZJELXiFUx4PP4pK/mzUS0xkiSEx3Ncd9gr69jg3YsQ==" + "resolved": "https://registry.npmjs.org/@types/hast/-/hast-3.0.4.tgz", + "integrity": "sha512-WPs+bbQw5aCj+x6laNGWLH3wviHtoCv/P3+otBhbOhJgG8qtpdAMlTCxLtsTWA7LH1Oh/bFCHsBn0TPS5m30EQ==", + "license": "MIT", + "dependencies": { + "@types/unist": "*" + } }, - "node_modules/electron-to-chromium": { - "version": "1.4.454", - "resolved": "https://registry.npmjs.org/electron-to-chromium/-/electron-to-chromium-1.4.454.tgz", - "integrity": "sha512-pmf1rbAStw8UEQ0sr2cdJtWl48ZMuPD9Sto8HVQOq9vx9j2WgDEN6lYoaqFvqEHYOmGA9oRGn7LqWI9ta0YugQ==", - "dev": true + "node_modules/mdast-util-mdx-jsx": { + "version": "3.1.2", + "resolved": "https://registry.npmjs.org/mdast-util-mdx-jsx/-/mdast-util-mdx-jsx-3.1.2.tgz", + "integrity": "sha512-eKMQDeywY2wlHc97k5eD8VC+9ASMjN8ItEZQNGwJ6E0XWKiW/Z0V5/H8pvoXUf+y+Mj0VIgeRRbujBmFn4FTyA==", + "license": "MIT", + "dependencies": { + "@types/estree-jsx": "^1.0.0", + "@types/hast": "^3.0.0", + "@types/mdast": "^4.0.0", + "@types/unist": "^3.0.0", + "ccount": "^2.0.0", + "devlop": "^1.1.0", + "mdast-util-from-markdown": "^2.0.0", + "mdast-util-to-markdown": "^2.0.0", + "parse-entities": "^4.0.0", + "stringify-entities": "^4.0.0", + "unist-util-remove-position": "^5.0.0", + "unist-util-stringify-position": "^4.0.0", + "vfile-message": "^4.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } }, - "node_modules/esbuild": { - "version": "0.18.11", - "resolved": "https://registry.npmjs.org/esbuild/-/esbuild-0.18.11.tgz", - "integrity": "sha512-i8u6mQF0JKJUlGR3OdFLKldJQMMs8OqM9Cc3UCi9XXziJ9WERM5bfkHaEAy0YAvPRMgqSW55W7xYn84XtEFTtA==", - "dev": true, - "hasInstallScript": true, - "bin": { - "esbuild": "bin/esbuild" + "node_modules/mdast-util-mdx-jsx/node_modules/@types/hast": { + "version": "3.0.4", + "resolved": "https://registry.npmjs.org/@types/hast/-/hast-3.0.4.tgz", + "integrity": 
"sha512-WPs+bbQw5aCj+x6laNGWLH3wviHtoCv/P3+otBhbOhJgG8qtpdAMlTCxLtsTWA7LH1Oh/bFCHsBn0TPS5m30EQ==", + "license": "MIT", + "dependencies": { + "@types/unist": "*" + } + }, + "node_modules/mdast-util-mdx-jsx/node_modules/@types/unist": { + "version": "3.0.3", + "resolved": "https://registry.npmjs.org/@types/unist/-/unist-3.0.3.tgz", + "integrity": "sha512-ko/gIFJRv177XgZsZcBwnqJN5x/Gien8qNOn0D5bQU/zAzVf9Zt3BlcUiLqhV9y4ARk0GbT3tnUiPNgnTXzc/Q==", + "license": "MIT" + }, + "node_modules/mdast-util-mdx-jsx/node_modules/character-entities": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/character-entities/-/character-entities-2.0.2.tgz", + "integrity": "sha512-shx7oQ0Awen/BRIdkjkvz54PnEEI/EjwXDSIZp86/KKdbafHh1Df/RYGBhn4hbe2+uKC9FnT5UCEdyPz3ai9hQ==", + "license": "MIT", + "funding": { + "type": "github", + "url": "https://github.com/sponsors/wooorm" + } + }, + "node_modules/mdast-util-mdx-jsx/node_modules/character-entities-legacy": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/character-entities-legacy/-/character-entities-legacy-3.0.0.tgz", + "integrity": "sha512-RpPp0asT/6ufRm//AJVwpViZbGM/MkjQFxJccQRHmISF/22NBtsHqAWmL+/pmkPWoIUJdWyeVleTl1wydHATVQ==", + "license": "MIT", + "funding": { + "type": "github", + "url": "https://github.com/sponsors/wooorm" + } + }, + "node_modules/mdast-util-mdx-jsx/node_modules/character-reference-invalid": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/character-reference-invalid/-/character-reference-invalid-2.0.1.tgz", + "integrity": "sha512-iBZ4F4wRbyORVsu0jPV7gXkOsGYjGHPmAyv+HiHG8gi5PtC9KI2j1+v8/tlibRvjoWX027ypmG/n0HtO5t7unw==", + "license": "MIT", + "funding": { + "type": "github", + "url": "https://github.com/sponsors/wooorm" + } + }, + "node_modules/mdast-util-mdx-jsx/node_modules/is-alphabetical": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/is-alphabetical/-/is-alphabetical-2.0.1.tgz", + "integrity": "sha512-FWyyY60MeTNyeSRpkM2Iry0G9hpr7/9kD40mD/cGQEuilcZYS4okz8SN2Q6rLCJ8gbCt6fN+rC+6tMGS99LaxQ==", + "license": "MIT", + "funding": { + "type": "github", + "url": "https://github.com/sponsors/wooorm" + } + }, + "node_modules/mdast-util-mdx-jsx/node_modules/is-alphanumerical": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/is-alphanumerical/-/is-alphanumerical-2.0.1.tgz", + "integrity": "sha512-hmbYhX/9MUMF5uh7tOXyK/n0ZvWpad5caBA17GsC6vyuCqaWliRG5K1qS9inmUhEMaOBIW7/whAnSwveW/LtZw==", + "license": "MIT", + "dependencies": { + "is-alphabetical": "^2.0.0", + "is-decimal": "^2.0.0" }, - "engines": { - "node": ">=12" + "funding": { + "type": "github", + "url": "https://github.com/sponsors/wooorm" + } + }, + "node_modules/mdast-util-mdx-jsx/node_modules/is-decimal": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/is-decimal/-/is-decimal-2.0.1.tgz", + "integrity": "sha512-AAB9hiomQs5DXWcRB1rqsxGUstbRroFOPPVAomNk/3XHR5JyEZChOyTWe2oayKnsSsr/kcGqF+z6yuH6HHpN0A==", + "license": "MIT", + "funding": { + "type": "github", + "url": "https://github.com/sponsors/wooorm" + } + }, + "node_modules/mdast-util-mdx-jsx/node_modules/is-hexadecimal": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/is-hexadecimal/-/is-hexadecimal-2.0.1.tgz", + "integrity": "sha512-DgZQp241c8oO6cA1SbTEWiXeoxV42vlcJxgH+B3hi1AiqqKruZR3ZGF8In3fj4+/y/7rHvlOZLZtgJ/4ttYGZg==", + "license": "MIT", + "funding": { + "type": "github", + "url": "https://github.com/sponsors/wooorm" + } + }, + "node_modules/mdast-util-mdx-jsx/node_modules/parse-entities": { + "version": "4.0.1", + 
"resolved": "https://registry.npmjs.org/parse-entities/-/parse-entities-4.0.1.tgz", + "integrity": "sha512-SWzvYcSJh4d/SGLIOQfZ/CoNv6BTlI6YEQ7Nj82oDVnRpwe/Z/F1EMx42x3JAOwGBlCjeCH0BRJQbQ/opHL17w==", + "license": "MIT", + "dependencies": { + "@types/unist": "^2.0.0", + "character-entities": "^2.0.0", + "character-entities-legacy": "^3.0.0", + "character-reference-invalid": "^2.0.0", + "decode-named-character-reference": "^1.0.0", + "is-alphanumerical": "^2.0.0", + "is-decimal": "^2.0.0", + "is-hexadecimal": "^2.0.0" }, - "optionalDependencies": { - "@esbuild/android-arm": "0.18.11", - "@esbuild/android-arm64": "0.18.11", - "@esbuild/android-x64": "0.18.11", - "@esbuild/darwin-arm64": "0.18.11", - "@esbuild/darwin-x64": "0.18.11", - "@esbuild/freebsd-arm64": "0.18.11", - "@esbuild/freebsd-x64": "0.18.11", - "@esbuild/linux-arm": "0.18.11", - "@esbuild/linux-arm64": "0.18.11", - "@esbuild/linux-ia32": "0.18.11", - "@esbuild/linux-loong64": "0.18.11", - "@esbuild/linux-mips64el": "0.18.11", - "@esbuild/linux-ppc64": "0.18.11", - "@esbuild/linux-riscv64": "0.18.11", - "@esbuild/linux-s390x": "0.18.11", - "@esbuild/linux-x64": "0.18.11", - "@esbuild/netbsd-x64": "0.18.11", - "@esbuild/openbsd-x64": "0.18.11", - "@esbuild/sunos-x64": "0.18.11", - "@esbuild/win32-arm64": "0.18.11", - "@esbuild/win32-ia32": "0.18.11", - "@esbuild/win32-x64": "0.18.11" + "funding": { + "type": "github", + "url": "https://github.com/sponsors/wooorm" } }, - "node_modules/escalade": { - "version": "3.1.1", - "resolved": "https://registry.npmjs.org/escalade/-/escalade-3.1.1.tgz", - "integrity": "sha512-k0er2gUkLf8O0zKJiAhmkTnJlTvINGv7ygDNPbeIsX/TJjGJZHuh9B2UxbsaEkmlEo9MfhrSzmhIlhRlI2GXnw==", - "dev": true, - "engines": { - "node": ">=6" + "node_modules/mdast-util-mdx-jsx/node_modules/parse-entities/node_modules/@types/unist": { + "version": "2.0.11", + "resolved": "https://registry.npmjs.org/@types/unist/-/unist-2.0.11.tgz", + "integrity": "sha512-CmBKiL6NNo/OqgmMn95Fk9Whlp2mtvIv+KNpQKN2F4SjvrEesubTRWGYSg+BnWZOnlCaSTU1sMpsBOzgbYhnsA==", + "license": "MIT" + }, + "node_modules/mdast-util-mdxjs-esm": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/mdast-util-mdxjs-esm/-/mdast-util-mdxjs-esm-2.0.1.tgz", + "integrity": "sha512-EcmOpxsZ96CvlP03NghtH1EsLtr0n9Tm4lPUJUBccV9RwUOneqSycg19n5HGzCf+10LozMRSObtVr3ee1WoHtg==", + "license": "MIT", + "dependencies": { + "@types/estree-jsx": "^1.0.0", + "@types/hast": "^3.0.0", + "@types/mdast": "^4.0.0", + "devlop": "^1.0.0", + "mdast-util-from-markdown": "^2.0.0", + "mdast-util-to-markdown": "^2.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" } }, - "node_modules/escape-string-regexp": { - "version": "1.0.5", - "resolved": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-1.0.5.tgz", - "integrity": "sha512-vbRorB5FUQWvla16U8R/qgaFIya2qGzwDrNmCZuYKrbdSUMG6I1ZCGQRefkRVhuOkIGVne7BQ35DSfo1qvJqFg==", - "dev": true, - "engines": { - "node": ">=0.8.0" + "node_modules/mdast-util-mdxjs-esm/node_modules/@types/hast": { + "version": "3.0.4", + "resolved": "https://registry.npmjs.org/@types/hast/-/hast-3.0.4.tgz", + "integrity": "sha512-WPs+bbQw5aCj+x6laNGWLH3wviHtoCv/P3+otBhbOhJgG8qtpdAMlTCxLtsTWA7LH1Oh/bFCHsBn0TPS5m30EQ==", + "license": "MIT", + "dependencies": { + "@types/unist": "*" } }, - "node_modules/fsevents": { - "version": "2.3.2", - "resolved": "https://registry.npmjs.org/fsevents/-/fsevents-2.3.2.tgz", - "integrity": 
"sha512-xiqMQR4xAeHTuB9uWm+fFRcIOgKBMiOBP+eXiyT7jsgVCq1bkVygt00oASowB7EdtpOHaaPgKt812P9ab+DDKA==", - "dev": true, - "hasInstallScript": true, - "optional": true, - "os": [ - "darwin" - ], - "engines": { - "node": "^8.16.0 || ^10.6.0 || >=11.0.0" + "node_modules/mdast-util-phrasing": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/mdast-util-phrasing/-/mdast-util-phrasing-4.1.0.tgz", + "integrity": "sha512-TqICwyvJJpBwvGAMZjj4J2n0X8QWp21b9l0o7eXyVJ25YNWYbJDVIyD1bZXE6WtV6RmKJVYmQAKWa0zWOABz2w==", + "license": "MIT", + "dependencies": { + "@types/mdast": "^4.0.0", + "unist-util-is": "^6.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" } }, - "node_modules/gensync": { - "version": "1.0.0-beta.2", - "resolved": "https://registry.npmjs.org/gensync/-/gensync-1.0.0-beta.2.tgz", - "integrity": "sha512-3hN7NaskYvMDLQY55gnW3NQ+mesEAepTqlg+VEbj7zzqEMBVNhzcGYYeqFo/TlYz6eQiFcp1HcsCZO+nGgS8zg==", - "dev": true, - "engines": { - "node": ">=6.9.0" + "node_modules/mdast-util-to-hast": { + "version": "13.2.0", + "resolved": "https://registry.npmjs.org/mdast-util-to-hast/-/mdast-util-to-hast-13.2.0.tgz", + "integrity": "sha512-QGYKEuUsYT9ykKBCMOEDLsU5JRObWQusAolFMeko/tYPufNkRffBAQjIE+99jbA87xv6FgmjLtwjh9wBWajwAA==", + "license": "MIT", + "dependencies": { + "@types/hast": "^3.0.0", + "@types/mdast": "^4.0.0", + "@ungap/structured-clone": "^1.0.0", + "devlop": "^1.0.0", + "micromark-util-sanitize-uri": "^2.0.0", + "trim-lines": "^3.0.0", + "unist-util-position": "^5.0.0", + "unist-util-visit": "^5.0.0", + "vfile": "^6.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" } }, - "node_modules/globals": { - "version": "11.12.0", - "resolved": "https://registry.npmjs.org/globals/-/globals-11.12.0.tgz", - "integrity": "sha512-WOBp/EEGUiIsJSp7wcv/y6MO+lV9UoncWqxuFfm8eBwzWNgyfBd6Gz+IeKQ9jCmyhoH99g15M3T+QaVHFjizVA==", - "dev": true, - "engines": { - "node": ">=4" + "node_modules/mdast-util-to-hast/node_modules/@types/hast": { + "version": "3.0.4", + "resolved": "https://registry.npmjs.org/@types/hast/-/hast-3.0.4.tgz", + "integrity": "sha512-WPs+bbQw5aCj+x6laNGWLH3wviHtoCv/P3+otBhbOhJgG8qtpdAMlTCxLtsTWA7LH1Oh/bFCHsBn0TPS5m30EQ==", + "license": "MIT", + "dependencies": { + "@types/unist": "*" } }, - "node_modules/has-flag": { + "node_modules/mdast-util-to-markdown": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/mdast-util-to-markdown/-/mdast-util-to-markdown-2.1.0.tgz", + "integrity": "sha512-SR2VnIEdVNCJbP6y7kVTJgPLifdr8WEU440fQec7qHoHOUz/oJ2jmNRqdDQ3rbiStOXb2mCDGTuwsK5OPUgYlQ==", + "license": "MIT", + "dependencies": { + "@types/mdast": "^4.0.0", + "@types/unist": "^3.0.0", + "longest-streak": "^3.0.0", + "mdast-util-phrasing": "^4.0.0", + "mdast-util-to-string": "^4.0.0", + "micromark-util-decode-string": "^2.0.0", + "unist-util-visit": "^5.0.0", + "zwitch": "^2.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/mdast-util-to-markdown/node_modules/@types/unist": { + "version": "3.0.3", + "resolved": "https://registry.npmjs.org/@types/unist/-/unist-3.0.3.tgz", + "integrity": "sha512-ko/gIFJRv177XgZsZcBwnqJN5x/Gien8qNOn0D5bQU/zAzVf9Zt3BlcUiLqhV9y4ARk0GbT3tnUiPNgnTXzc/Q==", + "license": "MIT" + }, + "node_modules/mdast-util-to-string": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/mdast-util-to-string/-/mdast-util-to-string-4.0.0.tgz", + "integrity": 
"sha512-0H44vDimn51F0YwvxSJSm0eCDOJTRlmN0R1yBh4HLj9wiV1Dn0QoXGbvFAWj2hSItVTlCmBF1hqKlIyUBVFLPg==", + "license": "MIT", + "dependencies": { + "@types/mdast": "^4.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/micromark": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/micromark/-/micromark-4.0.0.tgz", + "integrity": "sha512-o/sd0nMof8kYff+TqcDx3VSrgBTcZpSvYcAHIfHhv5VAuNmisCxjhx6YmxS8PFEpb9z5WKWKPdzf0jM23ro3RQ==", + "funding": [ + { + "type": "GitHub Sponsors", + "url": "https://github.com/sponsors/unifiedjs" + }, + { + "type": "OpenCollective", + "url": "https://opencollective.com/unified" + } + ], + "license": "MIT", + "dependencies": { + "@types/debug": "^4.0.0", + "debug": "^4.0.0", + "decode-named-character-reference": "^1.0.0", + "devlop": "^1.0.0", + "micromark-core-commonmark": "^2.0.0", + "micromark-factory-space": "^2.0.0", + "micromark-util-character": "^2.0.0", + "micromark-util-chunked": "^2.0.0", + "micromark-util-combine-extensions": "^2.0.0", + "micromark-util-decode-numeric-character-reference": "^2.0.0", + "micromark-util-encode": "^2.0.0", + "micromark-util-normalize-identifier": "^2.0.0", + "micromark-util-resolve-all": "^2.0.0", + "micromark-util-sanitize-uri": "^2.0.0", + "micromark-util-subtokenize": "^2.0.0", + "micromark-util-symbol": "^2.0.0", + "micromark-util-types": "^2.0.0" + } + }, + "node_modules/micromark-core-commonmark": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/micromark-core-commonmark/-/micromark-core-commonmark-2.0.1.tgz", + "integrity": "sha512-CUQyKr1e///ZODyD1U3xit6zXwy1a8q2a1S1HKtIlmgvurrEpaw/Y9y6KSIbF8P59cn/NjzHyO+Q2fAyYLQrAA==", + "funding": [ + { + "type": "GitHub Sponsors", + "url": "https://github.com/sponsors/unifiedjs" + }, + { + "type": "OpenCollective", + "url": "https://opencollective.com/unified" + } + ], + "license": "MIT", + "dependencies": { + "decode-named-character-reference": "^1.0.0", + "devlop": "^1.0.0", + "micromark-factory-destination": "^2.0.0", + "micromark-factory-label": "^2.0.0", + "micromark-factory-space": "^2.0.0", + "micromark-factory-title": "^2.0.0", + "micromark-factory-whitespace": "^2.0.0", + "micromark-util-character": "^2.0.0", + "micromark-util-chunked": "^2.0.0", + "micromark-util-classify-character": "^2.0.0", + "micromark-util-html-tag-name": "^2.0.0", + "micromark-util-normalize-identifier": "^2.0.0", + "micromark-util-resolve-all": "^2.0.0", + "micromark-util-subtokenize": "^2.0.0", + "micromark-util-symbol": "^2.0.0", + "micromark-util-types": "^2.0.0" + } + }, + "node_modules/micromark-extension-gfm": { "version": "3.0.0", - "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-3.0.0.tgz", - "integrity": "sha512-sKJf1+ceQBr4SMkvQnBDNDtf4TXpVhVGateu0t918bl30FnbE2m4vNLX+VWe/dpjlb+HugGYzW7uQXH98HPEYw==", - "dev": true, - "engines": { - "node": ">=4" + "resolved": "https://registry.npmjs.org/micromark-extension-gfm/-/micromark-extension-gfm-3.0.0.tgz", + "integrity": "sha512-vsKArQsicm7t0z2GugkCKtZehqUm31oeGBV/KVSorWSy8ZlNAv7ytjFhvaryUiCUJYqs+NoE6AFhpQvBTM6Q4w==", + "license": "MIT", + "dependencies": { + "micromark-extension-gfm-autolink-literal": "^2.0.0", + "micromark-extension-gfm-footnote": "^2.0.0", + "micromark-extension-gfm-strikethrough": "^2.0.0", + "micromark-extension-gfm-table": "^2.0.0", + "micromark-extension-gfm-tagfilter": "^2.0.0", + "micromark-extension-gfm-task-list-item": "^2.0.0", + "micromark-util-combine-extensions": "^2.0.0", + "micromark-util-types": "^2.0.0" 
+ }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" } }, - "node_modules/js-tokens": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/js-tokens/-/js-tokens-4.0.0.tgz", - "integrity": "sha512-RdJUflcE3cUzKiMqQgsCu06FPu9UdIJO0beYbPhHN4k6apgJtifcoCtT9bcxOpYBtpD2kCM6Sbzg4CausW/PKQ==" + "node_modules/micromark-extension-gfm-autolink-literal": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/micromark-extension-gfm-autolink-literal/-/micromark-extension-gfm-autolink-literal-2.1.0.tgz", + "integrity": "sha512-oOg7knzhicgQ3t4QCjCWgTmfNhvQbDDnJeVu9v81r7NltNCVmhPy1fJRX27pISafdjL+SVc4d3l48Gb6pbRypw==", + "license": "MIT", + "dependencies": { + "micromark-util-character": "^2.0.0", + "micromark-util-sanitize-uri": "^2.0.0", + "micromark-util-symbol": "^2.0.0", + "micromark-util-types": "^2.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } }, - "node_modules/jsesc": { - "version": "2.5.2", - "resolved": "https://registry.npmjs.org/jsesc/-/jsesc-2.5.2.tgz", - "integrity": "sha512-OYu7XEzjkCQ3C5Ps3QIZsQfNpqoJyZZA99wd9aWd05NCtC5pWOkShK2mkL6HXQR6/Cy2lbNdPlZBpuQHXE63gA==", - "dev": true, - "bin": { - "jsesc": "bin/jsesc" + "node_modules/micromark-extension-gfm-footnote": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/micromark-extension-gfm-footnote/-/micromark-extension-gfm-footnote-2.1.0.tgz", + "integrity": "sha512-/yPhxI1ntnDNsiHtzLKYnE3vf9JZ6cAisqVDauhp4CEHxlb4uoOTxOCJ+9s51bIB8U1N1FJ1RXOKTIlD5B/gqw==", + "license": "MIT", + "dependencies": { + "devlop": "^1.0.0", + "micromark-core-commonmark": "^2.0.0", + "micromark-factory-space": "^2.0.0", + "micromark-util-character": "^2.0.0", + "micromark-util-normalize-identifier": "^2.0.0", + "micromark-util-sanitize-uri": "^2.0.0", + "micromark-util-symbol": "^2.0.0", + "micromark-util-types": "^2.0.0" }, - "engines": { - "node": ">=4" + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" } }, - "node_modules/json5": { - "version": "2.2.3", - "resolved": "https://registry.npmjs.org/json5/-/json5-2.2.3.tgz", - "integrity": "sha512-XmOWe7eyHYH14cLdVPoyg+GOH3rYX++KpzrylJwSW98t3Nk+U8XOl8FWKOgwtzdb8lXGf6zYwDUzeHMWfxasyg==", - "dev": true, - "bin": { - "json5": "lib/cli.js" + "node_modules/micromark-extension-gfm-strikethrough": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/micromark-extension-gfm-strikethrough/-/micromark-extension-gfm-strikethrough-2.1.0.tgz", + "integrity": "sha512-ADVjpOOkjz1hhkZLlBiYA9cR2Anf8F4HqZUO6e5eDcPQd0Txw5fxLzzxnEkSkfnD0wziSGiv7sYhk/ktvbf1uw==", + "license": "MIT", + "dependencies": { + "devlop": "^1.0.0", + "micromark-util-chunked": "^2.0.0", + "micromark-util-classify-character": "^2.0.0", + "micromark-util-resolve-all": "^2.0.0", + "micromark-util-symbol": "^2.0.0", + "micromark-util-types": "^2.0.0" }, - "engines": { - "node": ">=6" + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" } }, - "node_modules/loose-envify": { - "version": "1.4.0", - "resolved": "https://registry.npmjs.org/loose-envify/-/loose-envify-1.4.0.tgz", - "integrity": "sha512-lyuxPGr/Wfhrlem2CL/UcnUc1zcqKAImBDzukY7Y5F/yQiNdko6+fRLevlw1HgMySw7f611UIY408EtxRSoK3Q==", + "node_modules/micromark-extension-gfm-table": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/micromark-extension-gfm-table/-/micromark-extension-gfm-table-2.1.0.tgz", + "integrity": 
"sha512-Ub2ncQv+fwD70/l4ou27b4YzfNaCJOvyX4HxXU15m7mpYY+rjuWzsLIPZHJL253Z643RpbcP1oeIJlQ/SKW67g==", + "license": "MIT", "dependencies": { - "js-tokens": "^3.0.0 || ^4.0.0" + "devlop": "^1.0.0", + "micromark-factory-space": "^2.0.0", + "micromark-util-character": "^2.0.0", + "micromark-util-symbol": "^2.0.0", + "micromark-util-types": "^2.0.0" }, - "bin": { - "loose-envify": "cli.js" + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" } }, - "node_modules/lru-cache": { - "version": "5.1.1", - "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-5.1.1.tgz", - "integrity": "sha512-KpNARQA3Iwv+jTA0utUVVbrh+Jlrr1Fv0e56GGzAFOXN7dk/FviaDW8LHmK52DlcH4WP2n6gI8vN1aesBFgo9w==", - "dev": true, + "node_modules/micromark-extension-gfm-tagfilter": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/micromark-extension-gfm-tagfilter/-/micromark-extension-gfm-tagfilter-2.0.0.tgz", + "integrity": "sha512-xHlTOmuCSotIA8TW1mDIM6X2O1SiX5P9IuDtqGonFhEK0qgRI4yeC6vMxEV2dgyr2TiD+2PQ10o+cOhdVAcwfg==", + "license": "MIT", "dependencies": { - "yallist": "^3.0.2" + "micromark-util-types": "^2.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" } }, - "node_modules/ms": { - "version": "2.1.2", - "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.2.tgz", - "integrity": "sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w==", - "dev": true + "node_modules/micromark-extension-gfm-task-list-item": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/micromark-extension-gfm-task-list-item/-/micromark-extension-gfm-task-list-item-2.1.0.tgz", + "integrity": "sha512-qIBZhqxqI6fjLDYFTBIa4eivDMnP+OZqsNwmQ3xNLE4Cxwc+zfQEfbs6tzAo2Hjq+bh6q5F+Z8/cksrLFYWQQw==", + "license": "MIT", + "dependencies": { + "devlop": "^1.0.0", + "micromark-factory-space": "^2.0.0", + "micromark-util-character": "^2.0.0", + "micromark-util-symbol": "^2.0.0", + "micromark-util-types": "^2.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } }, - "node_modules/nanoid": { - "version": "3.3.6", - "resolved": "https://registry.npmjs.org/nanoid/-/nanoid-3.3.6.tgz", - "integrity": "sha512-BGcqMMJuToF7i1rt+2PWSNVnWIkGCU78jBG3RxO/bZlnZPK2Cmi2QaffxGO/2RvWi9sL+FAiRiXMgsyxQ1DIDA==", - "dev": true, + "node_modules/micromark-factory-destination": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/micromark-factory-destination/-/micromark-factory-destination-2.0.0.tgz", + "integrity": "sha512-j9DGrQLm/Uhl2tCzcbLhy5kXsgkHUrjJHg4fFAeoMRwJmJerT9aw4FEhIbZStWN8A3qMwOp1uzHr4UL8AInxtA==", "funding": [ { - "type": "github", - "url": "https://github.com/sponsors/ai" + "type": "GitHub Sponsors", + "url": "https://github.com/sponsors/unifiedjs" + }, + { + "type": "OpenCollective", + "url": "https://opencollective.com/unified" } ], - "bin": { - "nanoid": "bin/nanoid.cjs" - }, - "engines": { - "node": "^10 || ^12 || ^13.7 || ^14 || >=15.0.1" + "license": "MIT", + "dependencies": { + "micromark-util-character": "^2.0.0", + "micromark-util-symbol": "^2.0.0", + "micromark-util-types": "^2.0.0" } }, - "node_modules/node-releases": { - "version": "2.0.13", - "resolved": "https://registry.npmjs.org/node-releases/-/node-releases-2.0.13.tgz", - "integrity": "sha512-uYr7J37ae/ORWdZeQ1xxMJe3NtdmqMC/JZK+geofDrkLUApKRHPd18/TxtBOJ4A0/+uUIliorNrfYV6s1b02eQ==", - "dev": true - }, - "node_modules/picocolors": { - "version": "1.0.0", - "resolved": 
"https://registry.npmjs.org/picocolors/-/picocolors-1.0.0.tgz", - "integrity": "sha512-1fygroTLlHu66zi26VoTDv8yRgm0Fccecssto+MhsZ0D/DGW2sm8E8AjW7NU5VVTRt5GxbeZ5qBuJr+HyLYkjQ==", - "dev": true - }, - "node_modules/postcss": { - "version": "8.4.25", - "resolved": "https://registry.npmjs.org/postcss/-/postcss-8.4.25.tgz", - "integrity": "sha512-7taJ/8t2av0Z+sQEvNzCkpDynl0tX3uJMCODi6nT3PfASC7dYCWV9aQ+uiCf+KBD4SEFcu+GvJdGdwzQ6OSjCw==", - "dev": true, + "node_modules/micromark-factory-label": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/micromark-factory-label/-/micromark-factory-label-2.0.0.tgz", + "integrity": "sha512-RR3i96ohZGde//4WSe/dJsxOX6vxIg9TimLAS3i4EhBAFx8Sm5SmqVfR8E87DPSR31nEAjZfbt91OMZWcNgdZw==", "funding": [ { - "type": "opencollective", - "url": "https://opencollective.com/postcss/" - }, - { - "type": "tidelift", - "url": "https://tidelift.com/funding/github/npm/postcss" + "type": "GitHub Sponsors", + "url": "https://github.com/sponsors/unifiedjs" }, { - "type": "github", - "url": "https://github.com/sponsors/ai" + "type": "OpenCollective", + "url": "https://opencollective.com/unified" } ], + "license": "MIT", "dependencies": { - "nanoid": "^3.3.6", - "picocolors": "^1.0.0", - "source-map-js": "^1.0.2" - }, - "engines": { - "node": "^10 || ^12 || >=14" + "devlop": "^1.0.0", + "micromark-util-character": "^2.0.0", + "micromark-util-symbol": "^2.0.0", + "micromark-util-types": "^2.0.0" } }, - "node_modules/prettier": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/prettier/-/prettier-3.0.0.tgz", - "integrity": "sha512-zBf5eHpwHOGPC47h0zrPyNn+eAEIdEzfywMoYn2XPi0P44Zp0tSq64rq0xAREh4auw2cJZHo9QUob+NqCQky4g==", - "dev": true, - "bin": { - "prettier": "bin/prettier.cjs" - }, - "engines": { - "node": ">=14" - }, - "funding": { - "url": "https://github.com/prettier/prettier?sponsor=1" + "node_modules/micromark-factory-space": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/micromark-factory-space/-/micromark-factory-space-2.0.0.tgz", + "integrity": "sha512-TKr+LIDX2pkBJXFLzpyPyljzYK3MtmllMUMODTQJIUfDGncESaqB90db9IAUcz4AZAJFdd8U9zOp9ty1458rxg==", + "funding": [ + { + "type": "GitHub Sponsors", + "url": "https://github.com/sponsors/unifiedjs" + }, + { + "type": "OpenCollective", + "url": "https://opencollective.com/unified" + } + ], + "license": "MIT", + "dependencies": { + "micromark-util-character": "^2.0.0", + "micromark-util-types": "^2.0.0" } }, - "node_modules/react": { - "version": "18.2.0", - "resolved": "https://registry.npmjs.org/react/-/react-18.2.0.tgz", - "integrity": "sha512-/3IjMdb2L9QbBdWiW5e3P2/npwMBaU9mHCSCUzNln0ZCYbcfTsGbTJrU/kGemdH2IWmB2ioZ+zkxtmq6g09fGQ==", + "node_modules/micromark-factory-title": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/micromark-factory-title/-/micromark-factory-title-2.0.0.tgz", + "integrity": "sha512-jY8CSxmpWLOxS+t8W+FG3Xigc0RDQA9bKMY/EwILvsesiRniiVMejYTE4wumNc2f4UbAa4WsHqe3J1QS1sli+A==", + "funding": [ + { + "type": "GitHub Sponsors", + "url": "https://github.com/sponsors/unifiedjs" + }, + { + "type": "OpenCollective", + "url": "https://opencollective.com/unified" + } + ], + "license": "MIT", "dependencies": { - "loose-envify": "^1.1.0" - }, - "engines": { - "node": ">=0.10.0" + "micromark-factory-space": "^2.0.0", + "micromark-util-character": "^2.0.0", + "micromark-util-symbol": "^2.0.0", + "micromark-util-types": "^2.0.0" } }, - "node_modules/react-dom": { - "version": "18.2.0", - "resolved": "https://registry.npmjs.org/react-dom/-/react-dom-18.2.0.tgz", - 
"integrity": "sha512-6IMTriUmvsjHUjNtEDudZfuDQUoWXVxKHhlEGSk81n4YFS+r/Kl99wXiwlVXtPBtJenozv2P+hxDsw9eA7Xo6g==", + "node_modules/micromark-factory-whitespace": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/micromark-factory-whitespace/-/micromark-factory-whitespace-2.0.0.tgz", + "integrity": "sha512-28kbwaBjc5yAI1XadbdPYHX/eDnqaUFVikLwrO7FDnKG7lpgxnvk/XGRhX/PN0mOZ+dBSZ+LgunHS+6tYQAzhA==", + "funding": [ + { + "type": "GitHub Sponsors", + "url": "https://github.com/sponsors/unifiedjs" + }, + { + "type": "OpenCollective", + "url": "https://opencollective.com/unified" + } + ], + "license": "MIT", "dependencies": { - "loose-envify": "^1.1.0", - "scheduler": "^0.23.0" - }, - "peerDependencies": { - "react": "^18.2.0" + "micromark-factory-space": "^2.0.0", + "micromark-util-character": "^2.0.0", + "micromark-util-symbol": "^2.0.0", + "micromark-util-types": "^2.0.0" } }, - "node_modules/react-refresh": { - "version": "0.14.0", - "resolved": "https://registry.npmjs.org/react-refresh/-/react-refresh-0.14.0.tgz", - "integrity": "sha512-wViHqhAd8OHeLS/IRMJjTSDHF3U9eWi62F/MledQGPdJGDhodXJ9PBLNGr6WWL7qlH12Mt3TyTpbS+hGXMjCzQ==", - "dev": true, - "engines": { - "node": ">=0.10.0" + "node_modules/micromark-util-character": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/micromark-util-character/-/micromark-util-character-2.1.0.tgz", + "integrity": "sha512-KvOVV+X1yLBfs9dCBSopq/+G1PcgT3lAK07mC4BzXi5E7ahzMAF8oIupDDJ6mievI6F+lAATkbQQlQixJfT3aQ==", + "funding": [ + { + "type": "GitHub Sponsors", + "url": "https://github.com/sponsors/unifiedjs" + }, + { + "type": "OpenCollective", + "url": "https://opencollective.com/unified" + } + ], + "license": "MIT", + "dependencies": { + "micromark-util-symbol": "^2.0.0", + "micromark-util-types": "^2.0.0" } }, - "node_modules/react-router": { - "version": "6.14.1", - "resolved": "https://registry.npmjs.org/react-router/-/react-router-6.14.1.tgz", - "integrity": "sha512-U4PfgvG55LdvbQjg5Y9QRWyVxIdO1LlpYT7x+tMAxd9/vmiPuJhIwdxZuIQLN/9e3O4KFDHYfR9gzGeYMasW8g==", + "node_modules/micromark-util-chunked": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/micromark-util-chunked/-/micromark-util-chunked-2.0.0.tgz", + "integrity": "sha512-anK8SWmNphkXdaKgz5hJvGa7l00qmcaUQoMYsBwDlSKFKjc6gjGXPDw3FNL3Nbwq5L8gE+RCbGqTw49FK5Qyvg==", + "funding": [ + { + "type": "GitHub Sponsors", + "url": "https://github.com/sponsors/unifiedjs" + }, + { + "type": "OpenCollective", + "url": "https://opencollective.com/unified" + } + ], + "license": "MIT", "dependencies": { - "@remix-run/router": "1.7.1" - }, - "engines": { - "node": ">=14" - }, - "peerDependencies": { - "react": ">=16.8" + "micromark-util-symbol": "^2.0.0" } }, - "node_modules/react-router-dom": { - "version": "6.14.1", - "resolved": "https://registry.npmjs.org/react-router-dom/-/react-router-dom-6.14.1.tgz", - "integrity": "sha512-ssF6M5UkQjHK70fgukCJyjlda0Dgono2QGwqGvuk7D+EDGHdacEN3Yke2LTMjkrpHuFwBfDFsEjGVXBDmL+bWw==", + "node_modules/micromark-util-classify-character": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/micromark-util-classify-character/-/micromark-util-classify-character-2.0.0.tgz", + "integrity": "sha512-S0ze2R9GH+fu41FA7pbSqNWObo/kzwf8rN/+IGlW/4tC6oACOs8B++bh+i9bVyNnwCcuksbFwsBme5OCKXCwIw==", + "funding": [ + { + "type": "GitHub Sponsors", + "url": "https://github.com/sponsors/unifiedjs" + }, + { + "type": "OpenCollective", + "url": "https://opencollective.com/unified" + } + ], + "license": "MIT", "dependencies": { - "@remix-run/router": "1.7.1", 
- "react-router": "6.14.1" - }, - "engines": { - "node": ">=14" - }, - "peerDependencies": { - "react": ">=16.8", - "react-dom": ">=16.8" + "micromark-util-character": "^2.0.0", + "micromark-util-symbol": "^2.0.0", + "micromark-util-types": "^2.0.0" } }, - "node_modules/regenerator-runtime": { - "version": "0.13.11", - "resolved": "https://registry.npmjs.org/regenerator-runtime/-/regenerator-runtime-0.13.11.tgz", - "integrity": "sha512-kY1AZVr2Ra+t+piVaJ4gxaFaReZVH40AKNo7UCX6W+dEwBo/2oZJzqfuN1qLq1oL45o56cPaTXELwrTh8Fpggg==" - }, - "node_modules/rollup": { - "version": "3.26.2", - "resolved": "https://registry.npmjs.org/rollup/-/rollup-3.26.2.tgz", - "integrity": "sha512-6umBIGVz93er97pMgQO08LuH3m6PUb3jlDUUGFsNJB6VgTCUaDFpupf5JfU30529m/UKOgmiX+uY6Sx8cOYpLA==", - "dev": true, - "bin": { - "rollup": "dist/bin/rollup" - }, - "engines": { - "node": ">=14.18.0", - "npm": ">=8.0.0" - }, - "optionalDependencies": { - "fsevents": "~2.3.2" + "node_modules/micromark-util-combine-extensions": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/micromark-util-combine-extensions/-/micromark-util-combine-extensions-2.0.0.tgz", + "integrity": "sha512-vZZio48k7ON0fVS3CUgFatWHoKbbLTK/rT7pzpJ4Bjp5JjkZeasRfrS9wsBdDJK2cJLHMckXZdzPSSr1B8a4oQ==", + "funding": [ + { + "type": "GitHub Sponsors", + "url": "https://github.com/sponsors/unifiedjs" + }, + { + "type": "OpenCollective", + "url": "https://opencollective.com/unified" + } + ], + "license": "MIT", + "dependencies": { + "micromark-util-chunked": "^2.0.0", + "micromark-util-types": "^2.0.0" } }, - "node_modules/rtl-css-js": { - "version": "1.16.1", - "resolved": "https://registry.npmjs.org/rtl-css-js/-/rtl-css-js-1.16.1.tgz", - "integrity": "sha512-lRQgou1mu19e+Ya0LsTvKrVJ5TYUbqCVPAiImX3UfLTenarvPUl1QFdvu5Z3PYmHT9RCcwIfbjRQBntExyj3Zg==", + "node_modules/micromark-util-decode-numeric-character-reference": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/micromark-util-decode-numeric-character-reference/-/micromark-util-decode-numeric-character-reference-2.0.1.tgz", + "integrity": "sha512-bmkNc7z8Wn6kgjZmVHOX3SowGmVdhYS7yBpMnuMnPzDq/6xwVA604DuOXMZTO1lvq01g+Adfa0pE2UKGlxL1XQ==", + "funding": [ + { + "type": "GitHub Sponsors", + "url": "https://github.com/sponsors/unifiedjs" + }, + { + "type": "OpenCollective", + "url": "https://opencollective.com/unified" + } + ], + "license": "MIT", "dependencies": { - "@babel/runtime": "^7.1.2" + "micromark-util-symbol": "^2.0.0" } }, - "node_modules/scheduler": { - "version": "0.23.0", - "resolved": "https://registry.npmjs.org/scheduler/-/scheduler-0.23.0.tgz", - "integrity": "sha512-CtuThmgHNg7zIZWAXi3AsyIzA3n4xx7aNyjwC2VJldO2LMVDhFK+63xGqq6CsJH4rTAt6/M+N4GhZiDYPx9eUw==", + "node_modules/micromark-util-decode-string": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/micromark-util-decode-string/-/micromark-util-decode-string-2.0.0.tgz", + "integrity": "sha512-r4Sc6leeUTn3P6gk20aFMj2ntPwn6qpDZqWvYmAG6NgvFTIlj4WtrAudLi65qYoaGdXYViXYw2pkmn7QnIFasA==", + "funding": [ + { + "type": "GitHub Sponsors", + "url": "https://github.com/sponsors/unifiedjs" + }, + { + "type": "OpenCollective", + "url": "https://opencollective.com/unified" + } + ], + "license": "MIT", "dependencies": { - "loose-envify": "^1.1.0" + "decode-named-character-reference": "^1.0.0", + "micromark-util-character": "^2.0.0", + "micromark-util-decode-numeric-character-reference": "^2.0.0", + "micromark-util-symbol": "^2.0.0" } }, - "node_modules/source-map-js": { - "version": "1.0.2", - "resolved": 
"https://registry.npmjs.org/source-map-js/-/source-map-js-1.0.2.tgz", - "integrity": "sha512-R0XvVJ9WusLiqTCEiGCmICCMplcCkIwwR11mOSD9CR5u+IXYdiseeEuXCVAjS54zqwkLcPNnmU4OeJ6tUrWhDw==", - "dev": true, - "engines": { - "node": ">=0.10.0" - } + "node_modules/micromark-util-encode": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/micromark-util-encode/-/micromark-util-encode-2.0.0.tgz", + "integrity": "sha512-pS+ROfCXAGLWCOc8egcBvT0kf27GoWMqtdarNfDcjb6YLuV5cM3ioG45Ys2qOVqeqSbjaKg72vU+Wby3eddPsA==", + "funding": [ + { + "type": "GitHub Sponsors", + "url": "https://github.com/sponsors/unifiedjs" + }, + { + "type": "OpenCollective", + "url": "https://opencollective.com/unified" + } + ], + "license": "MIT" }, - "node_modules/stylis": { - "version": "4.1.3", - "resolved": "https://registry.npmjs.org/stylis/-/stylis-4.1.3.tgz", - "integrity": "sha512-GP6WDNWf+o403jrEp9c5jibKavrtLW+/qYGhFxFrG8maXhwTBI7gLLhiBb0o7uFccWN+EOS9aMO6cGHWAO07OA==" + "node_modules/micromark-util-html-tag-name": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/micromark-util-html-tag-name/-/micromark-util-html-tag-name-2.0.0.tgz", + "integrity": "sha512-xNn4Pqkj2puRhKdKTm8t1YHC/BAjx6CEwRFXntTaRf/x16aqka6ouVoutm+QdkISTlT7e2zU7U4ZdlDLJd2Mcw==", + "funding": [ + { + "type": "GitHub Sponsors", + "url": "https://github.com/sponsors/unifiedjs" + }, + { + "type": "OpenCollective", + "url": "https://opencollective.com/unified" + } + ], + "license": "MIT" }, - "node_modules/supports-color": { - "version": "5.5.0", - "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-5.5.0.tgz", - "integrity": "sha512-QjVjwdXIt408MIiAqCX4oUKsgU2EqAGzs2Ppkm4aQYbjm+ZEWEcW4SfFNTr4uMNZma0ey4f5lgLrkB0aX0QMow==", - "dev": true, + "node_modules/micromark-util-normalize-identifier": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/micromark-util-normalize-identifier/-/micromark-util-normalize-identifier-2.0.0.tgz", + "integrity": "sha512-2xhYT0sfo85FMrUPtHcPo2rrp1lwbDEEzpx7jiH2xXJLqBuy4H0GgXk5ToU8IEwoROtXuL8ND0ttVa4rNqYK3w==", + "funding": [ + { + "type": "GitHub Sponsors", + "url": "https://github.com/sponsors/unifiedjs" + }, + { + "type": "OpenCollective", + "url": "https://opencollective.com/unified" + } + ], + "license": "MIT", "dependencies": { - "has-flag": "^3.0.0" - }, - "engines": { - "node": ">=4" + "micromark-util-symbol": "^2.0.0" } }, - "node_modules/to-fast-properties": { + "node_modules/micromark-util-resolve-all": { "version": "2.0.0", - "resolved": "https://registry.npmjs.org/to-fast-properties/-/to-fast-properties-2.0.0.tgz", - "integrity": "sha512-/OaKK0xYrs3DmxRYqL/yDc+FxFUVYhDlXMhRmv3z915w2HF1tnN1omB354j8VUGO/hbRzyD6Y3sA7v7GS/ceog==", - "dev": true, - "engines": { - "node": ">=4" + "resolved": "https://registry.npmjs.org/micromark-util-resolve-all/-/micromark-util-resolve-all-2.0.0.tgz", + "integrity": "sha512-6KU6qO7DZ7GJkaCgwBNtplXCvGkJToU86ybBAUdavvgsCiG8lSSvYxr9MhwmQ+udpzywHsl4RpGJsYWG1pDOcA==", + "funding": [ + { + "type": "GitHub Sponsors", + "url": "https://github.com/sponsors/unifiedjs" + }, + { + "type": "OpenCollective", + "url": "https://opencollective.com/unified" + } + ], + "license": "MIT", + "dependencies": { + "micromark-util-types": "^2.0.0" } }, - "node_modules/tslib": { - "version": "2.5.0", - "resolved": "https://registry.npmjs.org/tslib/-/tslib-2.5.0.tgz", - "integrity": "sha512-336iVw3rtn2BUK7ORdIAHTyxHGRIHVReokCR3XjbckJMK7ms8FysBfhLR8IXnAgy7T0PTPNBWKiH514FOW/WSg==" + "node_modules/micromark-util-sanitize-uri": { + "version": "2.0.0", + "resolved": 
"https://registry.npmjs.org/micromark-util-sanitize-uri/-/micromark-util-sanitize-uri-2.0.0.tgz", + "integrity": "sha512-WhYv5UEcZrbAtlsnPuChHUAsu/iBPOVaEVsntLBIdpibO0ddy8OzavZz3iL2xVvBZOpolujSliP65Kq0/7KIYw==", + "funding": [ + { + "type": "GitHub Sponsors", + "url": "https://github.com/sponsors/unifiedjs" + }, + { + "type": "OpenCollective", + "url": "https://opencollective.com/unified" + } + ], + "license": "MIT", + "dependencies": { + "micromark-util-character": "^2.0.0", + "micromark-util-encode": "^2.0.0", + "micromark-util-symbol": "^2.0.0" + } }, - "node_modules/typescript": { - "version": "5.1.6", - "resolved": "https://registry.npmjs.org/typescript/-/typescript-5.1.6.tgz", - "integrity": "sha512-zaWCozRZ6DLEWAWFrVDz1H6FVXzUSfTy5FUMWsQlU8Ym5JP9eO4xkTIROFCQvhQf61z6O/G6ugw3SgAnvvm+HA==", - "dev": true, - "bin": { - "tsc": "bin/tsc", - "tsserver": "bin/tsserver" - }, - "engines": { - "node": ">=14.17" + "node_modules/micromark-util-subtokenize": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/micromark-util-subtokenize/-/micromark-util-subtokenize-2.0.1.tgz", + "integrity": "sha512-jZNtiFl/1aY73yS3UGQkutD0UbhTt68qnRpw2Pifmz5wV9h8gOVsN70v+Lq/f1rKaU/W8pxRe8y8Q9FX1AOe1Q==", + "funding": [ + { + "type": "GitHub Sponsors", + "url": "https://github.com/sponsors/unifiedjs" + }, + { + "type": "OpenCollective", + "url": "https://opencollective.com/unified" + } + ], + "license": "MIT", + "dependencies": { + "devlop": "^1.0.0", + "micromark-util-chunked": "^2.0.0", + "micromark-util-symbol": "^2.0.0", + "micromark-util-types": "^2.0.0" } }, - "node_modules/update-browserslist-db": { - "version": "1.0.11", - "resolved": "https://registry.npmjs.org/update-browserslist-db/-/update-browserslist-db-1.0.11.tgz", - "integrity": "sha512-dCwEFf0/oT85M1fHBg4F0jtLwJrutGoHSQXCh7u4o2t1drG+c0a9Flnqww6XUKSfQMPpJBRjU8d4RXB09qtvaA==", - "dev": true, + "node_modules/micromark-util-symbol": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/micromark-util-symbol/-/micromark-util-symbol-2.0.0.tgz", + "integrity": "sha512-8JZt9ElZ5kyTnO94muPxIGS8oyElRJaiJO8EzV6ZSyGQ1Is8xwl4Q45qU5UOg+bGH4AikWziz0iN4sFLWs8PGw==", "funding": [ { - "type": "opencollective", - "url": "https://opencollective.com/browserslist" + "type": "GitHub Sponsors", + "url": "https://github.com/sponsors/unifiedjs" }, { - "type": "tidelift", - "url": "https://tidelift.com/funding/github/npm/browserslist" + "type": "OpenCollective", + "url": "https://opencollective.com/unified" + } + ], + "license": "MIT" + }, + "node_modules/micromark-util-types": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/micromark-util-types/-/micromark-util-types-2.0.0.tgz", + "integrity": "sha512-oNh6S2WMHWRZrmutsRmDDfkzKtxF+bc2VxLC9dvtrDIRFln627VsFP6fLMgTryGDljgLPjkrzQSDcPrjPyDJ5w==", + "funding": [ + { + "type": "GitHub Sponsors", + "url": "https://github.com/sponsors/unifiedjs" }, + { + "type": "OpenCollective", + "url": "https://opencollective.com/unified" + } + ], + "license": "MIT" + }, + "node_modules/ms": { + "version": "2.1.2", + "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.2.tgz", + "integrity": "sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w==" + }, + "node_modules/nanoid": { + "version": "3.3.7", + "resolved": "https://registry.npmjs.org/nanoid/-/nanoid-3.3.7.tgz", + "integrity": "sha512-eSRppjcPIatRIMC1U6UngP8XFcz8MQWGQdt1MTBQ7NaAmvXDfvNxbvWV3x2y6CdEUciCSsDHDQZbhYaB8QEo2g==", + "dev": true, + "funding": [ { "type": "github", "url": 
"https://github.com/sponsors/ai" } ], - "dependencies": { - "escalade": "^3.1.1", - "picocolors": "^1.0.0" - }, "bin": { - "update-browserslist-db": "cli.js" + "nanoid": "bin/nanoid.cjs" }, - "peerDependencies": { - "browserslist": ">= 4.21.0" + "engines": { + "node": "^10 || ^12 || ^13.7 || ^14 || >=15.0.1" } }, - "node_modules/vite": { - "version": "4.4.2", - "resolved": "https://registry.npmjs.org/vite/-/vite-4.4.2.tgz", - "integrity": "sha512-zUcsJN+UvdSyHhYa277UHhiJ3iq4hUBwHavOpsNUGsTgjBeoBlK8eDt+iT09pBq0h9/knhG/SPrZiM7cGmg7NA==", - "dev": true, + "node_modules/ndjson-readablestream": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/ndjson-readablestream/-/ndjson-readablestream-1.2.0.tgz", + "integrity": "sha512-QbWX2IIfKMVL+ZFHm9vFEzPh1NzZfzJql59T+9XoXzUp8n0wu2t9qgDV9nT0A77YYa6KbAjsHNWzJfpZTfp4xQ==" + }, + "node_modules/node-fetch": { + "version": "2.7.0", + "resolved": "https://registry.npmjs.org/node-fetch/-/node-fetch-2.7.0.tgz", + "integrity": "sha512-c4FRfUm/dbcWZ7U+1Wq0AwCyFL+3nt2bEw05wfxSz+DWpWsitgmSgYmy2dQdWyKC1694ELPqMs/YzUSNozLt8A==", + "license": "MIT", "dependencies": { - "esbuild": "^0.18.10", - "postcss": "^8.4.24", - "rollup": "^3.25.2" - }, - "bin": { - "vite": "bin/vite.js" + "whatwg-url": "^5.0.0" }, "engines": { - "node": "^14.18.0 || >=16.0.0" - }, - "funding": { - "url": "https://github.com/vitejs/vite?sponsor=1" - }, - "optionalDependencies": { - "fsevents": "~2.3.2" + "node": "4.x || >=6.0.0" }, "peerDependencies": { - "@types/node": ">= 14", - "less": "*", - "lightningcss": "^1.21.0", - "sass": "*", - "stylus": "*", - "sugarss": "*", - "terser": "^5.4.0" + "encoding": "^0.1.0" }, "peerDependenciesMeta": { - "@types/node": { - "optional": true - }, - "less": { - "optional": true - }, - "lightningcss": { - "optional": true - }, - "sass": { - "optional": true - }, - "stylus": { - "optional": true - }, - "sugarss": { - "optional": true - }, - "terser": { + "encoding": { "optional": true } } }, - "node_modules/yallist": { - "version": "3.1.1", - "resolved": "https://registry.npmjs.org/yallist/-/yallist-3.1.1.tgz", - "integrity": "sha512-a4UGQaWPH59mOXUYnAG2ewncQS4i4F43Tv3JoAM+s2VDAmS9NsK8GpDMLrCHPksFT7h3K6TOoUNn2pb7RoXx4g==", - "dev": true - } - }, - "dependencies": { - "@ampproject/remapping": { - "version": "2.2.1", - "resolved": "https://registry.npmjs.org/@ampproject/remapping/-/remapping-2.2.1.tgz", - "integrity": "sha512-lFMjJTrFL3j7L9yBxwYfCq2k6qqwHyzuUl/XBnif78PWTJYyL/dfowQHWE3sp6U6ZzqWiiIZnpTMO96zhkjwtg==", - "dev": true, - "requires": { - "@jridgewell/gen-mapping": "^0.3.0", - "@jridgewell/trace-mapping": "^0.3.9" - } - }, - "@babel/code-frame": { - "version": "7.22.5", - "resolved": "https://registry.npmjs.org/@babel/code-frame/-/code-frame-7.22.5.tgz", - "integrity": "sha512-Xmwn266vad+6DAqEB2A6V/CcZVp62BbwVmcOJc2RPuwih1kw02TjQvWVWlcKGbBPd+8/0V5DEkOcizRGYsspYQ==", - "dev": true, - "requires": { - "@babel/highlight": "^7.22.5" - } - }, - "@babel/compat-data": { - "version": "7.22.6", - "resolved": "https://registry.npmjs.org/@babel/compat-data/-/compat-data-7.22.6.tgz", - "integrity": "sha512-29tfsWTq2Ftu7MXmimyC0C5FDZv5DYxOZkh3XD3+QW4V/BYuv/LyEsjj3c0hqedEaDt6DBfDvexMKU8YevdqFg==", - "dev": true - }, - "@babel/core": { - "version": "7.22.8", - "resolved": "https://registry.npmjs.org/@babel/core/-/core-7.22.8.tgz", - "integrity": "sha512-75+KxFB4CZqYRXjx4NlR4J7yGvKumBuZTmV4NV6v09dVXXkuYVYLT68N6HCzLvfJ+fWCxQsntNzKwwIXL4bHnw==", - "dev": true, - "requires": { - "@ampproject/remapping": "^2.2.0", - "@babel/code-frame": "^7.22.5", - 
"@babel/generator": "^7.22.7", - "@babel/helper-compilation-targets": "^7.22.6", - "@babel/helper-module-transforms": "^7.22.5", - "@babel/helpers": "^7.22.6", - "@babel/parser": "^7.22.7", - "@babel/template": "^7.22.5", - "@babel/traverse": "^7.22.8", - "@babel/types": "^7.22.5", - "@nicolo-ribaudo/semver-v6": "^6.3.3", - "convert-source-map": "^1.7.0", - "debug": "^4.1.0", - "gensync": "^1.0.0-beta.2", - "json5": "^2.2.2" - } - }, - "@babel/generator": { - "version": "7.22.7", - "resolved": "https://registry.npmjs.org/@babel/generator/-/generator-7.22.7.tgz", - "integrity": "sha512-p+jPjMG+SI8yvIaxGgeW24u7q9+5+TGpZh8/CuB7RhBKd7RCy8FayNEFNNKrNK/eUcY/4ExQqLmyrvBXKsIcwQ==", - "dev": true, - "requires": { - "@babel/types": "^7.22.5", - "@jridgewell/gen-mapping": "^0.3.2", - "@jridgewell/trace-mapping": "^0.3.17", - "jsesc": "^2.5.1" - } - }, - "@babel/helper-compilation-targets": { - "version": "7.22.6", - "resolved": "https://registry.npmjs.org/@babel/helper-compilation-targets/-/helper-compilation-targets-7.22.6.tgz", - "integrity": "sha512-534sYEqWD9VfUm3IPn2SLcH4Q3P86XL+QvqdC7ZsFrzyyPF3T4XGiVghF6PTYNdWg6pXuoqXxNQAhbYeEInTzA==", - "dev": true, - "requires": { - "@babel/compat-data": "^7.22.6", - "@babel/helper-validator-option": "^7.22.5", - "@nicolo-ribaudo/semver-v6": "^6.3.3", - "browserslist": "^4.21.9", - "lru-cache": "^5.1.1" - } - }, - "@babel/helper-environment-visitor": { - "version": "7.22.5", - "resolved": "https://registry.npmjs.org/@babel/helper-environment-visitor/-/helper-environment-visitor-7.22.5.tgz", - "integrity": "sha512-XGmhECfVA/5sAt+H+xpSg0mfrHq6FzNr9Oxh7PSEBBRUb/mL7Kz3NICXb194rCqAEdxkhPT1a88teizAFyvk8Q==", - "dev": true - }, - "@babel/helper-function-name": { - "version": "7.22.5", - "resolved": "https://registry.npmjs.org/@babel/helper-function-name/-/helper-function-name-7.22.5.tgz", - "integrity": "sha512-wtHSq6jMRE3uF2otvfuD3DIvVhOsSNshQl0Qrd7qC9oQJzHvOL4qQXlQn2916+CXGywIjpGuIkoyZRRxHPiNQQ==", - "dev": true, - "requires": { - "@babel/template": "^7.22.5", - "@babel/types": "^7.22.5" - } - }, - "@babel/helper-hoist-variables": { - "version": "7.22.5", - "resolved": "https://registry.npmjs.org/@babel/helper-hoist-variables/-/helper-hoist-variables-7.22.5.tgz", - "integrity": "sha512-wGjk9QZVzvknA6yKIUURb8zY3grXCcOZt+/7Wcy8O2uctxhplmUPkOdlgoNhmdVee2c92JXbf1xpMtVNbfoxRw==", - "dev": true, - "requires": { - "@babel/types": "^7.22.5" - } - }, - "@babel/helper-module-imports": { - "version": "7.22.5", - "resolved": "https://registry.npmjs.org/@babel/helper-module-imports/-/helper-module-imports-7.22.5.tgz", - "integrity": "sha512-8Dl6+HD/cKifutF5qGd/8ZJi84QeAKh+CEe1sBzz8UayBBGg1dAIJrdHOcOM5b2MpzWL2yuotJTtGjETq0qjXg==", - "dev": true, - "requires": { - "@babel/types": "^7.22.5" - } - }, - "@babel/helper-module-transforms": { - "version": "7.22.5", - "resolved": "https://registry.npmjs.org/@babel/helper-module-transforms/-/helper-module-transforms-7.22.5.tgz", - "integrity": "sha512-+hGKDt/Ze8GFExiVHno/2dvG5IdstpzCq0y4Qc9OJ25D4q3pKfiIP/4Vp3/JvhDkLKsDK2api3q3fpIgiIF5bw==", - "dev": true, - "requires": { - "@babel/helper-environment-visitor": "^7.22.5", - "@babel/helper-module-imports": "^7.22.5", - "@babel/helper-simple-access": "^7.22.5", - "@babel/helper-split-export-declaration": "^7.22.5", - "@babel/helper-validator-identifier": "^7.22.5", - "@babel/template": "^7.22.5", - "@babel/traverse": "^7.22.5", - "@babel/types": "^7.22.5" - } - }, - "@babel/helper-plugin-utils": { - "version": "7.22.5", - "resolved": 
"https://registry.npmjs.org/@babel/helper-plugin-utils/-/helper-plugin-utils-7.22.5.tgz", - "integrity": "sha512-uLls06UVKgFG9QD4OeFYLEGteMIAa5kpTPcFL28yuCIIzsf6ZyKZMllKVOCZFhiZ5ptnwX4mtKdWCBE/uT4amg==", + "node_modules/node-releases": { + "version": "2.0.18", + "resolved": "https://registry.npmjs.org/node-releases/-/node-releases-2.0.18.tgz", + "integrity": "sha512-d9VeXT4SJ7ZeOqGX6R5EM022wpL+eWPooLI+5UpWn2jCT1aosUQEhQP214x33Wkwx3JQMvIm+tIoVOdodFS40g==", "dev": true }, - "@babel/helper-simple-access": { - "version": "7.22.5", - "resolved": "https://registry.npmjs.org/@babel/helper-simple-access/-/helper-simple-access-7.22.5.tgz", - "integrity": "sha512-n0H99E/K+Bika3++WNL17POvo4rKWZ7lZEp1Q+fStVbUi8nxPQEBOlTmCOxW/0JsS56SKKQ+ojAe2pHKJHN35w==", - "dev": true, - "requires": { - "@babel/types": "^7.22.5" + "node_modules/object-assign": { + "version": "4.1.1", + "license": "MIT", + "engines": { + "node": ">=0.10.0" } }, - "@babel/helper-split-export-declaration": { - "version": "7.22.6", - "resolved": "https://registry.npmjs.org/@babel/helper-split-export-declaration/-/helper-split-export-declaration-7.22.6.tgz", - "integrity": "sha512-AsUnxuLhRYsisFiaJwvp1QF+I3KjD5FOxut14q/GzovUe6orHLesW2C7d754kRm53h5gqrz6sFl6sxc4BVtE/g==", + "node_modules/open": { + "version": "8.4.2", + "resolved": "https://registry.npmjs.org/open/-/open-8.4.2.tgz", + "integrity": "sha512-7x81NCL719oNbsq/3mh+hVrAWmFuEYUqrq/Iw3kUzH8ReypT9QQ0BLoJS7/G9k6N81XjW4qHWtjWwe/9eLy1EQ==", "dev": true, - "requires": { - "@babel/types": "^7.22.5" + "license": "MIT", + "dependencies": { + "define-lazy-prop": "^2.0.0", + "is-docker": "^2.1.1", + "is-wsl": "^2.2.0" + }, + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" } }, - "@babel/helper-string-parser": { - "version": "7.22.5", - "resolved": "https://registry.npmjs.org/@babel/helper-string-parser/-/helper-string-parser-7.22.5.tgz", - "integrity": "sha512-mM4COjgZox8U+JcXQwPijIZLElkgEpO5rsERVDJTc2qfCDfERyob6k5WegS14SX18IIjv+XD+GrqNumY5JRCDw==", - "dev": true - }, - "@babel/helper-validator-identifier": { - "version": "7.22.5", - "resolved": "https://registry.npmjs.org/@babel/helper-validator-identifier/-/helper-validator-identifier-7.22.5.tgz", - "integrity": "sha512-aJXu+6lErq8ltp+JhkJUfk1MTGyuA4v7f3pA+BJ5HLfNC6nAQ0Cpi9uOquUj8Hehg0aUiHzWQbOVJGao6ztBAQ==", - "dev": true - }, - "@babel/helper-validator-option": { - "version": "7.22.5", - "resolved": "https://registry.npmjs.org/@babel/helper-validator-option/-/helper-validator-option-7.22.5.tgz", - "integrity": "sha512-R3oB6xlIVKUnxNUxbmgq7pKjxpru24zlimpE8WK47fACIlM0II/Hm1RS8IaOI7NgCr6LNS+jl5l75m20npAziw==", - "dev": true - }, - "@babel/helpers": { - "version": "7.22.6", - "resolved": "https://registry.npmjs.org/@babel/helpers/-/helpers-7.22.6.tgz", - "integrity": "sha512-YjDs6y/fVOYFV8hAf1rxd1QvR9wJe1pDBZ2AREKq/SDayfPzgk0PBnVuTCE5X1acEpMMNOVUqoe+OwiZGJ+OaA==", - "dev": true, - "requires": { - "@babel/template": "^7.22.5", - "@babel/traverse": "^7.22.6", - "@babel/types": "^7.22.5" + "node_modules/parse-entities": { + "version": "2.0.0", + "license": "MIT", + "dependencies": { + "character-entities": "^1.0.0", + "character-entities-legacy": "^1.0.0", + "character-reference-invalid": "^1.0.0", + "is-alphanumerical": "^1.0.0", + "is-decimal": "^1.0.0", + "is-hexadecimal": "^1.0.0" + }, + "funding": { + "type": "github", + "url": "https://github.com/sponsors/wooorm" } }, - "@babel/highlight": { - "version": "7.22.5", - "resolved": 
"https://registry.npmjs.org/@babel/highlight/-/highlight-7.22.5.tgz", - "integrity": "sha512-BSKlD1hgnedS5XRnGOljZawtag7H1yPfQp0tdNJCHoH6AZ+Pcm9VvkrK59/Yy593Ypg0zMxH2BxD1VPYUQ7UIw==", - "dev": true, - "requires": { - "@babel/helper-validator-identifier": "^7.22.5", - "chalk": "^2.0.0", - "js-tokens": "^4.0.0" + "node_modules/parse5": { + "version": "7.1.2", + "resolved": "https://registry.npmjs.org/parse5/-/parse5-7.1.2.tgz", + "integrity": "sha512-Czj1WaSVpaoj0wbhMzLmWD69anp2WH7FXMB9n1Sy8/ZFF9jolSQVMu1Ij5WIyGmcBmhk7EOndpO4mIpihVqAXw==", + "license": "MIT", + "dependencies": { + "entities": "^4.4.0" + }, + "funding": { + "url": "https://github.com/inikulin/parse5?sponsor=1" } }, - "@babel/parser": { - "version": "7.22.7", - "resolved": "https://registry.npmjs.org/@babel/parser/-/parser-7.22.7.tgz", - "integrity": "sha512-7NF8pOkHP5o2vpmGgNGcfAeCvOYhGLyA3Z4eBQkT1RJlWu47n63bCs93QfJ2hIAFCil7L5P2IWhs1oToVgrL0Q==", + "node_modules/picocolors": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/picocolors/-/picocolors-1.1.0.tgz", + "integrity": "sha512-TQ92mBOW0l3LeMeyLV6mzy/kWr8lkd/hp3mTg7wYK7zJhuBStmGMBG0BdeDZS/dZx1IukaX6Bk11zcln25o1Aw==", "dev": true }, - "@babel/plugin-transform-react-jsx-self": { - "version": "7.22.5", - "resolved": "https://registry.npmjs.org/@babel/plugin-transform-react-jsx-self/-/plugin-transform-react-jsx-self-7.22.5.tgz", - "integrity": "sha512-nTh2ogNUtxbiSbxaT4Ds6aXnXEipHweN9YRgOX/oNXdf0cCrGn/+2LozFa3lnPV5D90MkjhgckCPBrsoSc1a7g==", - "dev": true, - "requires": { - "@babel/helper-plugin-utils": "^7.22.5" - } - }, - "@babel/plugin-transform-react-jsx-source": { - "version": "7.22.5", - "resolved": "https://registry.npmjs.org/@babel/plugin-transform-react-jsx-source/-/plugin-transform-react-jsx-source-7.22.5.tgz", - "integrity": "sha512-yIiRO6yobeEIaI0RTbIr8iAK9FcBHLtZq0S89ZPjDLQXBA4xvghaKqI0etp/tF3htTM0sazJKKLz9oEiGRtu7w==", + "node_modules/picomatch": { + "version": "2.3.1", + "resolved": "https://registry.npmjs.org/picomatch/-/picomatch-2.3.1.tgz", + "integrity": "sha512-JU3teHTNjmE2VCGFzuY8EXzCDVwEqB2a8fsIvwaStHhAWJEeVd1o1QD80CU6+ZdEXXSLbSsuLwJjkCBWqRQUVA==", "dev": true, - "requires": { - "@babel/helper-plugin-utils": "^7.22.5" - } - }, - "@babel/runtime": { - "version": "7.21.0", - "resolved": "https://registry.npmjs.org/@babel/runtime/-/runtime-7.21.0.tgz", - "integrity": "sha512-xwII0//EObnq89Ji5AKYQaRYiW/nZ3llSv29d49IuxPhKbtJoLP+9QUUZ4nVragQVtaVGeZrpB+ZtG/Pdy/POw==", - "requires": { - "regenerator-runtime": "^0.13.11" + "license": "MIT", + "engines": { + "node": ">=8.6" + }, + "funding": { + "url": "https://github.com/sponsors/jonschlinkert" } }, - "@babel/template": { - "version": "7.22.5", - "resolved": "https://registry.npmjs.org/@babel/template/-/template-7.22.5.tgz", - "integrity": "sha512-X7yV7eiwAxdj9k94NEylvbVHLiVG1nvzCV2EAowhxLTwODV1jl9UzZ48leOC0sH7OnuHrIkllaBgneUykIcZaw==", + "node_modules/postcss": { + "version": "8.4.47", + "resolved": "https://registry.npmjs.org/postcss/-/postcss-8.4.47.tgz", + "integrity": "sha512-56rxCq7G/XfB4EkXq9Egn5GCqugWvDFjafDOThIdMBsI15iqPqR5r15TfSr1YPYeEI19YeaXMCbY6u88Y76GLQ==", "dev": true, - "requires": { - "@babel/code-frame": "^7.22.5", - "@babel/parser": "^7.22.5", - "@babel/types": "^7.22.5" + "funding": [ + { + "type": "opencollective", + "url": "https://opencollective.com/postcss/" + }, + { + "type": "tidelift", + "url": "https://tidelift.com/funding/github/npm/postcss" + }, + { + "type": "github", + "url": "https://github.com/sponsors/ai" + } + ], + "dependencies": { + "nanoid": "^3.3.7", + 
"picocolors": "^1.1.0", + "source-map-js": "^1.2.1" + }, + "engines": { + "node": "^10 || ^12 || >=14" } }, - "@babel/traverse": { - "version": "7.22.8", - "resolved": "https://registry.npmjs.org/@babel/traverse/-/traverse-7.22.8.tgz", - "integrity": "sha512-y6LPR+wpM2I3qJrsheCTwhIinzkETbplIgPBbwvqPKc+uljeA5gP+3nP8irdYt1mjQaDnlIcG+dw8OjAco4GXw==", + "node_modules/prettier": { + "version": "3.3.3", + "resolved": "https://registry.npmjs.org/prettier/-/prettier-3.3.3.tgz", + "integrity": "sha512-i2tDNA0O5IrMO757lfrdQZCc2jPNDVntV0m/+4whiDfWaTKfMNgR7Qz0NAeGz/nRqF4m5/6CLzbP4/liHt12Ew==", "dev": true, - "requires": { - "@babel/code-frame": "^7.22.5", - "@babel/generator": "^7.22.7", - "@babel/helper-environment-visitor": "^7.22.5", - "@babel/helper-function-name": "^7.22.5", - "@babel/helper-hoist-variables": "^7.22.5", - "@babel/helper-split-export-declaration": "^7.22.6", - "@babel/parser": "^7.22.7", - "@babel/types": "^7.22.5", - "debug": "^4.1.0", - "globals": "^11.1.0" + "bin": { + "prettier": "bin/prettier.cjs" + }, + "engines": { + "node": ">=14" + }, + "funding": { + "url": "https://github.com/prettier/prettier?sponsor=1" } }, - "@babel/types": { - "version": "7.22.5", - "resolved": "https://registry.npmjs.org/@babel/types/-/types-7.22.5.tgz", - "integrity": "sha512-zo3MIHGOkPOfoRXitsgHLjEXmlDaD/5KU1Uzuc9GNiZPhSqVxVRtxuPaSBZDsYZ9qV88AjtMtWW7ww98loJ9KA==", - "dev": true, - "requires": { - "@babel/helper-string-parser": "^7.22.5", - "@babel/helper-validator-identifier": "^7.22.5", - "to-fast-properties": "^2.0.0" + "node_modules/prismjs": { + "version": "1.29.0", + "license": "MIT", + "engines": { + "node": ">=6" } }, - "@emotion/hash": { - "version": "0.9.0", - "resolved": "https://registry.npmjs.org/@emotion/hash/-/hash-0.9.0.tgz", - "integrity": "sha512-14FtKiHhy2QoPIzdTcvh//8OyBlknNs2nXRwIhG904opCby3l+9Xaf/wuPvICBF0rc1ZCNBd3nKe9cd2mecVkQ==" - }, - "@esbuild/android-arm": { - "version": "0.18.11", - "resolved": "https://registry.npmjs.org/@esbuild/android-arm/-/android-arm-0.18.11.tgz", - "integrity": "sha512-q4qlUf5ucwbUJZXF5tEQ8LF7y0Nk4P58hOsGk3ucY0oCwgQqAnqXVbUuahCddVHfrxmpyewRpiTHwVHIETYu7Q==", - "dev": true, - "optional": true - }, - "@esbuild/android-arm64": { - "version": "0.18.11", - "resolved": "https://registry.npmjs.org/@esbuild/android-arm64/-/android-arm64-0.18.11.tgz", - "integrity": "sha512-snieiq75Z1z5LJX9cduSAjUr7vEI1OdlzFPMw0HH5YI7qQHDd3qs+WZoMrWYDsfRJSq36lIA6mfZBkvL46KoIw==", - "dev": true, - "optional": true - }, - "@esbuild/android-x64": { - "version": "0.18.11", - "resolved": "https://registry.npmjs.org/@esbuild/android-x64/-/android-x64-0.18.11.tgz", - "integrity": "sha512-iPuoxQEV34+hTF6FT7om+Qwziv1U519lEOvekXO9zaMMlT9+XneAhKL32DW3H7okrCOBQ44BMihE8dclbZtTuw==", - "dev": true, - "optional": true - }, - "@esbuild/darwin-arm64": { - "version": "0.18.11", - "resolved": "https://registry.npmjs.org/@esbuild/darwin-arm64/-/darwin-arm64-0.18.11.tgz", - "integrity": "sha512-Gm0QkI3k402OpfMKyQEEMG0RuW2LQsSmI6OeO4El2ojJMoF5NLYb3qMIjvbG/lbMeLOGiW6ooU8xqc+S0fgz2w==", - "dev": true, - "optional": true - }, - "@esbuild/darwin-x64": { - "version": "0.18.11", - "resolved": "https://registry.npmjs.org/@esbuild/darwin-x64/-/darwin-x64-0.18.11.tgz", - "integrity": "sha512-N15Vzy0YNHu6cfyDOjiyfJlRJCB/ngKOAvoBf1qybG3eOq0SL2Lutzz9N7DYUbb7Q23XtHPn6lMDF6uWbGv9Fw==", - "dev": true, - "optional": true - }, - "@esbuild/freebsd-arm64": { - "version": "0.18.11", - "resolved": "https://registry.npmjs.org/@esbuild/freebsd-arm64/-/freebsd-arm64-0.18.11.tgz", - "integrity": 
"sha512-atEyuq6a3omEY5qAh5jIORWk8MzFnCpSTUruBgeyN9jZq1K/QI9uke0ATi3MHu4L8c59CnIi4+1jDKMuqmR71A==", - "dev": true, - "optional": true - }, - "@esbuild/freebsd-x64": { - "version": "0.18.11", - "resolved": "https://registry.npmjs.org/@esbuild/freebsd-x64/-/freebsd-x64-0.18.11.tgz", - "integrity": "sha512-XtuPrEfBj/YYYnAAB7KcorzzpGTvOr/dTtXPGesRfmflqhA4LMF0Gh/n5+a9JBzPuJ+CGk17CA++Hmr1F/gI0Q==", - "dev": true, - "optional": true - }, - "@esbuild/linux-arm": { - "version": "0.18.11", - "resolved": "https://registry.npmjs.org/@esbuild/linux-arm/-/linux-arm-0.18.11.tgz", - "integrity": "sha512-Idipz+Taso/toi2ETugShXjQ3S59b6m62KmLHkJlSq/cBejixmIydqrtM2XTvNCywFl3VC7SreSf6NV0i6sRyg==", - "dev": true, - "optional": true - }, - "@esbuild/linux-arm64": { - "version": "0.18.11", - "resolved": "https://registry.npmjs.org/@esbuild/linux-arm64/-/linux-arm64-0.18.11.tgz", - "integrity": "sha512-c6Vh2WS9VFKxKZ2TvJdA7gdy0n6eSy+yunBvv4aqNCEhSWVor1TU43wNRp2YLO9Vng2G+W94aRz+ILDSwAiYog==", - "dev": true, - "optional": true - }, - "@esbuild/linux-ia32": { - "version": "0.18.11", - "resolved": "https://registry.npmjs.org/@esbuild/linux-ia32/-/linux-ia32-0.18.11.tgz", - "integrity": "sha512-S3hkIF6KUqRh9n1Q0dSyYcWmcVa9Cg+mSoZEfFuzoYXXsk6196qndrM+ZiHNwpZKi3XOXpShZZ+9dfN5ykqjjw==", - "dev": true, - "optional": true - }, - "@esbuild/linux-loong64": { - "version": "0.18.11", - "resolved": "https://registry.npmjs.org/@esbuild/linux-loong64/-/linux-loong64-0.18.11.tgz", - "integrity": "sha512-MRESANOoObQINBA+RMZW+Z0TJWpibtE7cPFnahzyQHDCA9X9LOmGh68MVimZlM9J8n5Ia8lU773te6O3ILW8kw==", - "dev": true, - "optional": true - }, - "@esbuild/linux-mips64el": { - "version": "0.18.11", - "resolved": "https://registry.npmjs.org/@esbuild/linux-mips64el/-/linux-mips64el-0.18.11.tgz", - "integrity": "sha512-qVyPIZrXNMOLYegtD1u8EBccCrBVshxMrn5MkuFc3mEVsw7CCQHaqZ4jm9hbn4gWY95XFnb7i4SsT3eflxZsUg==", - "dev": true, - "optional": true - }, - "@esbuild/linux-ppc64": { - "version": "0.18.11", - "resolved": "https://registry.npmjs.org/@esbuild/linux-ppc64/-/linux-ppc64-0.18.11.tgz", - "integrity": "sha512-T3yd8vJXfPirZaUOoA9D2ZjxZX4Gr3QuC3GztBJA6PklLotc/7sXTOuuRkhE9W/5JvJP/K9b99ayPNAD+R+4qQ==", - "dev": true, - "optional": true - }, - "@esbuild/linux-riscv64": { - "version": "0.18.11", - "resolved": "https://registry.npmjs.org/@esbuild/linux-riscv64/-/linux-riscv64-0.18.11.tgz", - "integrity": "sha512-evUoRPWiwuFk++snjH9e2cAjF5VVSTj+Dnf+rkO/Q20tRqv+644279TZlPK8nUGunjPAtQRCj1jQkDAvL6rm2w==", - "dev": true, - "optional": true - }, - "@esbuild/linux-s390x": { - "version": "0.18.11", - "resolved": "https://registry.npmjs.org/@esbuild/linux-s390x/-/linux-s390x-0.18.11.tgz", - "integrity": "sha512-/SlRJ15XR6i93gRWquRxYCfhTeC5PdqEapKoLbX63PLCmAkXZHY2uQm2l9bN0oPHBsOw2IswRZctMYS0MijFcg==", - "dev": true, - "optional": true - }, - "@esbuild/linux-x64": { - "version": "0.18.11", - "resolved": "https://registry.npmjs.org/@esbuild/linux-x64/-/linux-x64-0.18.11.tgz", - "integrity": "sha512-xcncej+wF16WEmIwPtCHi0qmx1FweBqgsRtEL1mSHLFR6/mb3GEZfLQnx+pUDfRDEM4DQF8dpXIW7eDOZl1IbA==", - "dev": true, - "optional": true + "node_modules/prop-types": { + "version": "15.8.1", + "resolved": "https://registry.npmjs.org/prop-types/-/prop-types-15.8.1.tgz", + "integrity": "sha512-oj87CgZICdulUohogVAR7AjlC0327U4el4L6eAvOqCeudMDVU0NThNaV+b9Df4dXgSP1gXMTnPdhfe/2qDH5cg==", + "dependencies": { + "loose-envify": "^1.4.0", + "object-assign": "^4.1.1", + "react-is": "^16.13.1" + } }, - "@esbuild/netbsd-x64": { - "version": "0.18.11", - "resolved": 
"https://registry.npmjs.org/@esbuild/netbsd-x64/-/netbsd-x64-0.18.11.tgz", - "integrity": "sha512-aSjMHj/F7BuS1CptSXNg6S3M4F3bLp5wfFPIJM+Km2NfIVfFKhdmfHF9frhiCLIGVzDziggqWll0B+9AUbud/Q==", - "dev": true, - "optional": true + "node_modules/prop-types/node_modules/react-is": { + "version": "16.13.1", + "resolved": "https://registry.npmjs.org/react-is/-/react-is-16.13.1.tgz", + "integrity": "sha512-24e6ynE2H+OKt4kqsOvNd8kBpV65zoxbA4BVsEOB3ARVWQki/DHzaUoC5KuON/BiccDaCCTZBuOcfZs70kR8bQ==" }, - "@esbuild/openbsd-x64": { - "version": "0.18.11", - "resolved": "https://registry.npmjs.org/@esbuild/openbsd-x64/-/openbsd-x64-0.18.11.tgz", - "integrity": "sha512-tNBq+6XIBZtht0xJGv7IBB5XaSyvYPCm1PxJ33zLQONdZoLVM0bgGqUrXnJyiEguD9LU4AHiu+GCXy/Hm9LsdQ==", - "dev": true, - "optional": true + "node_modules/property-information": { + "version": "5.6.0", + "license": "MIT", + "dependencies": { + "xtend": "^4.0.0" + }, + "funding": { + "type": "github", + "url": "https://github.com/sponsors/wooorm" + } }, - "@esbuild/sunos-x64": { - "version": "0.18.11", - "resolved": "https://registry.npmjs.org/@esbuild/sunos-x64/-/sunos-x64-0.18.11.tgz", - "integrity": "sha512-kxfbDOrH4dHuAAOhr7D7EqaYf+W45LsAOOhAet99EyuxxQmjbk8M9N4ezHcEiCYPaiW8Dj3K26Z2V17Gt6p3ng==", - "dev": true, - "optional": true + "node_modules/react": { + "version": "18.3.1", + "resolved": "https://registry.npmjs.org/react/-/react-18.3.1.tgz", + "integrity": "sha512-wS+hAgJShR0KhEvPJArfuPVN1+Hz1t0Y6n5jLrGQbkb4urgPE/0Rve+1kMB1v/oWgHgm4WIcV+i7F2pTVj+2iQ==", + "dependencies": { + "loose-envify": "^1.1.0" + }, + "engines": { + "node": ">=0.10.0" + } }, - "@esbuild/win32-arm64": { - "version": "0.18.11", - "resolved": "https://registry.npmjs.org/@esbuild/win32-arm64/-/win32-arm64-0.18.11.tgz", - "integrity": "sha512-Sh0dDRyk1Xi348idbal7lZyfSkjhJsdFeuC13zqdipsvMetlGiFQNdO+Yfp6f6B4FbyQm7qsk16yaZk25LChzg==", - "dev": true, - "optional": true + "node_modules/react-dom": { + "version": "18.3.1", + "resolved": "https://registry.npmjs.org/react-dom/-/react-dom-18.3.1.tgz", + "integrity": "sha512-5m4nQKp+rZRb09LNH59GM4BxTh9251/ylbKIbpe7TpGxfJ+9kv6BLkLBXIjjspbgbnIBNqlI23tRnTWT0snUIw==", + "dependencies": { + "loose-envify": "^1.1.0", + "scheduler": "^0.23.2" + }, + "peerDependencies": { + "react": "^18.3.1" + } }, - "@esbuild/win32-ia32": { - "version": "0.18.11", - "resolved": "https://registry.npmjs.org/@esbuild/win32-ia32/-/win32-ia32-0.18.11.tgz", - "integrity": "sha512-o9JUIKF1j0rqJTFbIoF4bXj6rvrTZYOrfRcGyL0Vm5uJ/j5CkBD/51tpdxe9lXEDouhRgdr/BYzUrDOvrWwJpg==", - "dev": true, - "optional": true + "node_modules/react-dom/node_modules/scheduler": { + "version": "0.23.2", + "resolved": "https://registry.npmjs.org/scheduler/-/scheduler-0.23.2.tgz", + "integrity": "sha512-UOShsPwz7NrMUqhR6t0hWjFduvOzbtv7toDH1/hIrfRNIDBnnBWd0CwJTGvTpngVlmwGCdP9/Zl/tVrDqcuYzQ==", + "dependencies": { + "loose-envify": "^1.1.0" + } }, - "@esbuild/win32-x64": { - "version": "0.18.11", - "resolved": "https://registry.npmjs.org/@esbuild/win32-x64/-/win32-x64-0.18.11.tgz", - "integrity": "sha512-rQI4cjLHd2hGsM1LqgDI7oOCYbQ6IBOVsX9ejuRMSze0GqXUG2ekwiKkiBU1pRGSeCqFFHxTrcEydB2Hyoz9CA==", - "dev": true, - "optional": true + "node_modules/react-fast-compare": { + "version": "3.2.2", + "resolved": "https://registry.npmjs.org/react-fast-compare/-/react-fast-compare-3.2.2.tgz", + "integrity": "sha512-nsO+KSNgo1SbJqJEYRE9ERzo7YtYbou/OqjSQKxV7jcKox7+usiUVZOAC+XnDOABXggQTno0Y1CpVnuWEc1boQ==", + "license": "MIT" }, - "@fluentui/date-time-utilities": { - "version": "8.5.13", - "resolved": 
"https://registry.npmjs.org/@fluentui/date-time-utilities/-/date-time-utilities-8.5.13.tgz", - "integrity": "sha512-X3clbPKh0URkDj21QoARw6SNec7dWg7Gt7SkTlkVYFzmZUdC4ZIrYk3n36xKe3U1wcGp26EVmKjhAhB262ugpw==", - "requires": { - "@fluentui/set-version": "^8.2.11", - "tslib": "^2.1.0" + "node_modules/react-helmet-async": { + "version": "2.0.5", + "resolved": "https://registry.npmjs.org/react-helmet-async/-/react-helmet-async-2.0.5.tgz", + "integrity": "sha512-rYUYHeus+i27MvFE+Jaa4WsyBKGkL6qVgbJvSBoX8mbsWoABJXdEO0bZyi0F6i+4f0NuIb8AvqPMj3iXFHkMwg==", + "license": "Apache-2.0", + "dependencies": { + "invariant": "^2.2.4", + "react-fast-compare": "^3.2.2", + "shallowequal": "^1.1.0" + }, + "peerDependencies": { + "react": "^16.6.0 || ^17.0.0 || ^18.0.0" } }, - "@fluentui/dom-utilities": { - "version": "2.2.11", - "resolved": "https://registry.npmjs.org/@fluentui/dom-utilities/-/dom-utilities-2.2.11.tgz", - "integrity": "sha512-2tXfg7/9PXu9nfU72/P3o3waHEFEQtHUfQbVexUaYqNNAxMj6sOfsqpUx4vd5nPgO+grSWrl+spqlLN2yej51w==", - "requires": { - "@fluentui/set-version": "^8.2.11", - "tslib": "^2.1.0" + "node_modules/react-i18next": { + "version": "15.4.1", + "resolved": "https://registry.npmjs.org/react-i18next/-/react-i18next-15.4.1.tgz", + "integrity": "sha512-ahGab+IaSgZmNPYXdV1n+OYky95TGpFwnKRflX/16dY04DsYYKHtVLjeny7sBSCREEcoMbAgSkFiGLF5g5Oofw==", + "license": "MIT", + "dependencies": { + "@babel/runtime": "^7.25.0", + "html-parse-stringify": "^3.0.1" + }, + "peerDependencies": { + "i18next": ">= 23.2.3", + "react": ">= 16.8.0" + }, + "peerDependenciesMeta": { + "react-dom": { + "optional": true + }, + "react-native": { + "optional": true + } } }, - "@fluentui/font-icons-mdl2": { - "version": "8.5.23", - "resolved": "https://registry.npmjs.org/@fluentui/font-icons-mdl2/-/font-icons-mdl2-8.5.23.tgz", - "integrity": "sha512-jZjUtfQm9/84jX34zhwwsoZME86xXXgKAgBYuMvRStKzXGdZcd7YSOlmuT8lbISmtFL/SWwUGOEal1nLCUNeNA==", - "requires": { - "@fluentui/set-version": "^8.2.11", - "@fluentui/style-utilities": "^8.9.16", - "@fluentui/utilities": "^8.13.18", - "tslib": "^2.1.0" + "node_modules/react-is": { + "version": "17.0.2", + "resolved": "https://registry.npmjs.org/react-is/-/react-is-17.0.2.tgz", + "integrity": "sha512-w2GsyukL62IJnlaff/nRegPQR94C/XXamvMWmSHRJ4y7Ts/4ocGRmTHvOs8PSE6pB3dWOrD/nueuU5sduBsQ4w==", + "license": "MIT" + }, + "node_modules/react-markdown": { + "version": "9.0.1", + "resolved": "https://registry.npmjs.org/react-markdown/-/react-markdown-9.0.1.tgz", + "integrity": "sha512-186Gw/vF1uRkydbsOIkcGXw7aHq0sZOCRFFjGrr7b9+nVZg4UfA4enXCaxm4fUzecU38sWfrNDitGhshuU7rdg==", + "license": "MIT", + "dependencies": { + "@types/hast": "^3.0.0", + "devlop": "^1.0.0", + "hast-util-to-jsx-runtime": "^2.0.0", + "html-url-attributes": "^3.0.0", + "mdast-util-to-hast": "^13.0.0", + "remark-parse": "^11.0.0", + "remark-rehype": "^11.0.0", + "unified": "^11.0.0", + "unist-util-visit": "^5.0.0", + "vfile": "^6.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + }, + "peerDependencies": { + "@types/react": ">=18", + "react": ">=18" } }, - "@fluentui/foundation-legacy": { - "version": "8.2.43", - "resolved": "https://registry.npmjs.org/@fluentui/foundation-legacy/-/foundation-legacy-8.2.43.tgz", - "integrity": "sha512-rXr71KxNcWDH2LmTsFZbP75p8HssLlVLaFAqEdLE+sKf/LNKmqkDVTNhDbHZxzxy0QnguI4aNHcyGhMZUH3MPA==", - "requires": { - "@fluentui/merge-styles": "^8.5.12", - "@fluentui/set-version": "^8.2.11", - "@fluentui/style-utilities": "^8.9.16", - "@fluentui/utilities": 
"^8.13.18", - "tslib": "^2.1.0" + "node_modules/react-markdown/node_modules/@types/hast": { + "version": "3.0.4", + "resolved": "https://registry.npmjs.org/@types/hast/-/hast-3.0.4.tgz", + "integrity": "sha512-WPs+bbQw5aCj+x6laNGWLH3wviHtoCv/P3+otBhbOhJgG8qtpdAMlTCxLtsTWA7LH1Oh/bFCHsBn0TPS5m30EQ==", + "license": "MIT", + "dependencies": { + "@types/unist": "*" } }, - "@fluentui/keyboard-key": { - "version": "0.4.11", - "resolved": "https://registry.npmjs.org/@fluentui/keyboard-key/-/keyboard-key-0.4.11.tgz", - "integrity": "sha512-TVB/EloWado9AVp1niChgcdDOQAHGP5B30Dinmtfe7zi8OnstwPoxwFP6dHJDdpLQ6ZEUTaEHViSzvewl7Chag==", - "requires": { - "tslib": "^2.1.0" + "node_modules/react-refresh": { + "version": "0.14.2", + "resolved": "https://registry.npmjs.org/react-refresh/-/react-refresh-0.14.2.tgz", + "integrity": "sha512-jCvmsr+1IUSMUyzOkRcvnVbX3ZYC6g9TDrDbFuFmRDq7PD4yaGbLKNQL6k2jnArV8hjYxh7hVhAZB6s9HDGpZA==", + "dev": true, + "engines": { + "node": ">=0.10.0" } }, - "@fluentui/merge-styles": { - "version": "8.5.12", - "resolved": "https://registry.npmjs.org/@fluentui/merge-styles/-/merge-styles-8.5.12.tgz", - "integrity": "sha512-ZnUo0YuMP7AYi68dkknFqVxopIAgbrUnqR/MZlemmRvBYyy1SMj1WQeHcoiLFA8mF8YKn7B+jxQgJbN2bfcrRw==", - "requires": { - "@fluentui/set-version": "^8.2.11", - "tslib": "^2.1.0" + "node_modules/react-router": { + "version": "6.28.0", + "resolved": "https://registry.npmjs.org/react-router/-/react-router-6.28.0.tgz", + "integrity": "sha512-HrYdIFqdrnhDw0PqG/AKjAqEqM7AvxCz0DQ4h2W8k6nqmc5uRBYDag0SBxx9iYz5G8gnuNVLzUe13wl9eAsXXg==", + "dependencies": { + "@remix-run/router": "1.21.0" + }, + "engines": { + "node": ">=14.0.0" + }, + "peerDependencies": { + "react": ">=16.8" } }, - "@fluentui/react": { - "version": "8.110.7", - "resolved": "https://registry.npmjs.org/@fluentui/react/-/react-8.110.7.tgz", - "integrity": "sha512-3sn4HZL10jghiYFF+Ouc7pNDJ5pR2ueU6ZY1IdmVFgYXTJJ/IwQhVc37mXVf8VoUM7hF4vRcGE4z+loNTpTX0w==", - "requires": { - "@fluentui/date-time-utilities": "^8.5.13", - "@fluentui/font-icons-mdl2": "^8.5.23", - "@fluentui/foundation-legacy": "^8.2.43", - "@fluentui/merge-styles": "^8.5.12", - "@fluentui/react-focus": "^8.8.30", - "@fluentui/react-hooks": "^8.6.29", - "@fluentui/react-portal-compat-context": "^9.0.6", - "@fluentui/react-window-provider": "^2.2.15", - "@fluentui/set-version": "^8.2.11", - "@fluentui/style-utilities": "^8.9.16", - "@fluentui/theme": "^2.6.34", - "@fluentui/utilities": "^8.13.18", - "@microsoft/load-themed-styles": "^1.10.26", - "tslib": "^2.1.0" + "node_modules/react-router-dom": { + "version": "6.28.0", + "resolved": "https://registry.npmjs.org/react-router-dom/-/react-router-dom-6.28.0.tgz", + "integrity": "sha512-kQ7Unsl5YdyOltsPGl31zOjLrDv+m2VcIEcIHqYYD3Lp0UppLjrzcfJqDJwXxFw3TH/yvapbnUvPlAj7Kx5nbg==", + "dependencies": { + "@remix-run/router": "1.21.0", + "react-router": "6.28.0" + }, + "engines": { + "node": ">=14.0.0" + }, + "peerDependencies": { + "react": ">=16.8", + "react-dom": ">=16.8" } }, - "@fluentui/react-focus": { - "version": "8.8.30", - "resolved": "https://registry.npmjs.org/@fluentui/react-focus/-/react-focus-8.8.30.tgz", - "integrity": "sha512-dKQQtNTZbQOE+u/Tmh7AbtJPSpzQNI0L8o55a22y4U7s33rizUd++CIiToXsB+bPvlotcmpZswZQ8V06zM4KIw==", - "requires": { - "@fluentui/keyboard-key": "^0.4.11", - "@fluentui/merge-styles": "^8.5.12", - "@fluentui/set-version": "^8.2.11", - "@fluentui/style-utilities": "^8.9.16", - "@fluentui/utilities": "^8.13.18", - "tslib": "^2.1.0" + "node_modules/react-syntax-highlighter": { + "version": "15.6.1", + 
"resolved": "https://registry.npmjs.org/react-syntax-highlighter/-/react-syntax-highlighter-15.6.1.tgz", + "integrity": "sha512-OqJ2/vL7lEeV5zTJyG7kmARppUjiB9h9udl4qHQjjgEos66z00Ia0OckwYfRxCSFrW8RJIBnsBwQsHZbVPspqg==", + "dependencies": { + "@babel/runtime": "^7.3.1", + "highlight.js": "^10.4.1", + "highlightjs-vue": "^1.0.0", + "lowlight": "^1.17.0", + "prismjs": "^1.27.0", + "refractor": "^3.6.0" + }, + "peerDependencies": { + "react": ">= 0.14.0" } }, - "@fluentui/react-hooks": { - "version": "8.6.29", - "resolved": "https://registry.npmjs.org/@fluentui/react-hooks/-/react-hooks-8.6.29.tgz", - "integrity": "sha512-MeVevmGJtrYxdhoarrkVWE0Hs4XdzOc9A3tiOjMBIcwOvoOYOAoOELoHK/wuulPVwUn2R9Y+7JpJ6oCe4ImdJw==", - "requires": { - "@fluentui/react-window-provider": "^2.2.15", - "@fluentui/set-version": "^8.2.11", - "@fluentui/utilities": "^8.13.18", - "tslib": "^2.1.0" + "node_modules/react-transition-group": { + "version": "4.4.5", + "resolved": "https://registry.npmjs.org/react-transition-group/-/react-transition-group-4.4.5.tgz", + "integrity": "sha512-pZcd1MCJoiKiBR2NRxeCRg13uCXbydPnmB4EOeRrY7480qNWO8IIgQG6zlDkm6uRMsURXPuKq0GWtiM59a5Q6g==", + "dependencies": { + "@babel/runtime": "^7.5.5", + "dom-helpers": "^5.0.1", + "loose-envify": "^1.4.0", + "prop-types": "^15.6.2" + }, + "peerDependencies": { + "react": ">=16.6.0", + "react-dom": ">=16.6.0" } }, - "@fluentui/react-icons": { - "version": "2.0.206", - "resolved": "https://registry.npmjs.org/@fluentui/react-icons/-/react-icons-2.0.206.tgz", - "integrity": "sha512-Hq+QCAQbmIR8Pi7eihNQQgV3NmBcNP7j0Px1gBusgqK+RfttE0ubLym3SwQ3/Zz4neaDgAa9QKoArgEt3HorvA==", - "requires": { - "@griffel/react": "^1.0.0", - "tslib": "^2.1.0" + "node_modules/refractor": { + "version": "3.6.0", + "license": "MIT", + "dependencies": { + "hastscript": "^6.0.0", + "parse-entities": "^2.0.0", + "prismjs": "~1.27.0" + }, + "funding": { + "type": "github", + "url": "https://github.com/sponsors/wooorm" } }, - "@fluentui/react-portal-compat-context": { - "version": "9.0.6", - "resolved": "https://registry.npmjs.org/@fluentui/react-portal-compat-context/-/react-portal-compat-context-9.0.6.tgz", - "integrity": "sha512-HUt0/YXKRB4chtzlGbZ+7y7FHFyqaI0CeMFAe/QBXVOiOwA01QOr2j4Uky+30vupspIt6mjodLanuw1jMybmqQ==", - "requires": { - "@swc/helpers": "^0.4.14" + "node_modules/refractor/node_modules/prismjs": { + "version": "1.27.0", + "license": "MIT", + "engines": { + "node": ">=6" } }, - "@fluentui/react-window-provider": { - "version": "2.2.15", - "resolved": "https://registry.npmjs.org/@fluentui/react-window-provider/-/react-window-provider-2.2.15.tgz", - "integrity": "sha512-RraWvRe7wakpPJRBX2tlCV/cybOKiqLJ1UBLPNf5xq7ZIs0T0g/hh3G3Zb5teOeipjuRnl6srkdDUT9Dy9wrBg==", - "requires": { - "@fluentui/set-version": "^8.2.11", - "tslib": "^2.1.0" + "node_modules/regenerator-runtime": { + "version": "0.14.0", + "license": "MIT" + }, + "node_modules/rehype-raw": { + "version": "7.0.0", + "resolved": "https://registry.npmjs.org/rehype-raw/-/rehype-raw-7.0.0.tgz", + "integrity": "sha512-/aE8hCfKlQeA8LmyeyQvQF3eBiLRGNlfBJEvWH7ivp9sBqs7TNqBL5X3v157rM4IFETqDnIOO+z5M/biZbo9Ww==", + "license": "MIT", + "dependencies": { + "@types/hast": "^3.0.0", + "hast-util-raw": "^9.0.0", + "vfile": "^6.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" } }, - "@fluentui/set-version": { - "version": "8.2.11", - "resolved": "https://registry.npmjs.org/@fluentui/set-version/-/set-version-8.2.11.tgz", - "integrity": 
"sha512-UI03tysau/adBO1a3q4uFZWQ3lfkiFcAWIFng4k5odWcCokfCm5IxA0urKqj5W5JRYdyoBUaq8QbcNGkFB4dCw==", - "requires": { - "tslib": "^2.1.0" + "node_modules/rehype-raw/node_modules/@types/hast": { + "version": "3.0.4", + "resolved": "https://registry.npmjs.org/@types/hast/-/hast-3.0.4.tgz", + "integrity": "sha512-WPs+bbQw5aCj+x6laNGWLH3wviHtoCv/P3+otBhbOhJgG8qtpdAMlTCxLtsTWA7LH1Oh/bFCHsBn0TPS5m30EQ==", + "license": "MIT", + "dependencies": { + "@types/unist": "*" } }, - "@fluentui/style-utilities": { - "version": "8.9.16", - "resolved": "https://registry.npmjs.org/@fluentui/style-utilities/-/style-utilities-8.9.16.tgz", - "integrity": "sha512-8hS5HscCFYvcWjAdk37frPZJZthr7f/cu5db7gjrPy+DEhf13WAZRHsropWm17+8GhJhvKt98BQf/Kzxtt34Eg==", - "requires": { - "@fluentui/merge-styles": "^8.5.12", - "@fluentui/set-version": "^8.2.11", - "@fluentui/theme": "^2.6.34", - "@fluentui/utilities": "^8.13.18", - "@microsoft/load-themed-styles": "^1.10.26", - "tslib": "^2.1.0" + "node_modules/remark-gfm": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/remark-gfm/-/remark-gfm-4.0.0.tgz", + "integrity": "sha512-U92vJgBPkbw4Zfu/IiW2oTZLSL3Zpv+uI7My2eq8JxKgqraFdU8YUGicEJCEgSbeaG+QDFqIcwwfMTOEelPxuA==", + "license": "MIT", + "dependencies": { + "@types/mdast": "^4.0.0", + "mdast-util-gfm": "^3.0.0", + "micromark-extension-gfm": "^3.0.0", + "remark-parse": "^11.0.0", + "remark-stringify": "^11.0.0", + "unified": "^11.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" } }, - "@fluentui/theme": { - "version": "2.6.34", - "resolved": "https://registry.npmjs.org/@fluentui/theme/-/theme-2.6.34.tgz", - "integrity": "sha512-2Ssi3sX2snnbPJ4PmxbpCDCGePRE36tvGj2qKgdKiSh/fPVsg1b+Q50YlpFl9sXmbhl1uFmxjAx6WPsVGTl7vQ==", - "requires": { - "@fluentui/merge-styles": "^8.5.12", - "@fluentui/set-version": "^8.2.11", - "@fluentui/utilities": "^8.13.18", - "tslib": "^2.1.0" + "node_modules/remark-parse": { + "version": "11.0.0", + "resolved": "https://registry.npmjs.org/remark-parse/-/remark-parse-11.0.0.tgz", + "integrity": "sha512-FCxlKLNGknS5ba/1lmpYijMUzX2esxW5xQqjWxw2eHFfS2MSdaHVINFmhjo+qN1WhZhNimq0dZATN9pH0IDrpA==", + "license": "MIT", + "dependencies": { + "@types/mdast": "^4.0.0", + "mdast-util-from-markdown": "^2.0.0", + "micromark-util-types": "^2.0.0", + "unified": "^11.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" } }, - "@fluentui/utilities": { - "version": "8.13.18", - "resolved": "https://registry.npmjs.org/@fluentui/utilities/-/utilities-8.13.18.tgz", - "integrity": "sha512-/0rX9EzltLKwU1SS14VV7agWoOzruVTU3oagZq1QgFAvoj8qi7fNqvSX/VEeRy+0gmbsCkrEViUPkmC7drKzPg==", - "requires": { - "@fluentui/dom-utilities": "^2.2.11", - "@fluentui/merge-styles": "^8.5.12", - "@fluentui/set-version": "^8.2.11", - "tslib": "^2.1.0" + "node_modules/remark-rehype": { + "version": "11.1.0", + "resolved": "https://registry.npmjs.org/remark-rehype/-/remark-rehype-11.1.0.tgz", + "integrity": "sha512-z3tJrAs2kIs1AqIIy6pzHmAHlF1hWQ+OdY4/hv+Wxe35EhyLKcajL33iUEn3ScxtFox9nUvRufR/Zre8Q08H/g==", + "license": "MIT", + "dependencies": { + "@types/hast": "^3.0.0", + "@types/mdast": "^4.0.0", + "mdast-util-to-hast": "^13.0.0", + "unified": "^11.0.0", + "vfile": "^6.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" } }, - "@griffel/core": { - "version": "1.10.0", - "resolved": "https://registry.npmjs.org/@griffel/core/-/core-1.10.0.tgz", - "integrity": 
"sha512-9yIBFswd6pcxtYsDVngplCHTyZ++cIk0htBOBVjxBKEoTkEmTgSvbIB2kKMiO3OJLrjzwoi9r+s3owugzIZe1w==", - "requires": { - "@emotion/hash": "^0.9.0", - "csstype": "^3.0.10", - "rtl-css-js": "^1.16.1", - "stylis": "^4.0.13", - "tslib": "^2.1.0" + "node_modules/remark-rehype/node_modules/@types/hast": { + "version": "3.0.4", + "resolved": "https://registry.npmjs.org/@types/hast/-/hast-3.0.4.tgz", + "integrity": "sha512-WPs+bbQw5aCj+x6laNGWLH3wviHtoCv/P3+otBhbOhJgG8qtpdAMlTCxLtsTWA7LH1Oh/bFCHsBn0TPS5m30EQ==", + "license": "MIT", + "dependencies": { + "@types/unist": "*" } }, - "@griffel/react": { - "version": "1.5.5", - "resolved": "https://registry.npmjs.org/@griffel/react/-/react-1.5.5.tgz", - "integrity": "sha512-MpAU0NEpBzNRWUGSlhgz3jzZRC+HbRI+P2lQIzyxoMFgzEB4QFtDnRDBwPLfi/Eoq55NlVmsxn2Pr3jJ/bjhRw==", - "requires": { - "@griffel/core": "^1.10.0", - "tslib": "^2.1.0" + "node_modules/remark-stringify": { + "version": "11.0.0", + "resolved": "https://registry.npmjs.org/remark-stringify/-/remark-stringify-11.0.0.tgz", + "integrity": "sha512-1OSmLd3awB/t8qdoEOMazZkNsfVTeY4fTsgzcQFdXNq8ToTN4ZGwrMnlda4K6smTFKD+GRV6O48i6Z4iKgPPpw==", + "license": "MIT", + "dependencies": { + "@types/mdast": "^4.0.0", + "mdast-util-to-markdown": "^2.0.0", + "unified": "^11.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" } }, - "@jridgewell/gen-mapping": { - "version": "0.3.3", - "resolved": "https://registry.npmjs.org/@jridgewell/gen-mapping/-/gen-mapping-0.3.3.tgz", - "integrity": "sha512-HLhSWOLRi875zjjMG/r+Nv0oCW8umGb0BgEhyX3dDX3egwZtB8PqLnjz3yedt8R5StBrzcg4aBpnh8UA9D1BoQ==", + "node_modules/require-directory": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/require-directory/-/require-directory-2.1.1.tgz", + "integrity": "sha512-fGxEI7+wsG9xrvdjsrlmL22OMTTiHRwAMroiEeMgq8gzoLC/PQr7RsRDSTLUg/bZAZtF+TVIkHc6/4RIKrui+Q==", "dev": true, - "requires": { - "@jridgewell/set-array": "^1.0.1", - "@jridgewell/sourcemap-codec": "^1.4.10", - "@jridgewell/trace-mapping": "^0.3.9" + "license": "MIT", + "engines": { + "node": ">=0.10.0" } }, - "@jridgewell/resolve-uri": { - "version": "3.1.0", - "resolved": "https://registry.npmjs.org/@jridgewell/resolve-uri/-/resolve-uri-3.1.0.tgz", - "integrity": "sha512-F2msla3tad+Mfht5cJq7LSXcdudKTWCVYUgw6pLFOOHSTtZlj6SWNYAp+AhuqLmWdBO2X5hPrLcu8cVP8fy28w==", - "dev": true - }, - "@jridgewell/set-array": { - "version": "1.1.2", - "resolved": "https://registry.npmjs.org/@jridgewell/set-array/-/set-array-1.1.2.tgz", - "integrity": "sha512-xnkseuNADM0gt2bs+BvhO0p78Mk762YnZdsuzFV018NoG1Sj1SCQvpSqa7XUaTam5vAGasABV9qXASMKnFMwMw==", - "dev": true - }, - "@jridgewell/sourcemap-codec": { - "version": "1.4.15", - "resolved": "https://registry.npmjs.org/@jridgewell/sourcemap-codec/-/sourcemap-codec-1.4.15.tgz", - "integrity": "sha512-eF2rxCRulEKXHTRiDrDy6erMYWqNw4LPdQ8UQA4huuxaQsVeRPFl2oM8oDGxMFhJUWZf9McpLtJasDDZb/Bpeg==", - "dev": true - }, - "@jridgewell/trace-mapping": { - "version": "0.3.18", - "resolved": "https://registry.npmjs.org/@jridgewell/trace-mapping/-/trace-mapping-0.3.18.tgz", - "integrity": "sha512-w+niJYzMHdd7USdiH2U6869nqhD2nbfZXND5Yp93qIbEmnDNk7PD48o+YchRVpzMU7M6jVCbenTR7PA1FLQ9pA==", + "node_modules/rollup": { + "version": "4.22.4", + "resolved": "https://registry.npmjs.org/rollup/-/rollup-4.22.4.tgz", + "integrity": "sha512-vD8HJ5raRcWOyymsR6Z3o6+RzfEPCnVLMFJ6vRslO1jt4LO6dUo5Qnpg7y4RkZFM2DMe3WUirkI5c16onjrc6A==", "dev": true, - "requires": { - "@jridgewell/resolve-uri": "3.1.0", - "@jridgewell/sourcemap-codec": 
"1.4.14" + "dependencies": { + "@types/estree": "1.0.5" + }, + "bin": { + "rollup": "dist/bin/rollup" }, + "engines": { + "node": ">=18.0.0", + "npm": ">=8.0.0" + }, + "optionalDependencies": { + "@rollup/rollup-android-arm-eabi": "4.22.4", + "@rollup/rollup-android-arm64": "4.22.4", + "@rollup/rollup-darwin-arm64": "4.22.4", + "@rollup/rollup-darwin-x64": "4.22.4", + "@rollup/rollup-linux-arm-gnueabihf": "4.22.4", + "@rollup/rollup-linux-arm-musleabihf": "4.22.4", + "@rollup/rollup-linux-arm64-gnu": "4.22.4", + "@rollup/rollup-linux-arm64-musl": "4.22.4", + "@rollup/rollup-linux-powerpc64le-gnu": "4.22.4", + "@rollup/rollup-linux-riscv64-gnu": "4.22.4", + "@rollup/rollup-linux-s390x-gnu": "4.22.4", + "@rollup/rollup-linux-x64-gnu": "4.22.4", + "@rollup/rollup-linux-x64-musl": "4.22.4", + "@rollup/rollup-win32-arm64-msvc": "4.22.4", + "@rollup/rollup-win32-ia32-msvc": "4.22.4", + "@rollup/rollup-win32-x64-msvc": "4.22.4", + "fsevents": "~2.3.2" + } + }, + "node_modules/rollup-plugin-visualizer": { + "version": "5.12.0", + "resolved": "https://registry.npmjs.org/rollup-plugin-visualizer/-/rollup-plugin-visualizer-5.12.0.tgz", + "integrity": "sha512-8/NU9jXcHRs7Nnj07PF2o4gjxmm9lXIrZ8r175bT9dK8qoLlvKTwRMArRCMgpMGlq8CTLugRvEmyMeMXIU2pNQ==", + "dev": true, + "license": "MIT", "dependencies": { - "@jridgewell/sourcemap-codec": { - "version": "1.4.14", - "resolved": "https://registry.npmjs.org/@jridgewell/sourcemap-codec/-/sourcemap-codec-1.4.14.tgz", - "integrity": "sha512-XPSJHWmi394fuUuzDnGz1wiKqWfo1yXecHQMRf2l6hztTO+nPru658AyDngaBe7isIxEkRsPR3FZh+s7iVa4Uw==", - "dev": true + "open": "^8.4.0", + "picomatch": "^2.3.1", + "source-map": "^0.7.4", + "yargs": "^17.5.1" + }, + "bin": { + "rollup-plugin-visualizer": "dist/bin/cli.js" + }, + "engines": { + "node": ">=14" + }, + "peerDependencies": { + "rollup": "2.x || 3.x || 4.x" + }, + "peerDependenciesMeta": { + "rollup": { + "optional": true } } }, - "@microsoft/load-themed-styles": { - "version": "1.10.295", - "resolved": "https://registry.npmjs.org/@microsoft/load-themed-styles/-/load-themed-styles-1.10.295.tgz", - "integrity": "sha512-W+IzEBw8a6LOOfRJM02dTT7BDZijxm+Z7lhtOAz1+y9vQm1Kdz9jlAO+qCEKsfxtUOmKilW8DIRqFw2aUgKeGg==" - }, - "@nicolo-ribaudo/semver-v6": { - "version": "6.3.3", - "resolved": "https://registry.npmjs.org/@nicolo-ribaudo/semver-v6/-/semver-v6-6.3.3.tgz", - "integrity": "sha512-3Yc1fUTs69MG/uZbJlLSI3JISMn2UV2rg+1D/vROUqZyh3l6iYHCs7GMp+M40ZD7yOdDbYjJcU1oTJhrc+dGKg==", - "dev": true + "node_modules/rtl-css-js": { + "version": "1.16.1", + "resolved": "https://registry.npmjs.org/rtl-css-js/-/rtl-css-js-1.16.1.tgz", + "integrity": "sha512-lRQgou1mu19e+Ya0LsTvKrVJ5TYUbqCVPAiImX3UfLTenarvPUl1QFdvu5Z3PYmHT9RCcwIfbjRQBntExyj3Zg==", + "license": "MIT", + "dependencies": { + "@babel/runtime": "^7.1.2" + } }, - "@react-spring/animated": { - "version": "9.7.3", - "resolved": "https://registry.npmjs.org/@react-spring/animated/-/animated-9.7.3.tgz", - "integrity": "sha512-5CWeNJt9pNgyvuSzQH+uy2pvTg8Y4/OisoscZIR8/ZNLIOI+CatFBhGZpDGTF/OzdNFsAoGk3wiUYTwoJ0YIvw==", - "requires": { - "@react-spring/shared": "~9.7.3", - "@react-spring/types": "~9.7.3" - } - }, - "@react-spring/core": { - "version": "9.7.3", - "resolved": "https://registry.npmjs.org/@react-spring/core/-/core-9.7.3.tgz", - "integrity": "sha512-IqFdPVf3ZOC1Cx7+M0cXf4odNLxDC+n7IN3MDcVCTIOSBfqEcBebSv+vlY5AhM0zw05PDbjKrNmBpzv/AqpjnQ==", - "requires": { - "@react-spring/animated": "~9.7.3", - "@react-spring/shared": "~9.7.3", - "@react-spring/types": "~9.7.3" - } - }, - 
"@react-spring/shared": { - "version": "9.7.3", - "resolved": "https://registry.npmjs.org/@react-spring/shared/-/shared-9.7.3.tgz", - "integrity": "sha512-NEopD+9S5xYyQ0pGtioacLhL2luflh6HACSSDUZOwLHoxA5eku1UPuqcJqjwSD6luKjjLfiLOspxo43FUHKKSA==", - "requires": { - "@react-spring/types": "~9.7.3" - } - }, - "@react-spring/types": { - "version": "9.7.3", - "resolved": "https://registry.npmjs.org/@react-spring/types/-/types-9.7.3.tgz", - "integrity": "sha512-Kpx/fQ/ZFX31OtlqVEFfgaD1ACzul4NksrvIgYfIFq9JpDHFwQkMVZ10tbo0FU/grje4rcL4EIrjekl3kYwgWw==" - }, - "@react-spring/web": { - "version": "9.7.3", - "resolved": "https://registry.npmjs.org/@react-spring/web/-/web-9.7.3.tgz", - "integrity": "sha512-BXt6BpS9aJL/QdVqEIX9YoUy8CE6TJrU0mNCqSoxdXlIeNcEBWOfIyE6B14ENNsyQKS3wOWkiJfco0tCr/9tUg==", - "requires": { - "@react-spring/animated": "~9.7.3", - "@react-spring/core": "~9.7.3", - "@react-spring/shared": "~9.7.3", - "@react-spring/types": "~9.7.3" - } - }, - "@remix-run/router": { - "version": "1.7.1", - "resolved": "https://registry.npmjs.org/@remix-run/router/-/router-1.7.1.tgz", - "integrity": "sha512-bgVQM4ZJ2u2CM8k1ey70o1ePFXsEzYVZoWghh6WjM8p59jQ7HxzbHW4SbnWFG7V9ig9chLawQxDTZ3xzOF8MkQ==" - }, - "@swc/helpers": { - "version": "0.4.14", - "resolved": "https://registry.npmjs.org/@swc/helpers/-/helpers-0.4.14.tgz", - "integrity": "sha512-4C7nX/dvpzB7za4Ql9K81xK3HPxCpHMgwTZVyf+9JQ6VUbn9jjZVN7/Nkdz/Ugzs2CSjqnL/UPXroiVBVHUWUw==", - "requires": { - "tslib": "^2.4.0" + "node_modules/scheduler": { + "version": "0.20.2", + "license": "MIT", + "dependencies": { + "loose-envify": "^1.1.0", + "object-assign": "^4.1.1" } }, - "@types/dompurify": { - "version": "3.0.2", - "resolved": "https://registry.npmjs.org/@types/dompurify/-/dompurify-3.0.2.tgz", - "integrity": "sha512-YBL4ziFebbbfQfH5mlC+QTJsvh0oJUrWbmxKMyEdL7emlHJqGR2Qb34TEFKj+VCayBvjKy3xczMFNhugThUsfQ==", + "node_modules/semver": { + "version": "6.3.1", + "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.1.tgz", + "integrity": "sha512-BR7VvDCVHO+q2xBEWskxS6DJE1qRnb7DxzUrogb71CWoSficBxYsiAGd+Kl0mmq/MprG9yArRkyrQxTO6XjMzA==", "dev": true, - "requires": { - "@types/trusted-types": "*" + "bin": { + "semver": "bin/semver.js" } }, - "@types/prop-types": { - "version": "15.7.5", - "resolved": "https://registry.npmjs.org/@types/prop-types/-/prop-types-15.7.5.tgz", - "integrity": "sha512-JCB8C6SnDoQf0cNycqd/35A7MjcnK+ZTqE7judS6o7utxUCg6imJg3QK2qzHKszlTjcj2cn+NwMB2i96ubpj7w==" - }, - "@types/react": { - "version": "18.2.14", - "resolved": "https://registry.npmjs.org/@types/react/-/react-18.2.14.tgz", - "integrity": "sha512-A0zjq+QN/O0Kpe30hA1GidzyFjatVvrpIvWLxD+xv67Vt91TWWgco9IvrJBkeyHm1trGaFS/FSGqPlhyeZRm0g==", - "requires": { - "@types/prop-types": "*", - "@types/scheduler": "*", - "csstype": "^3.0.2" - } + "node_modules/shallowequal": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/shallowequal/-/shallowequal-1.1.0.tgz", + "integrity": "sha512-y0m1JoUZSlPAjXVtPPW70aZWfIL/dSP7AFkRnniLCrK/8MDKog3TySTBmckD+RObVxH0v4Tox67+F14PdED2oQ==", + "license": "MIT" }, - "@types/react-dom": { - "version": "18.2.6", - "resolved": "https://registry.npmjs.org/@types/react-dom/-/react-dom-18.2.6.tgz", - "integrity": "sha512-2et4PDvg6PVCyS7fuTc4gPoksV58bW0RwSxWKcPRcHZf0PRUGq03TKcD/rUHe3azfV6/5/biUBJw+HhCQjaP0A==", - "requires": { - "@types/react": "*" + "node_modules/source-map": { + "version": "0.7.4", + "resolved": "https://registry.npmjs.org/source-map/-/source-map-0.7.4.tgz", + "integrity": 
"sha512-l3BikUxvPOcn5E74dZiq5BGsTb5yEwhaTSzccU6t4sDOH8NWJCstKO5QT2CvtFoK6F0saL7p9xHAqHOlCPJygA==", + "dev": true, + "license": "BSD-3-Clause", + "engines": { + "node": ">= 8" } }, - "@types/scheduler": { - "version": "0.16.2", - "resolved": "https://registry.npmjs.org/@types/scheduler/-/scheduler-0.16.2.tgz", - "integrity": "sha512-hppQEBDmlwhFAXKJX2KnWLYu5yMfi91yazPb2l+lbJiwW+wdo1gNeRA+3RgNSO39WYX2euey41KEwnqesU2Jew==" - }, - "@types/trusted-types": { - "version": "2.0.3", - "resolved": "https://registry.npmjs.org/@types/trusted-types/-/trusted-types-2.0.3.tgz", - "integrity": "sha512-NfQ4gyz38SL8sDNrSixxU2Os1a5xcdFxipAFxYEuLUlvU2uDwS4NUpsImcf1//SlWItCVMMLiylsxbmNMToV/g==", - "dev": true - }, - "@vitejs/plugin-react": { - "version": "4.0.2", - "resolved": "https://registry.npmjs.org/@vitejs/plugin-react/-/plugin-react-4.0.2.tgz", - "integrity": "sha512-zbnVp3Esfg33zDaoLrjxG+p/dPiOtpvJA+1oOEQwSxMMTRL9zi1eghIcd2WtLjkcKnPsa3S15LzS/OzDn2BOCA==", + "node_modules/source-map-js": { + "version": "1.2.1", + "resolved": "https://registry.npmjs.org/source-map-js/-/source-map-js-1.2.1.tgz", + "integrity": "sha512-UXWMKhLOwVKb728IUtQPXxfYU+usdybtUrK/8uGE8CQMvrhOpwvzDBwj0QhSL7MQc7vIsISBG8VQ8+IDQxpfQA==", "dev": true, - "requires": { - "@babel/core": "^7.22.5", - "@babel/plugin-transform-react-jsx-self": "^7.22.5", - "@babel/plugin-transform-react-jsx-source": "^7.22.5", - "react-refresh": "^0.14.0" + "engines": { + "node": ">=0.10.0" } }, - "ansi-styles": { - "version": "3.2.1", - "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-3.2.1.tgz", - "integrity": "sha512-VT0ZI6kZRdTh8YyJw3SMbYm/u+NqfsAxEpWO0Pf9sq8/e94WxxOpPKx9FR1FlyCtOVDNOQ+8ntlqFxiRc+r5qA==", - "dev": true, - "requires": { - "color-convert": "^1.9.0" + "node_modules/space-separated-tokens": { + "version": "1.1.5", + "license": "MIT", + "funding": { + "type": "github", + "url": "https://github.com/sponsors/wooorm" } }, - "browserslist": { - "version": "4.21.9", - "resolved": "https://registry.npmjs.org/browserslist/-/browserslist-4.21.9.tgz", - "integrity": "sha512-M0MFoZzbUrRU4KNfCrDLnvyE7gub+peetoTid3TBIqtunaDJyXlwhakT+/VkvSXcfIzFfK/nkCs4nmyTmxdNSg==", + "node_modules/string-width": { + "version": "4.2.3", + "resolved": "https://registry.npmjs.org/string-width/-/string-width-4.2.3.tgz", + "integrity": "sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g==", "dev": true, - "requires": { - "caniuse-lite": "^1.0.30001503", - "electron-to-chromium": "^1.4.431", - "node-releases": "^2.0.12", - "update-browserslist-db": "^1.0.11" + "license": "MIT", + "dependencies": { + "emoji-regex": "^8.0.0", + "is-fullwidth-code-point": "^3.0.0", + "strip-ansi": "^6.0.1" + }, + "engines": { + "node": ">=8" } }, - "caniuse-lite": { - "version": "1.0.30001513", - "resolved": "https://registry.npmjs.org/caniuse-lite/-/caniuse-lite-1.0.30001513.tgz", - "integrity": "sha512-pnjGJo7SOOjAGytZZ203Em95MRM8Cr6jhCXNF/FAXTpCTRTECnqQWLpiTRqrFtdYcth8hf4WECUpkezuYsMVww==", - "dev": true + "node_modules/stringify-entities": { + "version": "4.0.4", + "resolved": "https://registry.npmjs.org/stringify-entities/-/stringify-entities-4.0.4.tgz", + "integrity": "sha512-IwfBptatlO+QCJUo19AqvrPNqlVMpW9YEL2LIVY+Rpv2qsjCGxaDLNRgeGsQWJhfItebuJhsGSLjaBbNSQ+ieg==", + "license": "MIT", + "dependencies": { + "character-entities-html4": "^2.0.0", + "character-entities-legacy": "^3.0.0" + }, + "funding": { + "type": "github", + "url": "https://github.com/sponsors/wooorm" + } }, - "chalk": { - "version": "2.4.2", - "resolved": 
"https://registry.npmjs.org/chalk/-/chalk-2.4.2.tgz", - "integrity": "sha512-Mti+f9lpJNcwF4tWV8/OrTTtF1gZi+f8FqlyAdouralcFWFQWF2+NgCHShjkCb+IFBLq9buZwE1xckQU4peSuQ==", - "dev": true, - "requires": { - "ansi-styles": "^3.2.1", - "escape-string-regexp": "^1.0.5", - "supports-color": "^5.3.0" + "node_modules/stringify-entities/node_modules/character-entities-legacy": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/character-entities-legacy/-/character-entities-legacy-3.0.0.tgz", + "integrity": "sha512-RpPp0asT/6ufRm//AJVwpViZbGM/MkjQFxJccQRHmISF/22NBtsHqAWmL+/pmkPWoIUJdWyeVleTl1wydHATVQ==", + "license": "MIT", + "funding": { + "type": "github", + "url": "https://github.com/sponsors/wooorm" } }, - "color-convert": { - "version": "1.9.3", - "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-1.9.3.tgz", - "integrity": "sha512-QfAUtd+vFdAtFQcC8CCyYt1fYWxSqAiK2cSD6zDB8N3cpsEBAvRxp9zOGg6G/SHHJYAT88/az/IuDGALsNVbGg==", + "node_modules/strip-ansi": { + "version": "6.0.1", + "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.1.tgz", + "integrity": "sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==", "dev": true, - "requires": { - "color-name": "1.1.3" + "license": "MIT", + "dependencies": { + "ansi-regex": "^5.0.1" + }, + "engines": { + "node": ">=8" } }, - "color-name": { - "version": "1.1.3", - "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.3.tgz", - "integrity": "sha512-72fSenhMw2HZMTVHeCA9KCmpEIbzWiQsjN+BHcBbS9vr1mtt+vJjPdksIBNUmKAW8TFUDPJK5SUU3QhE9NEXDw==", - "dev": true + "node_modules/style-to-object": { + "version": "1.0.6", + "resolved": "https://registry.npmjs.org/style-to-object/-/style-to-object-1.0.6.tgz", + "integrity": "sha512-khxq+Qm3xEyZfKd/y9L3oIWQimxuc4STrQKtQn8aSDRHb8mFgpukgX1hdzfrMEW6JCjyJ8p89x+IUMVnCBI1PA==", + "license": "MIT", + "dependencies": { + "inline-style-parser": "0.2.3" + } }, - "convert-source-map": { - "version": "1.9.0", - "resolved": "https://registry.npmjs.org/convert-source-map/-/convert-source-map-1.9.0.tgz", - "integrity": "sha512-ASFBup0Mz1uyiIjANan1jzLQami9z1PoYSZCiiYW2FczPbenXc45FZdBZLzOT+r6+iciuEModtmCti+hjaAk0A==", - "dev": true + "node_modules/stylis": { + "version": "4.3.2", + "resolved": "https://registry.npmjs.org/stylis/-/stylis-4.3.2.tgz", + "integrity": "sha512-bhtUjWd/z6ltJiQwg0dUfxEJ+W+jdqQd8TbWLWyeIJHlnsqmGLRFFd8e5mA0AZi/zx90smXRlN66YMTcaSFifg==", + "license": "MIT" }, - "csstype": { - "version": "3.1.1", - "resolved": "https://registry.npmjs.org/csstype/-/csstype-3.1.1.tgz", - "integrity": "sha512-DJR/VvkAvSZW9bTouZue2sSxDwdTN92uHjqeKVm+0dAqdfNykRzQ95tay8aXMBAAPpUiq4Qcug2L7neoRh2Egw==" + "node_modules/supports-color": { + "version": "5.5.0", + "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-5.5.0.tgz", + "integrity": "sha512-QjVjwdXIt408MIiAqCX4oUKsgU2EqAGzs2Ppkm4aQYbjm+ZEWEcW4SfFNTr4uMNZma0ey4f5lgLrkB0aX0QMow==", + "dev": true, + "dependencies": { + "has-flag": "^3.0.0" + }, + "engines": { + "node": ">=4" + } + }, + "node_modules/tabster": { + "version": "8.2.0", + "resolved": "https://registry.npmjs.org/tabster/-/tabster-8.2.0.tgz", + "integrity": "sha512-Gvplk/Yl/12aVFA6FPOqGcq31Qv8hbPfYO0N+6IxrRgRT6eSLsipT6gkZBYjyOwGsp6BD5XlZAuJgupfG/GHoA==", + "dependencies": { + "keyborg": "2.6.0", + "tslib": "^2.3.1" + } }, - "debug": { - "version": "4.3.4", - "resolved": "https://registry.npmjs.org/debug/-/debug-4.3.4.tgz", - "integrity": 
"sha512-PRWFHuSU3eDtQJPvnNY7Jcket1j0t5OuOsFzPPzsekD52Zl8qUfFIPEiswXqIvHWGVHOgX+7G/vCNNhehwxfkQ==", + "node_modules/to-fast-properties": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/to-fast-properties/-/to-fast-properties-2.0.0.tgz", + "integrity": "sha512-/OaKK0xYrs3DmxRYqL/yDc+FxFUVYhDlXMhRmv3z915w2HF1tnN1omB354j8VUGO/hbRzyD6Y3sA7v7GS/ceog==", "dev": true, - "requires": { - "ms": "2.1.2" + "engines": { + "node": ">=4" } }, - "dompurify": { - "version": "3.0.4", - "resolved": "https://registry.npmjs.org/dompurify/-/dompurify-3.0.4.tgz", - "integrity": "sha512-ae0mA+Qiqp6C29pqZX3fQgK+F91+F7wobM/v8DRzDqJdZJELXiFUx4PP4pK/mzUS0xkiSEx3Ncd9gr69jg3YsQ==" + "node_modules/tr46": { + "version": "0.0.3", + "resolved": "https://registry.npmjs.org/tr46/-/tr46-0.0.3.tgz", + "integrity": "sha512-N3WMsuqV66lT30CrXNbEjx4GEwlow3v6rr4mCcv6prnfwhS01rkgyFdjPNBYd9br7LpXV1+Emh01fHnq2Gdgrw==", + "license": "MIT" }, - "electron-to-chromium": { - "version": "1.4.454", - "resolved": "https://registry.npmjs.org/electron-to-chromium/-/electron-to-chromium-1.4.454.tgz", - "integrity": "sha512-pmf1rbAStw8UEQ0sr2cdJtWl48ZMuPD9Sto8HVQOq9vx9j2WgDEN6lYoaqFvqEHYOmGA9oRGn7LqWI9ta0YugQ==", - "dev": true + "node_modules/trim-lines": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/trim-lines/-/trim-lines-3.0.1.tgz", + "integrity": "sha512-kRj8B+YHZCc9kQYdWfJB2/oUl9rA99qbowYYBtr4ui4mZyAQ2JpvVBd/6U2YloATfqBhBTSMhTpgBHtU0Mf3Rg==", + "license": "MIT", + "funding": { + "type": "github", + "url": "https://github.com/sponsors/wooorm" + } }, - "esbuild": { - "version": "0.18.11", - "resolved": "https://registry.npmjs.org/esbuild/-/esbuild-0.18.11.tgz", - "integrity": "sha512-i8u6mQF0JKJUlGR3OdFLKldJQMMs8OqM9Cc3UCi9XXziJ9WERM5bfkHaEAy0YAvPRMgqSW55W7xYn84XtEFTtA==", - "dev": true, - "requires": { - "@esbuild/android-arm": "0.18.11", - "@esbuild/android-arm64": "0.18.11", - "@esbuild/android-x64": "0.18.11", - "@esbuild/darwin-arm64": "0.18.11", - "@esbuild/darwin-x64": "0.18.11", - "@esbuild/freebsd-arm64": "0.18.11", - "@esbuild/freebsd-x64": "0.18.11", - "@esbuild/linux-arm": "0.18.11", - "@esbuild/linux-arm64": "0.18.11", - "@esbuild/linux-ia32": "0.18.11", - "@esbuild/linux-loong64": "0.18.11", - "@esbuild/linux-mips64el": "0.18.11", - "@esbuild/linux-ppc64": "0.18.11", - "@esbuild/linux-riscv64": "0.18.11", - "@esbuild/linux-s390x": "0.18.11", - "@esbuild/linux-x64": "0.18.11", - "@esbuild/netbsd-x64": "0.18.11", - "@esbuild/openbsd-x64": "0.18.11", - "@esbuild/sunos-x64": "0.18.11", - "@esbuild/win32-arm64": "0.18.11", - "@esbuild/win32-ia32": "0.18.11", - "@esbuild/win32-x64": "0.18.11" - } - }, - "escalade": { - "version": "3.1.1", - "resolved": "https://registry.npmjs.org/escalade/-/escalade-3.1.1.tgz", - "integrity": "sha512-k0er2gUkLf8O0zKJiAhmkTnJlTvINGv7ygDNPbeIsX/TJjGJZHuh9B2UxbsaEkmlEo9MfhrSzmhIlhRlI2GXnw==", - "dev": true + "node_modules/trough": { + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/trough/-/trough-2.2.0.tgz", + "integrity": "sha512-tmMpK00BjZiUyVyvrBK7knerNgmgvcV/KLVyuma/SC+TQN167GrMRciANTz09+k3zW8L8t60jWO1GpfkZdjTaw==", + "license": "MIT", + "funding": { + "type": "github", + "url": "https://github.com/sponsors/wooorm" + } }, - "escape-string-regexp": { - "version": "1.0.5", - "resolved": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-1.0.5.tgz", - "integrity": "sha512-vbRorB5FUQWvla16U8R/qgaFIya2qGzwDrNmCZuYKrbdSUMG6I1ZCGQRefkRVhuOkIGVne7BQ35DSfo1qvJqFg==", - "dev": true + "node_modules/tslib": { + "version": "2.5.0", + 
"license": "0BSD" }, - "fsevents": { - "version": "2.3.2", - "resolved": "https://registry.npmjs.org/fsevents/-/fsevents-2.3.2.tgz", - "integrity": "sha512-xiqMQR4xAeHTuB9uWm+fFRcIOgKBMiOBP+eXiyT7jsgVCq1bkVygt00oASowB7EdtpOHaaPgKt812P9ab+DDKA==", - "dev": true, - "optional": true + "node_modules/typescript": { + "version": "5.6.3", + "resolved": "https://registry.npmjs.org/typescript/-/typescript-5.6.3.tgz", + "integrity": "sha512-hjcS1mhfuyi4WW8IWtjP7brDrG2cuDZukyrYrSauoXGNgx0S7zceP07adYkJycEr56BOUTNPzbInooiN3fn1qw==", + "devOptional": true, + "bin": { + "tsc": "bin/tsc", + "tsserver": "bin/tsserver" + }, + "engines": { + "node": ">=14.17" + } }, - "gensync": { - "version": "1.0.0-beta.2", - "resolved": "https://registry.npmjs.org/gensync/-/gensync-1.0.0-beta.2.tgz", - "integrity": "sha512-3hN7NaskYvMDLQY55gnW3NQ+mesEAepTqlg+VEbj7zzqEMBVNhzcGYYeqFo/TlYz6eQiFcp1HcsCZO+nGgS8zg==", - "dev": true + "node_modules/unified": { + "version": "11.0.5", + "resolved": "https://registry.npmjs.org/unified/-/unified-11.0.5.tgz", + "integrity": "sha512-xKvGhPWw3k84Qjh8bI3ZeJjqnyadK+GEFtazSfZv/rKeTkTjOJho6mFqh2SM96iIcZokxiOpg78GazTSg8+KHA==", + "license": "MIT", + "dependencies": { + "@types/unist": "^3.0.0", + "bail": "^2.0.0", + "devlop": "^1.0.0", + "extend": "^3.0.0", + "is-plain-obj": "^4.0.0", + "trough": "^2.0.0", + "vfile": "^6.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } }, - "globals": { - "version": "11.12.0", - "resolved": "https://registry.npmjs.org/globals/-/globals-11.12.0.tgz", - "integrity": "sha512-WOBp/EEGUiIsJSp7wcv/y6MO+lV9UoncWqxuFfm8eBwzWNgyfBd6Gz+IeKQ9jCmyhoH99g15M3T+QaVHFjizVA==", - "dev": true + "node_modules/unified/node_modules/@types/unist": { + "version": "3.0.3", + "resolved": "https://registry.npmjs.org/@types/unist/-/unist-3.0.3.tgz", + "integrity": "sha512-ko/gIFJRv177XgZsZcBwnqJN5x/Gien8qNOn0D5bQU/zAzVf9Zt3BlcUiLqhV9y4ARk0GbT3tnUiPNgnTXzc/Q==", + "license": "MIT" }, - "has-flag": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-3.0.0.tgz", - "integrity": "sha512-sKJf1+ceQBr4SMkvQnBDNDtf4TXpVhVGateu0t918bl30FnbE2m4vNLX+VWe/dpjlb+HugGYzW7uQXH98HPEYw==", - "dev": true + "node_modules/unist-util-is": { + "version": "6.0.0", + "resolved": "https://registry.npmjs.org/unist-util-is/-/unist-util-is-6.0.0.tgz", + "integrity": "sha512-2qCTHimwdxLfz+YzdGfkqNlH0tLi9xjTnHddPmJwtIG9MGsdbutfTc4P+haPD7l7Cjxf/WZj+we5qfVPvvxfYw==", + "license": "MIT", + "dependencies": { + "@types/unist": "^3.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } }, - "js-tokens": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/js-tokens/-/js-tokens-4.0.0.tgz", - "integrity": "sha512-RdJUflcE3cUzKiMqQgsCu06FPu9UdIJO0beYbPhHN4k6apgJtifcoCtT9bcxOpYBtpD2kCM6Sbzg4CausW/PKQ==" + "node_modules/unist-util-is/node_modules/@types/unist": { + "version": "3.0.3", + "resolved": "https://registry.npmjs.org/@types/unist/-/unist-3.0.3.tgz", + "integrity": "sha512-ko/gIFJRv177XgZsZcBwnqJN5x/Gien8qNOn0D5bQU/zAzVf9Zt3BlcUiLqhV9y4ARk0GbT3tnUiPNgnTXzc/Q==", + "license": "MIT" }, - "jsesc": { - "version": "2.5.2", - "resolved": "https://registry.npmjs.org/jsesc/-/jsesc-2.5.2.tgz", - "integrity": "sha512-OYu7XEzjkCQ3C5Ps3QIZsQfNpqoJyZZA99wd9aWd05NCtC5pWOkShK2mkL6HXQR6/Cy2lbNdPlZBpuQHXE63gA==", - "dev": true + "node_modules/unist-util-position": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/unist-util-position/-/unist-util-position-5.0.0.tgz", + 
"integrity": "sha512-fucsC7HjXvkB5R3kTCO7kUjRdrS0BJt3M/FPxmHMBOm8JQi2BsHAHFsy27E0EolP8rp0NzXsJ+jNPyDWvOJZPA==", + "license": "MIT", + "dependencies": { + "@types/unist": "^3.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } }, - "json5": { - "version": "2.2.3", - "resolved": "https://registry.npmjs.org/json5/-/json5-2.2.3.tgz", - "integrity": "sha512-XmOWe7eyHYH14cLdVPoyg+GOH3rYX++KpzrylJwSW98t3Nk+U8XOl8FWKOgwtzdb8lXGf6zYwDUzeHMWfxasyg==", - "dev": true + "node_modules/unist-util-position/node_modules/@types/unist": { + "version": "3.0.3", + "resolved": "https://registry.npmjs.org/@types/unist/-/unist-3.0.3.tgz", + "integrity": "sha512-ko/gIFJRv177XgZsZcBwnqJN5x/Gien8qNOn0D5bQU/zAzVf9Zt3BlcUiLqhV9y4ARk0GbT3tnUiPNgnTXzc/Q==", + "license": "MIT" }, - "loose-envify": { - "version": "1.4.0", - "resolved": "https://registry.npmjs.org/loose-envify/-/loose-envify-1.4.0.tgz", - "integrity": "sha512-lyuxPGr/Wfhrlem2CL/UcnUc1zcqKAImBDzukY7Y5F/yQiNdko6+fRLevlw1HgMySw7f611UIY408EtxRSoK3Q==", - "requires": { - "js-tokens": "^3.0.0 || ^4.0.0" + "node_modules/unist-util-remove-position": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/unist-util-remove-position/-/unist-util-remove-position-5.0.0.tgz", + "integrity": "sha512-Hp5Kh3wLxv0PHj9m2yZhhLt58KzPtEYKQQ4yxfYFEO7EvHwzyDYnduhHnY1mDxoqr7VUwVuHXk9RXKIiYS1N8Q==", + "license": "MIT", + "dependencies": { + "@types/unist": "^3.0.0", + "unist-util-visit": "^5.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" } }, - "lru-cache": { - "version": "5.1.1", - "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-5.1.1.tgz", - "integrity": "sha512-KpNARQA3Iwv+jTA0utUVVbrh+Jlrr1Fv0e56GGzAFOXN7dk/FviaDW8LHmK52DlcH4WP2n6gI8vN1aesBFgo9w==", - "dev": true, - "requires": { - "yallist": "^3.0.2" + "node_modules/unist-util-remove-position/node_modules/@types/unist": { + "version": "3.0.3", + "resolved": "https://registry.npmjs.org/@types/unist/-/unist-3.0.3.tgz", + "integrity": "sha512-ko/gIFJRv177XgZsZcBwnqJN5x/Gien8qNOn0D5bQU/zAzVf9Zt3BlcUiLqhV9y4ARk0GbT3tnUiPNgnTXzc/Q==", + "license": "MIT" + }, + "node_modules/unist-util-stringify-position": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/unist-util-stringify-position/-/unist-util-stringify-position-4.0.0.tgz", + "integrity": "sha512-0ASV06AAoKCDkS2+xw5RXJywruurpbC4JZSm7nr7MOt1ojAzvyyaO+UxZf18j8FCF6kmzCZKcAgN/yu2gm2XgQ==", + "license": "MIT", + "dependencies": { + "@types/unist": "^3.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" } }, - "ms": { - "version": "2.1.2", - "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.2.tgz", - "integrity": "sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w==", - "dev": true + "node_modules/unist-util-stringify-position/node_modules/@types/unist": { + "version": "3.0.3", + "resolved": "https://registry.npmjs.org/@types/unist/-/unist-3.0.3.tgz", + "integrity": "sha512-ko/gIFJRv177XgZsZcBwnqJN5x/Gien8qNOn0D5bQU/zAzVf9Zt3BlcUiLqhV9y4ARk0GbT3tnUiPNgnTXzc/Q==", + "license": "MIT" }, - "nanoid": { - "version": "3.3.6", - "resolved": "https://registry.npmjs.org/nanoid/-/nanoid-3.3.6.tgz", - "integrity": "sha512-BGcqMMJuToF7i1rt+2PWSNVnWIkGCU78jBG3RxO/bZlnZPK2Cmi2QaffxGO/2RvWi9sL+FAiRiXMgsyxQ1DIDA==", - "dev": true + "node_modules/unist-util-visit": { + "version": "5.0.0", + "resolved": 
"https://registry.npmjs.org/unist-util-visit/-/unist-util-visit-5.0.0.tgz", + "integrity": "sha512-MR04uvD+07cwl/yhVuVWAtw+3GOR/knlL55Nd/wAdblk27GCVt3lqpTivy/tkJcZoNPzTwS1Y+KMojlLDhoTzg==", + "license": "MIT", + "dependencies": { + "@types/unist": "^3.0.0", + "unist-util-is": "^6.0.0", + "unist-util-visit-parents": "^6.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } }, - "node-releases": { - "version": "2.0.13", - "resolved": "https://registry.npmjs.org/node-releases/-/node-releases-2.0.13.tgz", - "integrity": "sha512-uYr7J37ae/ORWdZeQ1xxMJe3NtdmqMC/JZK+geofDrkLUApKRHPd18/TxtBOJ4A0/+uUIliorNrfYV6s1b02eQ==", - "dev": true + "node_modules/unist-util-visit-parents": { + "version": "6.0.1", + "resolved": "https://registry.npmjs.org/unist-util-visit-parents/-/unist-util-visit-parents-6.0.1.tgz", + "integrity": "sha512-L/PqWzfTP9lzzEa6CKs0k2nARxTdZduw3zyh8d2NVBnsyvHjSX4TWse388YrrQKbvI8w20fGjGlhgT96WwKykw==", + "license": "MIT", + "dependencies": { + "@types/unist": "^3.0.0", + "unist-util-is": "^6.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } }, - "picocolors": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/picocolors/-/picocolors-1.0.0.tgz", - "integrity": "sha512-1fygroTLlHu66zi26VoTDv8yRgm0Fccecssto+MhsZ0D/DGW2sm8E8AjW7NU5VVTRt5GxbeZ5qBuJr+HyLYkjQ==", - "dev": true + "node_modules/unist-util-visit-parents/node_modules/@types/unist": { + "version": "3.0.3", + "resolved": "https://registry.npmjs.org/@types/unist/-/unist-3.0.3.tgz", + "integrity": "sha512-ko/gIFJRv177XgZsZcBwnqJN5x/Gien8qNOn0D5bQU/zAzVf9Zt3BlcUiLqhV9y4ARk0GbT3tnUiPNgnTXzc/Q==", + "license": "MIT" + }, + "node_modules/unist-util-visit/node_modules/@types/unist": { + "version": "3.0.3", + "resolved": "https://registry.npmjs.org/@types/unist/-/unist-3.0.3.tgz", + "integrity": "sha512-ko/gIFJRv177XgZsZcBwnqJN5x/Gien8qNOn0D5bQU/zAzVf9Zt3BlcUiLqhV9y4ARk0GbT3tnUiPNgnTXzc/Q==", + "license": "MIT" }, - "postcss": { - "version": "8.4.25", - "resolved": "https://registry.npmjs.org/postcss/-/postcss-8.4.25.tgz", - "integrity": "sha512-7taJ/8t2av0Z+sQEvNzCkpDynl0tX3uJMCODi6nT3PfASC7dYCWV9aQ+uiCf+KBD4SEFcu+GvJdGdwzQ6OSjCw==", + "node_modules/update-browserslist-db": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/update-browserslist-db/-/update-browserslist-db-1.1.1.tgz", + "integrity": "sha512-R8UzCaa9Az+38REPiJ1tXlImTJXlVfgHZsglwBD/k6nj76ctsH1E3q4doGrukiLQd3sGQYu56r5+lo5r94l29A==", "dev": true, - "requires": { - "nanoid": "^3.3.6", - "picocolors": "^1.0.0", - "source-map-js": "^1.0.2" + "funding": [ + { + "type": "opencollective", + "url": "https://opencollective.com/browserslist" + }, + { + "type": "tidelift", + "url": "https://tidelift.com/funding/github/npm/browserslist" + }, + { + "type": "github", + "url": "https://github.com/sponsors/ai" + } + ], + "dependencies": { + "escalade": "^3.2.0", + "picocolors": "^1.1.0" + }, + "bin": { + "update-browserslist-db": "cli.js" + }, + "peerDependencies": { + "browserslist": ">= 4.21.0" } }, - "prettier": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/prettier/-/prettier-3.0.0.tgz", - "integrity": "sha512-zBf5eHpwHOGPC47h0zrPyNn+eAEIdEzfywMoYn2XPi0P44Zp0tSq64rq0xAREh4auw2cJZHo9QUob+NqCQky4g==", - "dev": true - }, - "react": { - "version": "18.2.0", - "resolved": "https://registry.npmjs.org/react/-/react-18.2.0.tgz", - "integrity": "sha512-/3IjMdb2L9QbBdWiW5e3P2/npwMBaU9mHCSCUzNln0ZCYbcfTsGbTJrU/kGemdH2IWmB2ioZ+zkxtmq6g09fGQ==", - 
"requires": { - "loose-envify": "^1.1.0" + "node_modules/use-disposable": { + "version": "1.0.4", + "resolved": "https://registry.npmjs.org/use-disposable/-/use-disposable-1.0.4.tgz", + "integrity": "sha512-j83t6AMLWUyb5zwlTDqf6dP9LezM9R0yTbI/b6olmdaGtCKQUe9pgJWV6dRaaQLcozypjIEp4EmZr2DkZGKLSg==", + "peerDependencies": { + "@types/react": ">=16.8.0 <19.0.0", + "@types/react-dom": ">=16.8.0 <19.0.0", + "react": ">=16.8.0 <19.0.0", + "react-dom": ">=16.8.0 <19.0.0" } }, - "react-dom": { - "version": "18.2.0", - "resolved": "https://registry.npmjs.org/react-dom/-/react-dom-18.2.0.tgz", - "integrity": "sha512-6IMTriUmvsjHUjNtEDudZfuDQUoWXVxKHhlEGSk81n4YFS+r/Kl99wXiwlVXtPBtJenozv2P+hxDsw9eA7Xo6g==", - "requires": { - "loose-envify": "^1.1.0", - "scheduler": "^0.23.0" + "node_modules/use-sync-external-store": { + "version": "1.2.2", + "resolved": "https://registry.npmjs.org/use-sync-external-store/-/use-sync-external-store-1.2.2.tgz", + "integrity": "sha512-PElTlVMwpblvbNqQ82d2n6RjStvdSoNe9FG28kNfz3WiXilJm4DdNkEzRhCZuIDwY8U08WVihhGR5iRqAwfDiw==", + "peerDependencies": { + "react": "^16.8.0 || ^17.0.0 || ^18.0.0" } }, - "react-refresh": { - "version": "0.14.0", - "resolved": "https://registry.npmjs.org/react-refresh/-/react-refresh-0.14.0.tgz", - "integrity": "sha512-wViHqhAd8OHeLS/IRMJjTSDHF3U9eWi62F/MledQGPdJGDhodXJ9PBLNGr6WWL7qlH12Mt3TyTpbS+hGXMjCzQ==", - "dev": true + "node_modules/vfile": { + "version": "6.0.2", + "resolved": "https://registry.npmjs.org/vfile/-/vfile-6.0.2.tgz", + "integrity": "sha512-zND7NlS8rJYb/sPqkb13ZvbbUoExdbi4w3SfRrMq6R3FvnLQmmfpajJNITuuYm6AZ5uao9vy4BAos3EXBPf2rg==", + "license": "MIT", + "dependencies": { + "@types/unist": "^3.0.0", + "unist-util-stringify-position": "^4.0.0", + "vfile-message": "^4.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } }, - "react-router": { - "version": "6.14.1", - "resolved": "https://registry.npmjs.org/react-router/-/react-router-6.14.1.tgz", - "integrity": "sha512-U4PfgvG55LdvbQjg5Y9QRWyVxIdO1LlpYT7x+tMAxd9/vmiPuJhIwdxZuIQLN/9e3O4KFDHYfR9gzGeYMasW8g==", - "requires": { - "@remix-run/router": "1.7.1" + "node_modules/vfile-location": { + "version": "5.0.3", + "resolved": "https://registry.npmjs.org/vfile-location/-/vfile-location-5.0.3.tgz", + "integrity": "sha512-5yXvWDEgqeiYiBe1lbxYF7UMAIm/IcopxMHrMQDq3nvKcjPKIhZklUKL+AE7J7uApI4kwe2snsK+eI6UTj9EHg==", + "license": "MIT", + "dependencies": { + "@types/unist": "^3.0.0", + "vfile": "^6.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" } }, - "react-router-dom": { - "version": "6.14.1", - "resolved": "https://registry.npmjs.org/react-router-dom/-/react-router-dom-6.14.1.tgz", - "integrity": "sha512-ssF6M5UkQjHK70fgukCJyjlda0Dgono2QGwqGvuk7D+EDGHdacEN3Yke2LTMjkrpHuFwBfDFsEjGVXBDmL+bWw==", - "requires": { - "@remix-run/router": "1.7.1", - "react-router": "6.14.1" + "node_modules/vfile-location/node_modules/@types/unist": { + "version": "3.0.3", + "resolved": "https://registry.npmjs.org/@types/unist/-/unist-3.0.3.tgz", + "integrity": "sha512-ko/gIFJRv177XgZsZcBwnqJN5x/Gien8qNOn0D5bQU/zAzVf9Zt3BlcUiLqhV9y4ARk0GbT3tnUiPNgnTXzc/Q==", + "license": "MIT" + }, + "node_modules/vfile-message": { + "version": "4.0.2", + "resolved": "https://registry.npmjs.org/vfile-message/-/vfile-message-4.0.2.tgz", + "integrity": "sha512-jRDZ1IMLttGj41KcZvlrYAaI3CfqpLpfpf+Mfig13viT6NKvRzWZ+lXz0Y5D60w6uJIBAOGq9mSHf0gktF0duw==", + "license": "MIT", + "dependencies": { + "@types/unist": "^3.0.0", + 
"unist-util-stringify-position": "^4.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" } }, - "regenerator-runtime": { - "version": "0.13.11", - "resolved": "https://registry.npmjs.org/regenerator-runtime/-/regenerator-runtime-0.13.11.tgz", - "integrity": "sha512-kY1AZVr2Ra+t+piVaJ4gxaFaReZVH40AKNo7UCX6W+dEwBo/2oZJzqfuN1qLq1oL45o56cPaTXELwrTh8Fpggg==" + "node_modules/vfile-message/node_modules/@types/unist": { + "version": "3.0.3", + "resolved": "https://registry.npmjs.org/@types/unist/-/unist-3.0.3.tgz", + "integrity": "sha512-ko/gIFJRv177XgZsZcBwnqJN5x/Gien8qNOn0D5bQU/zAzVf9Zt3BlcUiLqhV9y4ARk0GbT3tnUiPNgnTXzc/Q==", + "license": "MIT" + }, + "node_modules/vfile/node_modules/@types/unist": { + "version": "3.0.3", + "resolved": "https://registry.npmjs.org/@types/unist/-/unist-3.0.3.tgz", + "integrity": "sha512-ko/gIFJRv177XgZsZcBwnqJN5x/Gien8qNOn0D5bQU/zAzVf9Zt3BlcUiLqhV9y4ARk0GbT3tnUiPNgnTXzc/Q==", + "license": "MIT" }, - "rollup": { - "version": "3.26.2", - "resolved": "https://registry.npmjs.org/rollup/-/rollup-3.26.2.tgz", - "integrity": "sha512-6umBIGVz93er97pMgQO08LuH3m6PUb3jlDUUGFsNJB6VgTCUaDFpupf5JfU30529m/UKOgmiX+uY6Sx8cOYpLA==", + "node_modules/vite": { + "version": "5.4.18", + "resolved": "https://registry.npmjs.org/vite/-/vite-5.4.18.tgz", + "integrity": "sha512-1oDcnEp3lVyHCuQ2YFelM4Alm2o91xNoMncRm1U7S+JdYfYOvbiGZ3/CxGttrOu2M/KcGz7cRC2DoNUA6urmMA==", "dev": true, - "requires": { - "fsevents": "~2.3.2" + "license": "MIT", + "dependencies": { + "esbuild": "^0.21.3", + "postcss": "^8.4.43", + "rollup": "^4.20.0" + }, + "bin": { + "vite": "bin/vite.js" + }, + "engines": { + "node": "^18.0.0 || >=20.0.0" + }, + "funding": { + "url": "https://github.com/vitejs/vite?sponsor=1" + }, + "optionalDependencies": { + "fsevents": "~2.3.3" + }, + "peerDependencies": { + "@types/node": "^18.0.0 || >=20.0.0", + "less": "*", + "lightningcss": "^1.21.0", + "sass": "*", + "sass-embedded": "*", + "stylus": "*", + "sugarss": "*", + "terser": "^5.4.0" + }, + "peerDependenciesMeta": { + "@types/node": { + "optional": true + }, + "less": { + "optional": true + }, + "lightningcss": { + "optional": true + }, + "sass": { + "optional": true + }, + "sass-embedded": { + "optional": true + }, + "stylus": { + "optional": true + }, + "sugarss": { + "optional": true + }, + "terser": { + "optional": true + } } }, - "rtl-css-js": { - "version": "1.16.1", - "resolved": "https://registry.npmjs.org/rtl-css-js/-/rtl-css-js-1.16.1.tgz", - "integrity": "sha512-lRQgou1mu19e+Ya0LsTvKrVJ5TYUbqCVPAiImX3UfLTenarvPUl1QFdvu5Z3PYmHT9RCcwIfbjRQBntExyj3Zg==", - "requires": { - "@babel/runtime": "^7.1.2" + "node_modules/void-elements": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/void-elements/-/void-elements-3.1.0.tgz", + "integrity": "sha512-Dhxzh5HZuiHQhbvTW9AMetFfBHDMYpo23Uo9btPXgdYP+3T5S+p+jgNy7spra+veYhBP2dCSgxR/i2Y02h5/6w==", + "license": "MIT", + "engines": { + "node": ">=0.10.0" } }, - "scheduler": { - "version": "0.23.0", - "resolved": "https://registry.npmjs.org/scheduler/-/scheduler-0.23.0.tgz", - "integrity": "sha512-CtuThmgHNg7zIZWAXi3AsyIzA3n4xx7aNyjwC2VJldO2LMVDhFK+63xGqq6CsJH4rTAt6/M+N4GhZiDYPx9eUw==", - "requires": { - "loose-envify": "^1.1.0" + "node_modules/web-namespaces": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/web-namespaces/-/web-namespaces-2.0.1.tgz", + "integrity": "sha512-bKr1DkiNa2krS7qxNtdrtHAmzuYGFQLiQ13TsorsdT6ULTkPLKuu5+GsFpDlg6JFjUTwX2DyhMPG2be8uPrqsQ==", + "license": "MIT", + "funding": { + "type": 
"github", + "url": "https://github.com/sponsors/wooorm" } }, - "source-map-js": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/source-map-js/-/source-map-js-1.0.2.tgz", - "integrity": "sha512-R0XvVJ9WusLiqTCEiGCmICCMplcCkIwwR11mOSD9CR5u+IXYdiseeEuXCVAjS54zqwkLcPNnmU4OeJ6tUrWhDw==", - "dev": true + "node_modules/webidl-conversions": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/webidl-conversions/-/webidl-conversions-3.0.1.tgz", + "integrity": "sha512-2JAn3z8AR6rjK8Sm8orRC0h/bcl/DqL7tRPdGZ4I1CjdF+EaMLmYxBHyXuKL849eucPFhvBoxMsflfOb8kxaeQ==", + "license": "BSD-2-Clause" }, - "stylis": { - "version": "4.1.3", - "resolved": "https://registry.npmjs.org/stylis/-/stylis-4.1.3.tgz", - "integrity": "sha512-GP6WDNWf+o403jrEp9c5jibKavrtLW+/qYGhFxFrG8maXhwTBI7gLLhiBb0o7uFccWN+EOS9aMO6cGHWAO07OA==" + "node_modules/whatwg-url": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/whatwg-url/-/whatwg-url-5.0.0.tgz", + "integrity": "sha512-saE57nupxk6v3HY35+jzBwYa0rKSy0XR8JSxZPwgLr7ys0IBzhGviA1/TUGJLmSVqs8pb9AnvICXEuOHLprYTw==", + "license": "MIT", + "dependencies": { + "tr46": "~0.0.3", + "webidl-conversions": "^3.0.0" + } }, - "supports-color": { - "version": "5.5.0", - "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-5.5.0.tgz", - "integrity": "sha512-QjVjwdXIt408MIiAqCX4oUKsgU2EqAGzs2Ppkm4aQYbjm+ZEWEcW4SfFNTr4uMNZma0ey4f5lgLrkB0aX0QMow==", + "node_modules/wrap-ansi": { + "version": "7.0.0", + "resolved": "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-7.0.0.tgz", + "integrity": "sha512-YVGIj2kamLSTxw6NsZjoBxfSwsn0ycdesmc4p+Q21c5zPuZ1pl+NfxVdxPtdHvmNVOQ6XSYG4AUtyt/Fi7D16Q==", "dev": true, - "requires": { - "has-flag": "^3.0.0" + "license": "MIT", + "dependencies": { + "ansi-styles": "^4.0.0", + "string-width": "^4.1.0", + "strip-ansi": "^6.0.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/chalk/wrap-ansi?sponsor=1" } }, - "to-fast-properties": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/to-fast-properties/-/to-fast-properties-2.0.0.tgz", - "integrity": "sha512-/OaKK0xYrs3DmxRYqL/yDc+FxFUVYhDlXMhRmv3z915w2HF1tnN1omB354j8VUGO/hbRzyD6Y3sA7v7GS/ceog==", - "dev": true - }, - "tslib": { - "version": "2.5.0", - "resolved": "https://registry.npmjs.org/tslib/-/tslib-2.5.0.tgz", - "integrity": "sha512-336iVw3rtn2BUK7ORdIAHTyxHGRIHVReokCR3XjbckJMK7ms8FysBfhLR8IXnAgy7T0PTPNBWKiH514FOW/WSg==" + "node_modules/wrap-ansi/node_modules/ansi-styles": { + "version": "4.3.0", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz", + "integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==", + "dev": true, + "license": "MIT", + "dependencies": { + "color-convert": "^2.0.1" + }, + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/chalk/ansi-styles?sponsor=1" + } }, - "typescript": { - "version": "5.1.6", - "resolved": "https://registry.npmjs.org/typescript/-/typescript-5.1.6.tgz", - "integrity": "sha512-zaWCozRZ6DLEWAWFrVDz1H6FVXzUSfTy5FUMWsQlU8Ym5JP9eO4xkTIROFCQvhQf61z6O/G6ugw3SgAnvvm+HA==", - "dev": true + "node_modules/wrap-ansi/node_modules/color-convert": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz", + "integrity": "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "color-name": "~1.1.4" + }, + "engines": { + "node": 
">=7.0.0" + } }, - "update-browserslist-db": { - "version": "1.0.11", - "resolved": "https://registry.npmjs.org/update-browserslist-db/-/update-browserslist-db-1.0.11.tgz", - "integrity": "sha512-dCwEFf0/oT85M1fHBg4F0jtLwJrutGoHSQXCh7u4o2t1drG+c0a9Flnqww6XUKSfQMPpJBRjU8d4RXB09qtvaA==", + "node_modules/wrap-ansi/node_modules/color-name": { + "version": "1.1.4", + "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz", + "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==", "dev": true, - "requires": { - "escalade": "^3.1.1", - "picocolors": "^1.0.0" + "license": "MIT" + }, + "node_modules/xtend": { + "version": "4.0.2", + "license": "MIT", + "engines": { + "node": ">=0.4" } }, - "vite": { - "version": "4.4.2", - "resolved": "https://registry.npmjs.org/vite/-/vite-4.4.2.tgz", - "integrity": "sha512-zUcsJN+UvdSyHhYa277UHhiJ3iq4hUBwHavOpsNUGsTgjBeoBlK8eDt+iT09pBq0h9/knhG/SPrZiM7cGmg7NA==", + "node_modules/y18n": { + "version": "5.0.8", + "resolved": "https://registry.npmjs.org/y18n/-/y18n-5.0.8.tgz", + "integrity": "sha512-0pfFzegeDWJHJIAmTLRP2DwHjdF5s7jo9tuztdQxAhINCdvS+3nGINqPd00AphqJR/0LhANUS6/+7SCb98YOfA==", "dev": true, - "requires": { - "esbuild": "^0.18.10", - "fsevents": "~2.3.2", - "postcss": "^8.4.24", - "rollup": "^3.25.2" + "license": "ISC", + "engines": { + "node": ">=10" } }, - "yallist": { + "node_modules/yallist": { "version": "3.1.1", "resolved": "https://registry.npmjs.org/yallist/-/yallist-3.1.1.tgz", "integrity": "sha512-a4UGQaWPH59mOXUYnAG2ewncQS4i4F43Tv3JoAM+s2VDAmS9NsK8GpDMLrCHPksFT7h3K6TOoUNn2pb7RoXx4g==", "dev": true + }, + "node_modules/yargs": { + "version": "17.7.2", + "resolved": "https://registry.npmjs.org/yargs/-/yargs-17.7.2.tgz", + "integrity": "sha512-7dSzzRQ++CKnNI/krKnYRV7JKKPUXMEh61soaHKg9mrWEhzFWhFnxPxGl+69cD1Ou63C13NUPCnmIcrvqCuM6w==", + "dev": true, + "license": "MIT", + "dependencies": { + "cliui": "^8.0.1", + "escalade": "^3.1.1", + "get-caller-file": "^2.0.5", + "require-directory": "^2.1.1", + "string-width": "^4.2.3", + "y18n": "^5.0.5", + "yargs-parser": "^21.1.1" + }, + "engines": { + "node": ">=12" + } + }, + "node_modules/yargs-parser": { + "version": "21.1.1", + "resolved": "https://registry.npmjs.org/yargs-parser/-/yargs-parser-21.1.1.tgz", + "integrity": "sha512-tVpsJW7DdjecAiFpbIB1e3qxIQsE6NoPc5/eTdrbbIC4h0LVsWhnoa3g+m2HclBIujHzsxZ4VJVA+GUuc2/LBw==", + "dev": true, + "license": "ISC", + "engines": { + "node": ">=12" + } + }, + "node_modules/zwitch": { + "version": "2.0.4", + "resolved": "https://registry.npmjs.org/zwitch/-/zwitch-2.0.4.tgz", + "integrity": "sha512-bXE4cR/kVZhKZX/RjPEflHaKVhUVl85noU3v6b8apfQEc1x4A+zBxjZ4lN8LqGd6WZ3dl98pY4o717VFmoPp+A==", + "license": "MIT", + "funding": { + "type": "github", + "url": "https://github.com/sponsors/wooorm" + } } } } diff --git a/app/frontend/package.json b/app/frontend/package.json index ae20e64738..731bf9ba12 100644 --- a/app/frontend/package.json +++ b/app/frontend/package.json @@ -4,29 +4,47 @@ "version": "0.0.0", "type": "module", "engines": { - "node": ">=14.0.0" + "node": ">=20.0.0" }, "scripts": { - "dev": "vite", + "dev": "vite --host 127.0.0.1", "build": "tsc && vite build", - "watch": "tsc && vite build --watch" + "preview": "vite preview" }, "dependencies": { - "@fluentui/react": "^8.110.7", - "@fluentui/react-icons": "^2.0.206", - "@react-spring/web": "^9.7.3", - "dompurify": "^3.0.4", - "react": "^18.2.0", - "react-dom": "^18.2.0", - "react-router-dom": "^6.14.1" + "@azure/msal-browser": "^3.26.1", + 
"@azure/msal-react": "^2.2.0", + "@fluentui/react": "^8.112.5", + "@fluentui/react-components": "^9.56.2", + "@fluentui/react-icons": "^2.0.265", + "@react-spring/web": "^9.7.5", + "dompurify": "^3.2.4", + "i18next": "^24.2.0", + "i18next-browser-languagedetector": "^8.0.2", + "i18next-http-backend": "^3.0.1", + "idb": "^8.0.0", + "ndjson-readablestream": "^1.2.0", + "react": "^18.3.1", + "react-dom": "^18.3.1", + "react-markdown": "^9.0.1", + "react-router-dom": "^6.28.0", + "react-helmet-async": "^2.0.5", + "react-i18next": "^15.4.1", + "react-syntax-highlighter": "^15.6.1", + "rehype-raw": "^7.0.0", + "remark-gfm": "^4.0.0", + "scheduler": "^0.20.2" }, "devDependencies": { - "@types/dompurify": "^3.0.2", - "@types/react": "^18.2.14", - "@types/react-dom": "^18.2.6", - "@vitejs/plugin-react": "^4.0.2", - "prettier": "^3.0.0", - "typescript": "^5.1.6", - "vite": "^4.4.2" + "@types/dom-speech-recognition": "^0.0.4", + "@types/dompurify": "^3.0.5", + "@types/react": "^18.3.12", + "@types/react-dom": "^18.3.1", + "@types/react-syntax-highlighter": "^15.5.13", + "@vitejs/plugin-react": "^4.3.3", + "prettier": "^3.3.3", + "typescript": "^5.6.3", + "vite": "^5.4.18", + "rollup-plugin-visualizer": "^5.12.0" } } diff --git a/app/frontend/src/api/api.ts b/app/frontend/src/api/api.ts index cbedab171d..df95f801b5 100644 --- a/app/frontend/src/api/api.ts +++ b/app/frontend/src/api/api.ts @@ -1,68 +1,191 @@ -import { AskRequest, AskResponse, ChatRequest } from "./models"; +const BACKEND_URI = ""; -export async function askApi(options: AskRequest): Promise { - const response = await fetch("/ask", { +import { ChatAppResponse, ChatAppResponseOrError, ChatAppRequest, Config, SimpleAPIResponse, HistoryListApiResponse, HistoryApiResponse } from "./models"; +import { useLogin, getToken, isUsingAppServicesLogin } from "../authConfig"; + +export async function getHeaders(idToken: string | undefined): Promise> { + // If using login and not using app services, add the id token of the logged in account as the authorization + if (useLogin && !isUsingAppServicesLogin) { + if (idToken) { + return { Authorization: `Bearer ${idToken}` }; + } + } + + return {}; +} + +export async function configApi(): Promise { + const response = await fetch(`${BACKEND_URI}/config`, { + method: "GET" + }); + + return (await response.json()) as Config; +} + +export async function askApi(request: ChatAppRequest, idToken: string | undefined): Promise { + const headers = await getHeaders(idToken); + const response = await fetch(`${BACKEND_URI}/ask`, { method: "POST", - headers: { - "Content-Type": "application/json" - }, - body: JSON.stringify({ - question: options.question, - approach: options.approach, - overrides: { - retrieval_mode: options.overrides?.retrievalMode, - semantic_ranker: options.overrides?.semanticRanker, - semantic_captions: options.overrides?.semanticCaptions, - top: options.overrides?.top, - temperature: options.overrides?.temperature, - prompt_template: options.overrides?.promptTemplate, - prompt_template_prefix: options.overrides?.promptTemplatePrefix, - prompt_template_suffix: options.overrides?.promptTemplateSuffix, - exclude_category: options.overrides?.excludeCategory - } - }) + headers: { ...headers, "Content-Type": "application/json" }, + body: JSON.stringify(request) }); - const parsedResponse: AskResponse = await response.json(); if (response.status > 299 || !response.ok) { - throw Error(parsedResponse.error || "Unknown error"); + throw Error(`Request failed with status ${response.status}`); } + const 
+export async function chatApi(request: ChatAppRequest, shouldStream: boolean, idToken: string | undefined): Promise<Response> {
+    let url = `${BACKEND_URI}/chat`;
+    if (shouldStream) {
+        url += "/stream";
+    }
+    const headers = await getHeaders(idToken);
+    return await fetch(url, {
+        method: "POST",
+        headers: { ...headers, "Content-Type": "application/json" },
+        body: JSON.stringify(request)
+    });
 }
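`chatApi` intentionally returns the raw `Response` so callers can stream the body when `shouldStream` is true. The sketch below shows one possible consumer; it assumes that `ndjson-readablestream` (added to `package.json` above) exposes a default helper yielding parsed NDJSON objects, and that each streamed event carries `delta`/`error` fields shaped like `ChatAppResponseOrError`. Neither assumption is spelled out in this diff.

```ts
// Sketch only; not part of this change.
import readNDJSONStream from "ndjson-readablestream";
import { chatApi } from "./api";
import { ChatAppRequest } from "./models";

export async function streamChatAnswer(request: ChatAppRequest, idToken: string | undefined): Promise<string> {
    const response = await chatApi(request, true, idToken);
    if (!response.ok || !response.body) {
        throw Error(`Chat request failed with status ${response.status}`);
    }

    let answer = "";
    for await (const event of readNDJSONStream(response.body)) {
        if (event.error) {
            throw Error(event.error);
        }
        if (event.delta?.content) {
            // Accumulate partial tokens; a UI would render these incrementally.
            answer += event.delta.content;
        }
    }
    return answer;
}
```

Returning the raw `Response` rather than parsed JSON keeps the transport decision with the caller, which is what lets the same function serve both the streaming and non-streaming paths.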
-export async function chatApi(options: ChatRequest): Promise<AskResponse> {
-    const response = await fetch("/chat", {
+export async function getSpeechApi(text: string): Promise<string | null> {
+    return await fetch("/speech", {
         method: "POST",
         headers: {
             "Content-Type": "application/json"
         },
         body: JSON.stringify({
-            history: options.history,
-            approach: options.approach,
-            overrides: {
-                retrieval_mode: options.overrides?.retrievalMode,
-                semantic_ranker: options.overrides?.semanticRanker,
-                semantic_captions: options.overrides?.semanticCaptions,
-                top: options.overrides?.top,
-                temperature: options.overrides?.temperature,
-                prompt_template: options.overrides?.promptTemplate,
-                prompt_template_prefix: options.overrides?.promptTemplatePrefix,
-                prompt_template_suffix: options.overrides?.promptTemplateSuffix,
-                exclude_category: options.overrides?.excludeCategory,
-                suggest_followup_questions: options.overrides?.suggestFollowupQuestions
+            text: text
+        })
+    })
+        .then(response => {
+            if (response.status == 200) {
+                return response.blob();
+            } else if (response.status == 400) {
+                console.log("Speech synthesis is not enabled.");
+                return null;
+            } else {
+                console.error("Unable to get speech synthesis.");
+                return null;
             }
         })
+        .then(blob => (blob ? URL.createObjectURL(blob) : null));
+}
+
+export function getCitationFilePath(citation: string): string {
+    return `${BACKEND_URI}/content/${citation}`;
+}
+
+export async function uploadFileApi(request: FormData, idToken: string): Promise<SimpleAPIResponse> {
+    const response = await fetch("/upload", {
+        method: "POST",
+        headers: await getHeaders(idToken),
+        body: request
     });
-    const parsedResponse: AskResponse = await response.json();
-    if (response.status > 299 || !response.ok) {
-        throw Error(parsedResponse.error || "Unknown error");
+    if (!response.ok) {
+        throw new Error(`Uploading files failed: ${response.statusText}`);
     }
-    return parsedResponse;
+    const dataResponse: SimpleAPIResponse = await response.json();
+    return dataResponse;
 }
-export function getCitationFilePath(citation: string): string {
-    return `/content/${citation}`;
+export async function deleteUploadedFileApi(filename: string, idToken: string): Promise<SimpleAPIResponse> {
+    const headers = await getHeaders(idToken);
+    const response = await fetch("/delete_uploaded", {
+        method: "POST",
+        headers: { ...headers, "Content-Type": "application/json" },
+        body: JSON.stringify({ filename })
+    });
+
+    if (!response.ok) {
+        throw new Error(`Deleting file failed: ${response.statusText}`);
+    }
+
+    const dataResponse: SimpleAPIResponse = await response.json();
+    return dataResponse;
+}
+
+export async function listUploadedFilesApi(idToken: string): Promise<string[]> {
+    const response = await fetch(`/list_uploaded`, {
+        method: "GET",
+        headers: await getHeaders(idToken)
+    });
+
+    if (!response.ok) {
+        throw new Error(`Listing files failed: ${response.statusText}`);
+    }
+
+    const dataResponse: string[] = await response.json();
+    return dataResponse;
+}
+
+export async function postChatHistoryApi(item: any, idToken: string): Promise<any> {
+    const headers = await getHeaders(idToken);
+    const response = await fetch("/chat_history", {
+        method: "POST",
+        headers: { ...headers, "Content-Type": "application/json" },
+        body: JSON.stringify(item)
+    });
+
+    if (!response.ok) {
+        throw new Error(`Posting chat history failed: ${response.statusText}`);
+    }
+
+    const dataResponse: any = await response.json();
+    return dataResponse;
+}
+
+export async function getChatHistoryListApi(count: number, continuationToken: string | undefined, idToken: string): Promise<HistoryListApiResponse> {
+    const headers = await getHeaders(idToken);
+    let url = `${BACKEND_URI}/chat_history/sessions?count=${count}`;
+    if (continuationToken) {
+        url += `&continuationToken=${continuationToken}`;
+    }
+
+    const response = await fetch(url.toString(), {
+        method: "GET",
+        headers: { ...headers, "Content-Type": "application/json" }
+    });
+
+    if (!response.ok) {
+        throw new Error(`Getting chat histories failed: ${response.statusText}`);
+    }
+
+    const dataResponse: HistoryListApiResponse = await response.json();
+    return dataResponse;
+}
+
+export async function getChatHistoryApi(id: string, idToken: string): Promise<HistoryApiResponse> {
+    const headers = await getHeaders(idToken);
+    const response = await fetch(`/chat_history/sessions/${id}`, {
+        method: "GET",
+        headers: { ...headers, "Content-Type": "application/json" }
+    });
+
+    if (!response.ok) {
+        throw new Error(`Getting chat history failed: ${response.statusText}`);
+    }
+
+    const dataResponse: HistoryApiResponse = await response.json();
+    return dataResponse;
+}
+
+export async function deleteChatHistoryApi(id: string, idToken: string): Promise<void> {
+    const headers = await getHeaders(idToken);
+    const response = await fetch(`/chat_history/sessions/${id}`, {
+        method: "DELETE",
+        headers: { ...headers, 
"Content-Type": "application/json" } + }); + + if (!response.ok) { + throw new Error(`Deleting chat history failed: ${response.statusText}`); + } } diff --git a/app/frontend/src/api/models.ts b/app/frontend/src/api/models.ts index 310ce038fe..63ff5c31f4 100644 --- a/app/frontend/src/api/models.ts +++ b/app/frontend/src/api/models.ts @@ -1,48 +1,133 @@ -export const enum Approaches { - RetrieveThenRead = "rtr", - ReadRetrieveRead = "rrr", - ReadDecomposeAsk = "rda" -} - export const enum RetrievalMode { Hybrid = "hybrid", Vectors = "vectors", Text = "text" } -export type AskRequestOverrides = { - retrievalMode?: RetrievalMode; - semanticRanker?: boolean; - semanticCaptions?: boolean; - excludeCategory?: string; +export const enum GPT4VInput { + TextAndImages = "textAndImages", + Images = "images", + Texts = "texts" +} + +export const enum VectorFields { + Embedding = "textEmbeddingOnly", + ImageEmbedding = "imageEmbeddingOnly", + TextAndImageEmbeddings = "textAndImageEmbeddings" +} + +export type ChatAppRequestOverrides = { + retrieval_mode?: RetrievalMode; + semantic_ranker?: boolean; + semantic_captions?: boolean; + query_rewriting?: boolean; + reasoning_effort?: string; + include_category?: string; + exclude_category?: string; + seed?: number; top?: number; + max_subqueries?: number; + results_merge_strategy?: string; temperature?: number; - promptTemplate?: string; - promptTemplatePrefix?: string; - promptTemplateSuffix?: string; - suggestFollowupQuestions?: boolean; + minimum_search_score?: number; + minimum_reranker_score?: number; + prompt_template?: string; + prompt_template_prefix?: string; + prompt_template_suffix?: string; + suggest_followup_questions?: boolean; + use_oid_security_filter?: boolean; + use_groups_security_filter?: boolean; + use_gpt4v?: boolean; + gpt4v_input?: GPT4VInput; + vector_fields: VectorFields; + language: string; + use_agentic_retrieval: boolean; }; -export type AskRequest = { - question: string; - approach: Approaches; - overrides?: AskRequestOverrides; +export type ResponseMessage = { + content: string; + role: string; }; -export type AskResponse = { - answer: string; - thoughts: string | null; +export type Thoughts = { + title: string; + description: any; // It can be any output from the api + props?: { [key: string]: any }; +}; + +export type ResponseContext = { data_points: string[]; + followup_questions: string[] | null; + thoughts: Thoughts[]; +}; + +export type ChatAppResponseOrError = { + message: ResponseMessage; + delta: ResponseMessage; + context: ResponseContext; + session_state: any; error?: string; }; -export type ChatTurn = { - user: string; - bot?: string; +export type ChatAppResponse = { + message: ResponseMessage; + delta: ResponseMessage; + context: ResponseContext; + session_state: any; +}; + +export type ChatAppRequestContext = { + overrides?: ChatAppRequestOverrides; +}; + +export type ChatAppRequest = { + messages: ResponseMessage[]; + context?: ChatAppRequestContext; + session_state: any; +}; + +export type Config = { + defaultReasoningEffort: string; + showGPT4VOptions: boolean; + showSemanticRankerOption: boolean; + showQueryRewritingOption: boolean; + showReasoningEffortOption: boolean; + streamingEnabled: boolean; + showVectorOption: boolean; + showUserUpload: boolean; + showLanguagePicker: boolean; + showSpeechInput: boolean; + showSpeechOutputBrowser: boolean; + showSpeechOutputAzure: boolean; + showChatHistoryBrowser: boolean; + showChatHistoryCosmos: boolean; + showAgenticRetrievalOption: boolean; +}; + +export type 
SimpleAPIResponse = {
+    message?: string;
+};
+
+export interface SpeechConfig {
+    speechUrls: (string | null)[];
+    setSpeechUrls: (urls: (string | null)[]) => void;
+    audio: HTMLAudioElement;
+    isPlaying: boolean;
+    setIsPlaying: (isPlaying: boolean) => void;
+}
+
+export type HistoryListApiResponse = {
+    sessions: {
+        id: string;
+        entra_oid: string;
+        title: string;
+        timestamp: number;
+    }[];
+    continuation_token?: string;
 };
-export type ChatRequest = {
-    history: ChatTurn[];
-    approach: Approaches;
-    overrides?: AskRequestOverrides;
+export type HistoryApiResponse = {
+    id: string;
+    entra_oid: string;
+    answers: any;
 };
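The request and response types above suggest a turn-by-turn protocol: a follow-up request replays the earlier `messages` plus the new user turn, and returns the opaque `session_state` from the previous `ChatAppResponse` to the backend. A small hypothetical helper, assuming that reading of the fields:

```ts
// Illustrative helper; not part of this change. The round-tripping of session_state
// is an assumption based on the field names above, not something this diff documents.
import { ChatAppRequest, ChatAppResponse, ResponseMessage } from "./models";

export function buildFollowupRequest(priorTurns: ResponseMessage[], lastResponse: ChatAppResponse, followupQuestion: string): ChatAppRequest {
    return {
        // Replay the conversation so far, then append the assistant's last answer and the new user turn.
        messages: [...priorTurns, lastResponse.message, { role: "user", content: followupQuestion }],
        // Hand the backend's opaque per-session state back so it can correlate turns.
        session_state: lastResponse.session_state
    };
}
```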
diff --git a/app/frontend/src/assets/applogo.svg b/app/frontend/src/assets/applogo.svg
new file mode 100644
index 0000000000..fb3a5b9712
--- /dev/null
+++ b/app/frontend/src/assets/applogo.svg
@@ -0,0 +1 @@
+
diff --git a/app/frontend/src/assets/github.svg b/app/frontend/src/assets/github.svg
deleted file mode 100644
index d5e6491854..0000000000
--- a/app/frontend/src/assets/github.svg
+++ /dev/null
@@ -1 +0,0 @@
-
\ No newline at end of file
diff --git a/app/frontend/src/assets/search.svg b/app/frontend/src/assets/search.svg
deleted file mode 100644
index feadc50b25..0000000000
--- a/app/frontend/src/assets/search.svg
+++ /dev/null
@@ -1 +0,0 @@
-
\ No newline at end of file
diff --git a/app/frontend/src/authConfig.ts b/app/frontend/src/authConfig.ts
new file mode 100644
index 0000000000..60de0e1a8c
--- /dev/null
+++ b/app/frontend/src/authConfig.ts
@@ -0,0 +1,252 @@
+// Refactored from https://github.com/Azure-Samples/ms-identity-javascript-react-tutorial/blob/main/1-Authentication/1-sign-in/SPA/src/authConfig.js
+
+import { IPublicClientApplication } from "@azure/msal-browser";
+
+const appServicesAuthTokenUrl = ".auth/me";
+const appServicesAuthTokenRefreshUrl = ".auth/refresh";
+const appServicesAuthLogoutUrl = ".auth/logout?post_logout_redirect_uri=/";
+
+interface AppServicesToken {
+    id_token: string;
+    access_token: string;
+    user_claims: Record<string, any>;
+    expires_on: string;
+}
+
+interface AuthSetup {
+    // Set to true if login elements should be shown in the UI
+    useLogin: boolean;
+    // Set to true if access control is enforced by the application
+    requireAccessControl: boolean;
+    // Set to true if the application allows unauthenticated access (only applies for documents without access control)
+    enableUnauthenticatedAccess: boolean;
+    /**
+     * Configuration object to be passed to MSAL instance on creation.
+     * For a full list of MSAL.js configuration parameters, visit:
+     * https://github.com/AzureAD/microsoft-authentication-library-for-js/blob/dev/lib/msal-browser/docs/configuration.md
+     */
+    msalConfig: {
+        auth: {
+            clientId: string; // Client app id used for login
+            authority: string; // Directory to use for login https://learn.microsoft.com/entra/identity-platform/msal-client-application-configuration#authority
+            redirectUri: string; // Points to window.location.origin. You must register this URI on Azure Portal/App Registration.
+            postLogoutRedirectUri: string; // Indicates the page to navigate after logout.
+            navigateToLoginRequestUrl: boolean; // If "true", will navigate back to the original request location before processing the auth code response.
+        };
+        cache: {
+            cacheLocation: string; // Configures cache location. "sessionStorage" is more secure, but "localStorage" gives you SSO between tabs.
+            storeAuthStateInCookie: boolean; // Set this to "true" if you are having issues on IE11 or Edge
+        };
+    };
+    loginRequest: {
+        /**
+         * Scopes you add here will be prompted for user consent during sign-in.
+         * By default, MSAL.js will add OIDC scopes (openid, profile, email) to any login request.
+         * For more information about OIDC scopes, visit:
+         * https://learn.microsoft.com/entra/identity-platform/permissions-consent-overview#openid-connect-scopes
+         */
+        scopes: Array<string>;
+    };
+    tokenRequest: {
+        scopes: Array<string>;
+    };
+}
+
+// Fetch the auth setup JSON data from the API if not already cached
+async function fetchAuthSetup(): Promise<AuthSetup> {
+    const response = await fetch("/auth_setup");
+    if (!response.ok) {
+        throw new Error(`auth setup response was not ok: ${response.status}`);
+    }
+    return await response.json();
+}
+
+const authSetup = await fetchAuthSetup();
+
+export const useLogin = authSetup.useLogin;
+
+export const requireAccessControl = authSetup.requireAccessControl;
+
+export const enableUnauthenticatedAccess = authSetup.enableUnauthenticatedAccess;
+
+export const requireLogin = requireAccessControl && !enableUnauthenticatedAccess;
+
+/**
+ * Configuration object to be passed to MSAL instance on creation.
+ * For a full list of MSAL.js configuration parameters, visit:
+ * https://github.com/AzureAD/microsoft-authentication-library-for-js/blob/dev/lib/msal-browser/docs/configuration.md
+ */
+export const msalConfig = authSetup.msalConfig;
+
+/**
+ * Scopes you add here will be prompted for user consent during sign-in.
+ * By default, MSAL.js will add OIDC scopes (openid, profile, email) to any login request.
+ * For more information about OIDC scopes, visit:
+ * https://learn.microsoft.com/entra/identity-platform/permissions-consent-overview#openid-connect-scopes
+ */
+export const loginRequest = authSetup.loginRequest;
+
+const tokenRequest = authSetup.tokenRequest;
+
+// Build an absolute redirect URI using the current window's location and the relative redirect URI from auth setup
+export const getRedirectUri = () => {
+    return window.location.origin + authSetup.msalConfig.auth.redirectUri;
+};
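// --- Illustrative sketch (not part of this PR's diff) ---
// How the exports above are typically consumed when bootstrapping the SPA: create one
// PublicClientApplication from msalConfig and keep an active account selected so that
// acquireTokenSilent (used by getToken later in this file) has an account to work with.
// Only msalConfig comes from this file; the rest of the wiring below is an assumption
// about the app entry point, not code from this PR.
import { PublicClientApplication } from "@azure/msal-browser";

const msalInstance = new PublicClientApplication(msalConfig);
await msalInstance.initialize();

// Default to the first signed-in account if none is active yet.
if (!msalInstance.getActiveAccount() && msalInstance.getAllAccounts().length > 0) {
    msalInstance.setActiveAccount(msalInstance.getAllAccounts()[0]);
}
// The instance would then be handed to MSAL React (e.g. an MsalProvider) so hooks like useMsal() can reach it.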
+
+// Cache the app services token if it's available
+// https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Operators/this#global_context
+declare global {
+    var cachedAppServicesToken: AppServicesToken | null;
+}
+globalThis.cachedAppServicesToken = null;
+
+/**
+ * Retrieves an access token if the user is logged in using app services authentication.
+ * Checks if the current token is expired and fetches a new token if necessary.
+ * Returns null if the app doesn't support app services authentication.
+ *
+ * @returns {Promise<AppServicesToken | null>} A promise that resolves to an AppServicesToken if the user is authenticated, or null if authentication is not supported or fails.
+ */
+const getAppServicesToken = (): Promise<AppServicesToken | null> => {
+    const checkNotExpired = (appServicesToken: AppServicesToken) => {
+        const currentDate = new Date();
+        const expiresOnDate = new Date(appServicesToken.expires_on);
+        return expiresOnDate > currentDate;
+    };
+
+    if (globalThis.cachedAppServicesToken && checkNotExpired(globalThis.cachedAppServicesToken)) {
+        return Promise.resolve(globalThis.cachedAppServicesToken);
+    }
+
+    const getAppServicesTokenFromMe: () => Promise<AppServicesToken | null> = () => {
+        return fetch(appServicesAuthTokenUrl).then(r => {
+            if (r.ok) {
+                return r.json().then(json => {
+                    if (json.length > 0) {
+                        return {
+                            id_token: json[0]["id_token"] as string,
+                            access_token: json[0]["access_token"] as string,
+                            user_claims: json[0]["user_claims"].reduce((acc: Record<string, any>, item: Record<string, any>) => {
+                                acc[item.typ] = item.val;
+                                return acc;
+                            }, {}) as Record<string, any>,
+                            expires_on: json[0]["expires_on"] as string
+                        } as AppServicesToken;
+                    }
+
+                    return null;
+                });
+            }
+
+            return null;
+        });
+    };
+
+    return getAppServicesTokenFromMe().then(token => {
+        if (token) {
+            if (checkNotExpired(token)) {
+                globalThis.cachedAppServicesToken = token;
+                return token;
+            }
+
+            return fetch(appServicesAuthTokenRefreshUrl).then(r => {
+                if (r.ok) {
+                    return getAppServicesTokenFromMe();
+                }
+                return null;
+            });
+        }
+
+        return null;
+    });
+};
+
+export const isUsingAppServicesLogin = (await getAppServicesToken()) != null;
+
+// Sign out of app services
+// Learn more at https://learn.microsoft.com/azure/app-service/configure-authentication-customize-sign-in-out#sign-out-of-a-session
+export const appServicesLogout = () => {
+    window.location.href = appServicesAuthLogoutUrl;
+};
+
+/**
+ * Determines if the user is logged in either via the MSAL public client application or the app services login.
+ * @param {IPublicClientApplication | undefined} client - The MSAL public client application instance, or undefined if not available.
+ * @returns {Promise<boolean>} A promise that resolves to true if the user is logged in, false otherwise.
+ */
+export const checkLoggedIn = async (client: IPublicClientApplication | undefined): Promise<boolean> => {
+    if (client) {
+        const activeAccount = client.getActiveAccount();
+        if (activeAccount) {
+            return true;
+        }
+    }
+
+    const appServicesToken = await getAppServicesToken();
+    if (appServicesToken) {
+        return true;
+    }
+
+    return false;
+};
+
+// Get an access token for use with the API server.
+// ID token received when logging in may not be used for this purpose because it has the incorrect audience
+// Use the access token from app services login if available
+export const getToken = async (client: IPublicClientApplication): Promise<string | undefined> => {
+    const appServicesToken = await getAppServicesToken();
+    if (appServicesToken) {
+        return Promise.resolve(appServicesToken.access_token);
+    }
+
+    return client
+        .acquireTokenSilent({
+            ...tokenRequest,
+            redirectUri: getRedirectUri()
+        })
+        .then(r => r.accessToken)
+        .catch(error => {
+            console.log(error);
+            return undefined;
+        });
+};
+
+/**
+ * Retrieves the username of the active account.
+ * If no active account is found, attempts to retrieve the username from the app services login token if available.
+ * @param {IPublicClientApplication} client - The MSAL public client application instance.
+ * @returns {Promise<string | null>} The username of the active account, or null if no username is found.
+ */
+export const getUsername = async (client: IPublicClientApplication): Promise<string | null> => {
+    const activeAccount = client.getActiveAccount();
+    if (activeAccount) {
+        return activeAccount.username;
+    }
+
+    const appServicesToken = await getAppServicesToken();
+    if (appServicesToken?.user_claims) {
+        return appServicesToken.user_claims.preferred_username;
+    }
+
+    return null;
+};
+
+/**
+ * Retrieves the token claims of the active account.
+ * If no active account is found, attempts to retrieve the token claims from the app services login token if available.
+ * @param {IPublicClientApplication} client - The MSAL public client application instance.
+ * @returns {Promise<Record<string, unknown> | undefined>} A promise that resolves to the token claims of the active account, the user claims from the app services login token, or undefined if no claims are found.
+ */
+export const getTokenClaims = async (client: IPublicClientApplication): Promise<Record<string, unknown> | undefined> => {
+    const activeAccount = client.getActiveAccount();
+    if (activeAccount) {
+        return activeAccount.idTokenClaims;
+    }
+
+    const appServicesToken = await getAppServicesToken();
+    if (appServicesToken) {
+        return appServicesToken.user_claims;
+    }
+
+    return undefined;
+};
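// --- Illustrative usage sketch (not part of this PR's diff) ---
// How callers are expected to combine these helpers: obtain the MSAL instance from
// useMsal().instance inside a component when useLogin is enabled, resolve a token with
// getToken(), and attach it as a bearer header on backend requests. The route name,
// the inline header-building, and the import path below are assumptions for illustration;
// the frontend's api module builds its headers via its own getHeaders helper (seen later in this PR).
import { IPublicClientApplication } from "@azure/msal-browser";
import { getToken } from "./authConfig";

async function fetchWithAuth(client: IPublicClientApplication | undefined, url: string): Promise<Response> {
    const token = client ? await getToken(client) : undefined;
    const headers: Record<string, string> = { "Content-Type": "application/json" };
    if (token) {
        headers["Authorization"] = `Bearer ${token}`;
    }
    return fetch(url, { method: "GET", headers });
}

// e.g. inside a component: const client = useLogin ? useMsal().instance : undefined;
// const response = await fetchWithAuth(client, "/config");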
diff --git a/app/frontend/src/components/AnalysisPanel/AgentPlan.tsx b/app/frontend/src/components/AnalysisPanel/AgentPlan.tsx
new file mode 100644
index 0000000000..21e246eb12
--- /dev/null
+++ b/app/frontend/src/components/AnalysisPanel/AgentPlan.tsx
@@ -0,0 +1,78 @@
+import React from "react";
+import { TokenUsageGraph, TokenUsage } from "./TokenUsageGraph";
+import { Light as SyntaxHighlighter } from "react-syntax-highlighter";
+import { a11yLight } from "react-syntax-highlighter/dist/esm/styles/hljs";
+import json from "react-syntax-highlighter/dist/esm/languages/hljs/json";
+import styles from "./AnalysisPanel.module.css";
+SyntaxHighlighter.registerLanguage("json", json);
+
+type ModelQueryPlanningStep = {
+    id: number;
+    type: "ModelQueryPlanning";
+    input_tokens: number;
+    output_tokens: number;
+};
+
+type AzureSearchQueryStep = {
+    id: number;
+    type: "AzureSearchQuery";
+    target_index: string;
+    query: { search: string };
+    query_time: string;
+    count: number;
+    elapsed_ms: number;
+};
+
+type Step = ModelQueryPlanningStep | AzureSearchQueryStep;
+
+interface Props {
+    query_plan: Step[];
+    description: any;
+}
+
+export const AgentPlan: React.FC<Props> = ({ query_plan, description }) => {
+    // find the planning step
+    const planning = query_plan.find((step): step is ModelQueryPlanningStep => step.type === "ModelQueryPlanning");
+
+    // collect all search query steps
+    const queries = query_plan.filter((step): step is AzureSearchQueryStep => step.type === "AzureSearchQuery");
+
+    return (
+        <div>
+            {planning && (
+
+            )}
+
+            <h3 className={styles.header}>Subqueries</h3>
+            {queries.length > 0 && (
+                <table className={styles.subqueriesTable}>
+                    <thead>
+                        <tr>
+                            <th>Subquery</th>
+                            <th>Total Result Count</th>
+                            <th>Elapsed MS</th>
+                        </tr>
+                    </thead>
+                    <tbody>
+                        {queries.map(q => (
+                            <tr key={q.id}>
+                                <td>{q.query.search}</td>
+                                <td>{q.count}</td>
+                                <td>{q.elapsed_ms}</td>
+                            </tr>
+                        ))}
+                    </tbody>
+                </table>
+            )}
+        </div>
+    );
+};
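// --- Illustrative sketch (not part of this PR's diff) ---
// Shape of the data AgentPlan consumes: the agentic retrieval query plan is a list of steps,
// one "ModelQueryPlanning" step carrying the planner's token counts (presumably what the
// imported TokenUsageGraph visualizes) plus one "AzureSearchQuery" step per executed subquery
// (rendered in the table). All values below are invented for illustration; the real plan
// comes from the backend's thought-process output.
const sampleQueryPlan = [
    { id: 0, type: "ModelQueryPlanning", input_tokens: 1210, output_tokens: 270 },
    {
        id: 1,
        type: "AzureSearchQuery",
        target_index: "gptkbindex",
        query: { search: "health plan deductible" },
        query_time: "2025-01-01T00:00:00Z",
        count: 5,
        elapsed_ms: 120
    }
] as const;

// Rendered from the thought-process view, e.g.:
// <AgentPlan query_plan={[...sampleQueryPlan]} description={thought.description} />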
diff --git a/app/frontend/src/components/AnalysisPanel/AnalysisPanel.module.css b/app/frontend/src/components/AnalysisPanel/AnalysisPanel.module.css
index 909ac03d44..9dae51a8e8 100644
--- a/app/frontend/src/components/AnalysisPanel/AnalysisPanel.module.css
+++ b/app/frontend/src/components/AnalysisPanel/AnalysisPanel.module.css
@@ -1,6 +1,150 @@
 .thoughtProcess {
     font-family: source-code-pro, Menlo, Monaco, Consolas, "Courier New", monospace;
     word-wrap: break-word;
-    padding-top: 12px;
-    padding-bottom: 12px;
+    padding-top: 0.75em;
+    padding-bottom: 0.75em;
+}
+
+.tList {
+    padding: 1.25em 1.25em 0 1.25em;
+    display: inline-block;
+    background: #e9e9e9;
+}
+
+.tListItem {
+    list-style: none;
+    margin: auto;
+    margin-left: 1.25em;
+    min-height: 3.125em;
+    border-left: 0.0625em solid #123bb6;
+    padding: 0 0 1.875em 1.875em;
+    position: relative;
+}
+
+.tListItem:last-child {
+    border-left: 0;
+}
+
+.tListItem::before {
+    position: absolute;
+    left: -18px;
+    top: -5px;
+    content: " ";
+    border: 8px solid #d1dbfa;
+    border-radius: 500%;
+    background: #123bb6;
+    height: 20px;
+    width: 20px;
+}
+
+.tStep {
+    color: #123bb6;
+    position: relative;
+    font-size: 0.875em;
+    margin-bottom: 0.5em;
+}
+
+.tQuery {
+    color: #010207;
+    position: relative;
+    font-size: 0.875em;
+    margin-bottom: 0.5em;
+}
+
+.tCodeBlock {
+    max-height: 18.75em;
+}
+
+.tProp {
+    background-color: #d7d7d7;
+    color: #333232;
+    font-size: 0.75em;
+    padding: 0.1875em 0.625em;
+    border-radius: 0.625em;
+    margin-bottom: 0.5em;
+}
+
+.citationImg {
+    height: 28.125rem;
+    max-width: 100%;
+    object-fit: contain;
+}
+
+.header {
+    color: #123bb6;
+    position: relative;
+    font-size: 0.875em;
+    margin-bottom: 0.5em;
+}
+
+.reasoningEffort {
+    font-size: 14px;
+    margin-bottom: 8px;
+}
+
+.tokenUsageGraph {
+    margin: 16px 0;
+}
+
+.tokenBar {
+    height: 20px;
+    margin-bottom: 4px;
+    padding-left: 4px;
+    font-size: 12px;
+    display: flex;
+    align-items: center;
+    white-space: nowrap;
+    overflow: hidden;
+}
+
+.tokenBar {
+    height: 20px;
+    margin-bottom: 4px;
+    padding-left: 4px;
+    font-size: 12px;
+    display: flex;
+    flex-wrap: wrap;
+    align-items: center;
+    background-color: #d7d7d7;
+    white-space: nowrap;
+    overflow: visible;
+    min-width: max-content;
+}
+
+/* Adjust tokenLabel to allow bar-specific text color overrides */
+.tokenLabel {
+    padding-right: 4px;
+}
+
+.primaryBarContainer {
+    width: fit-content;
+    display: flex;
+    gap: 4px;
+}
+
+.promptBar {
+    background-color: #a82424;
+    color: #ffffff; /* White text for contrast */
+}
+
+.reasoningBar {
+    background-color: #265e29;
+    color: #ffffff;
+}
+
+.outputBar {
+    background-color: #12579b;
+    color: #ffffff;
+}
+
+.totalBar {
+    background-color: #424242;
+    color: #ffffff;
+}
+
+.subqueriesTable,
+.subqueriesTable th,
+.subqueriesTable td,
+.subqueriesTable tr {
+    background: #fff;
 }
"../../api"; import { AnalysisPanelTabs } from "./AnalysisPanelTabs"; +import { ThoughtProcess } from "./ThoughtProcess"; +import { MarkdownViewer } from "../MarkdownViewer"; +import { useMsal } from "@azure/msal-react"; +import { getHeaders } from "../../api"; +import { useLogin, getToken } from "../../authConfig"; +import { useState, useEffect } from "react"; interface Props { className: string; @@ -13,17 +18,58 @@ interface Props { onActiveTabChanged: (tab: AnalysisPanelTabs) => void; activeCitation: string | undefined; citationHeight: string; - answer: AskResponse; + answer: ChatAppResponse; } const pivotItemDisabledStyle = { disabled: true, style: { color: "grey" } }; export const AnalysisPanel = ({ answer, activeTab, activeCitation, citationHeight, className, onActiveTabChanged }: Props) => { - const isDisabledThoughtProcessTab: boolean = !answer.thoughts; - const isDisabledSupportingContentTab: boolean = !answer.data_points.length; + const isDisabledThoughtProcessTab: boolean = !answer.context.thoughts; + const isDisabledSupportingContentTab: boolean = !answer.context.data_points; const isDisabledCitationTab: boolean = !activeCitation; + const [citation, setCitation] = useState(""); + + const client = useLogin ? useMsal().instance : undefined; + const { t } = useTranslation(); + + const fetchCitation = async () => { + const token = client ? await getToken(client) : undefined; + if (activeCitation) { + // Get hash from the URL as it may contain #page=N + // which helps browser PDF renderer jump to correct page N + const originalHash = activeCitation.indexOf("#") ? activeCitation.split("#")[1] : ""; + const response = await fetch(activeCitation, { + method: "GET", + headers: await getHeaders(token) + }); + const citationContent = await response.blob(); + let citationObjectUrl = URL.createObjectURL(citationContent); + // Add hash back to the new blob URL + if (originalHash) { + citationObjectUrl += "#" + originalHash; + } + setCitation(citationObjectUrl); + } + }; + useEffect(() => { + fetchCitation(); + }, []); + + const renderFileViewer = () => { + if (!activeCitation) { + return null; + } - const sanitizedThoughts = DOMPurify.sanitize(answer.thoughts!); + const fileExtension = activeCitation.split(".").pop()?.toLowerCase(); + switch (fileExtension) { + case "png": + return Citation Image; + case "md": + return ; + default: + return