diff --git a/.env.sample b/.env.sample index 4cf6eb730..42874a224 100644 --- a/.env.sample +++ b/.env.sample @@ -14,6 +14,9 @@ AZURE_SEARCH_FIELDS_TAG=tag AZURE_SEARCH_FIELDS_METADATA=metadata AZURE_SEARCH_FILENAME_COLUMN=filepath AZURE_SEARCH_TITLE_COLUMN=title +AZURE_SEARCH_SOURCE_COLUMN=source +AZURE_SEARCH_TEXT_COLUMN=text +AZURE_SEARCH_LAYOUT_TEXT_COLUMN=layoutText AZURE_SEARCH_URL_COLUMN=url AZURE_SEARCH_CONVERSATIONS_LOG_INDEX=conversations-log AZURE_SEARCH_USE_INTEGRATED_VECTORIZATION=false @@ -60,6 +63,8 @@ AZURE_SPEECH_SERVICE_REGION= AZURE_AUTH_TYPE=keys USE_KEY_VAULT=true AZURE_KEY_VAULT_ENDPOINT= +# Application environment (e.g., dev, prod) +APP_ENV="dev" # Chat conversation type to decide between custom or byod (bring your own data) conversation type CONVERSATION_FLOW= # Chat History CosmosDB Integration Settings diff --git a/.github/workflows/broken-links-checker.yml b/.github/workflows/broken-links-checker.yml index 1270b867c..51984487e 100644 --- a/.github/workflows/broken-links-checker.yml +++ b/.github/workflows/broken-links-checker.yml @@ -20,27 +20,32 @@ jobs: with: fetch-depth: 0 - - name: Get Added/Modified Markdown Files (PR only) - id: changed-files + # For PR : Get only changed markdown files + - name: Get changed markdown files (PR only) + id: changed-markdown-files if: github.event_name == 'pull_request' - run: | - git fetch origin ${{ github.base_ref }} - files=$(git diff --name-only origin/${{ github.base_ref }}...HEAD | grep '\.md$' || true) - echo "md_files<> $GITHUB_OUTPUT - echo "$files" >> $GITHUB_OUTPUT - echo "EOF" >> $GITHUB_OUTPUT - - name: Check Broken Links in Added/Modified Files (PR) - if: github.event_name == 'pull_request' && steps.changed-files.outputs.md_files != '' + uses: tj-actions/changed-files@ed68ef82c095e0d48ec87eccea555d944a631a4c # v46 + with: + files: | + **/*.md + + + # For PR: Check broken links only in changed files + - name: Check Broken Links in Changed Markdown Files + id: lychee-check-pr + if: 
github.event_name == 'pull_request' && steps.changed-markdown-files.outputs.any_changed == 'true' uses: lycheeverse/lychee-action@v2.4.1 with: args: > --verbose --exclude-mail --no-progress --exclude ^https?:// - ${{ steps.changed-files.outputs.md_files }} + ${{ steps.changed-markdown-files.outputs.all_changed_files }} failIfEmpty: false env: GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - - name: Check Broken Links in Entire Repo (Manual) + # For manual trigger: Check all markdown files in repo + - name: Check Broken Links in All Markdown Files in Entire Repo (Manual Trigger) + id: lychee-check-manual if: github.event_name == 'workflow_dispatch' uses: lycheeverse/lychee-action@v2.4.1 with: @@ -48,6 +53,5 @@ jobs: --verbose --exclude-mail --no-progress --exclude ^https?:// '**/*.md' failIfEmpty: false - output: lychee/out.md env: GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} diff --git a/.github/workflows/build-docker-images.yml b/.github/workflows/build-docker-images.yml index cf5086f29..3a3ec9d4f 100644 --- a/.github/workflows/build-docker-images.yml +++ b/.github/workflows/build-docker-images.yml @@ -32,9 +32,7 @@ jobs: dockerfile: docker/Frontend.Dockerfile uses: ./.github/workflows/build-docker.yml with: - old_registry: ${{ github.ref_name == 'main' && 'fruoccopublic.azurecr.io' }} new_registry: 'cwydcontainerreg.azurecr.io' - old_username: ${{ github.ref_name == 'main' && 'fruoccopublic' }} new_username: 'cwydcontainerreg' app_name: ${{ matrix.app_name }} dockerfile: ${{ matrix.dockerfile }} diff --git a/.github/workflows/build-docker.yml b/.github/workflows/build-docker.yml index 0ae7100e4..cc79d7de9 100644 --- a/.github/workflows/build-docker.yml +++ b/.github/workflows/build-docker.yml @@ -3,12 +3,6 @@ name: Reusable Docker build and push workflow on: workflow_call: inputs: - old_registry: - required: true - type: string - old_username: - required: true - type: string new_registry: required: true type: string @@ -37,15 +31,6 @@ jobs: - name: Checkout uses: 
actions/checkout@v4 - # Login for 'main' branch to both registries - - name: Docker Login to fruoccopublic (Main) - if: ${{ inputs.push == true && github.ref_name == 'main' }} - uses: docker/login-action@v3 - with: - registry: ${{ inputs.old_registry }} - username: ${{ inputs.old_username }} - password: ${{ secrets.DOCKER_PASSWORD }} - - name: Docker Login to cwydcontainerreg (Main) if: ${{ inputs.push == true && github.ref_name == 'main' }} uses: docker/login-action@v3 @@ -70,18 +55,6 @@ jobs: id: date run: echo "date=$(date +'%Y-%m-%d')" >> $GITHUB_OUTPUT - - name: Build Docker Image and optionally push (Old Registry) - if: ${{ github.ref_name == 'main' }} - uses: docker/build-push-action@v6 - with: - context: . - file: ${{ inputs.dockerfile }} - push: ${{ inputs.push }} - cache-from: type=registry,ref=${{ inputs.old_registry }}/${{ inputs.app_name }}:${{ github.ref_name == 'main' && 'latest' || github.head_ref || github.ref_name }} - tags: | - ${{ inputs.old_registry }}/${{ inputs.app_name }}:${{ github.ref_name == 'main' && 'latest' || github.head_ref || 'default' }} - ${{ inputs.old_registry }}/${{ inputs.app_name }}:${{ steps.date.outputs.date }}_${{ github.run_number }} - - name: Build Docker Image and optionally push (New Registry) if: ${{ github.ref_name == 'main' || github.ref_name == 'dev' || github.ref_name == 'demo'|| github.ref_name == 'dependabotchanges' }} uses: docker/build-push-action@v6 diff --git a/.github/workflows/group_dependabot_security_updates.yml b/.github/workflows/group_dependabot_security_updates.yml new file mode 100644 index 000000000..d284f03eb --- /dev/null +++ b/.github/workflows/group_dependabot_security_updates.yml @@ -0,0 +1,267 @@ +# Workflow: Group Dependabot PRs +# Description: +# This GitHub Actions workflow automatically groups open Dependabot PRs by ecosystem (pip, npm). +# It cherry-picks individual PR changes into grouped branches, resolves merge conflicts automatically, and opens consolidated PRs. 
+# It also closes the original Dependabot PRs and carries over their labels and metadata. +# Improvements: +# - Handles multiple conflicting files during cherry-pick +# - Deduplicates entries in PR description +# - Avoids closing original PRs unless grouped PR creation succeeds +# - More efficient retry logic +# - Ecosystem grouping is now configurable via native YAML map +# - Uses safe namespaced branch naming (e.g. actions/grouped-...) to avoid developer conflict +# - Ensures PR body formatting uses real newlines for better readability +# - Adds strict error handling for script robustness +# - Accounts for tool dependencies (jq, gh) and race conditions +# - Optimized PR metadata lookup by preloading into associative array +# - Supports --dry-run mode for validation/testing without side effects +# - Note: PRs created during workflow execution will be picked up in the next scheduled run. + +name: Group Dependabot PRs + +on: + schedule: + - cron: '0 0 * * *' # Run daily at midnight UTC + workflow_dispatch: + inputs: + group_config_pip: + description: "Group name for pip ecosystem" + required: false + default: "backend" + group_config_npm: + description: "Group name for npm ecosystem" + required: false + default: "frontend" + group_config_yarn: + description: "Group name for yarn ecosystem" + required: false + default: "frontend" + dry_run: + description: "Run in dry-run mode (no changes will be pushed or PRs created/closed)" + required: false + default: false + type: boolean + +jobs: + group-dependabot-prs: + runs-on: ubuntu-latest + permissions: + contents: write + pull-requests: write + env: + GH_TOKEN: ${{ secrets.GITHUB_TOKEN }} + TARGET_BRANCH: "main" + DRY_RUN: ${{ github.event.inputs.dry_run || 'false' }} + GROUP_CONFIG_PIP: ${{ github.event.inputs.group_config_pip || 'backend' }} + GROUP_CONFIG_NPM: ${{ github.event.inputs.group_config_npm || 'frontend' }} + GROUP_CONFIG_YARN: ${{ github.event.inputs.group_config_yarn || 'frontend' }} + steps: + - name: 
Checkout default branch + uses: actions/checkout@v4 + + - name: Set up Git + run: | + git config --global user.name "github-actions" + git config --global user.email "github-actions@github.com" + + - name: Install required tools + uses: awalsh128/cache-apt-pkgs-action@v1.3.1 + with: + packages: "jq gh" + + - name: Enable strict error handling + shell: bash + run: | + set -euo pipefail + + - name: Fetch open Dependabot PRs targeting main + id: fetch_prs + run: | + gh pr list \ + --search "author:dependabot[bot] base:$TARGET_BRANCH is:open" \ + --limit 100 \ + --json number,title,headRefName,labels,files,url \ + --jq '[.[] | {number, title, url, ref: .headRefName, labels: [.labels[].name], files: [.files[].path]}]' > prs.json + cat prs.json + + - name: Validate prs.json + run: | + jq empty prs.json 2> jq_error.log || { echo "Malformed JSON in prs.json: $(cat jq_error.log)"; exit 1; } + + - name: Check if any PRs exist + id: check_prs + run: | + count=$(jq length prs.json) + echo "Found $count PRs" + if [ "$count" -eq 0 ]; then + echo "No PRs to group. Exiting." + echo "skip=true" >> $GITHUB_OUTPUT + fi + + - name: Exit early if no PRs + if: steps.check_prs.outputs.skip == 'true' + run: exit 0 + + - name: Dry-run validation (CI/test only) + if: env.DRY_RUN == 'true' + run: | + echo "Running in dry-run mode. No changes will be pushed or PRs created/closed." + # Optionally, add more validation logic here (e.g., check grouped files, print planned actions). 
+
+      - name: Group PRs by ecosystem and cherry-pick with retry
+        run: |
+          declare -A GROUP_CONFIG=(
+            [pip]="${GROUP_CONFIG_PIP:-backend}"
+            [npm]="${GROUP_CONFIG_NPM:-frontend}"
+            [yarn]="${GROUP_CONFIG_YARN:-frontend}"
+          )
+          mkdir -p grouped
+          jq -c '.[]' prs.json | while read pr; do
+            ref=$(echo "$pr" | jq -r '.ref')
+            number=$(echo "$pr" | jq -r '.number')
+            group="misc"
+            for key in "${!GROUP_CONFIG[@]}"; do
+              if [[ "$ref" == *"$key"* ]]; then
+                group="${GROUP_CONFIG[$key]}"
+                break
+              fi
+            done
+            echo "$number $ref $group" >> grouped/$group.txt
+          done
+
+          shopt -s nullglob
+          grouped_files=(grouped/*.txt)
+
+          if [ ${#grouped_files[@]} -eq 0 ]; then
+            echo "No groups were formed. Exiting."
+            exit 0
+          fi
+
+          declare -A pr_metadata_map
+          while IFS=$'\t' read -r number title url labels; do
+            pr_metadata_map["$number"]="$title|$url|$labels"
+          done < <(jq -r '.[] | "\(.number)\t\(.title)\t\(.url)\t\(.labels | join(","))"' prs.json)
+
+          for file in "${grouped_files[@]}"; do
+            group_name=$(basename "$file" .txt)
+            # Sanitize group_name: allow only alphanum, dash, underscore
+            safe_group_name="${group_name//[^[:alnum:]_-]/-}"
+            branch_name="security/grouped-${safe_group_name}-updates"
+            git checkout -B "$branch_name"
+
+            while read -r number ref group; do
+              git fetch origin "$ref"
+              if ! git cherry-pick FETCH_HEAD; then
+                echo "Conflict found in $ref. Attempting to resolve."
+                conflict_files=($(git diff --name-only --diff-filter=U))
+                if [ ${#conflict_files[@]} -gt 0 ]; then
+                  echo "Resolving conflicts in files: ${conflict_files[*]}"
+                  for conflict_file in "${conflict_files[@]}"; do
+                    echo "Resolving conflict in $conflict_file"
+                    git checkout --theirs "$conflict_file"
+                    git add "$conflict_file"
+                  done
+                  git cherry-pick --continue || {
+                    echo "Failed to continue cherry-pick. Aborting."
+                    git cherry-pick --abort
+                    continue 2
+                  }
+                else
+                  echo "No conflicting files found. Aborting."
+ git cherry-pick --abort + continue 2 + fi + fi + done < "$file" + + # Non-destructive push: check for drift before force-pushing + if [ "$DRY_RUN" == "true" ]; then + echo "[DRY-RUN] Skipping git push for $branch_name" + else + remote_hash=$(git ls-remote origin "$branch_name" | awk '{print $1}') + local_hash=$(git rev-parse "$branch_name") + if [ -n "$remote_hash" ] && [ "$remote_hash" != "$local_hash" ]; then + echo "Remote branch $branch_name has diverged. Skipping force-push to avoid overwriting changes." + continue + fi + git push --force-with-lease origin "$branch_name" + fi + + new_lines="" + while read -r number ref group; do + IFS="|" read -r title url _ <<< "${pr_metadata_map["$number"]}" + new_lines+="$title - [#$number]($url)\n" + done < "$file" + + pr_title="chore(deps): bump grouped $group_name Dependabot updates" + # Add --state open to ensure only open PRs are considered + existing_url=$(gh pr list --head "$branch_name" --base "$TARGET_BRANCH" --state open --json url --jq '.[0].url // empty') + + if [ -n "$existing_url" ]; then + echo "PR already exists: $existing_url" + pr_url="$existing_url" + current_body=$(gh pr view "$pr_url" --json body --jq .body) + # Simplified duplicate-detection using Bash array + IFS=$'\n' read -d '' -r -a current_lines < <(printf '%s\0' "$current_body") + IFS=$'\n' read -d '' -r -a new_lines_arr < <(printf '%b\0' "$new_lines") + declare -A seen + for line in "${current_lines[@]}"; do + seen["$line"]=1 + done + filtered_lines="" + for line in "${new_lines_arr[@]}"; do + if [[ -n "$line" && -z "${seen["$line"]}" ]]; then + filtered_lines+="$line\n" + fi + done + # Ensure a newline separator between the existing body and new lines + if [ -n "$filtered_lines" ]; then + new_body="$current_body"$'\n'"$filtered_lines" + else + new_body="$current_body" + fi + if [ "$DRY_RUN" == "true" ]; then + echo "[DRY-RUN] Would update PR body for $pr_url" + else + tmpfile=$(mktemp) + printf '%s' "$new_body" > "$tmpfile" + gh pr edit 
"$pr_url" --body-file "$tmpfile" + rm -f "$tmpfile" + fi + else + pr_body=$(printf "This PR groups multiple open PRs by Dependabot for %s.\n\n%b" "$group_name" "$new_lines") + if [ "$DRY_RUN" == "true" ]; then + echo "[DRY-RUN] Would create PR titled: $pr_title" + echo "$pr_body" + pr_url="" + else + pr_url=$(gh pr create \ + --title "$pr_title" \ + --body "$pr_body" \ + --base "$TARGET_BRANCH" \ + --head "$branch_name") + fi + fi + + if [ -n "$pr_url" ]; then + for number in $(cut -d ' ' -f1 "$file"); do + IFS="|" read -r _ _ labels <<< "${pr_metadata_map["$number"]}" + IFS="," read -ra label_arr <<< "$labels" + for label in "${label_arr[@]}"; do + if [ "$DRY_RUN" == "true" ]; then + echo "[DRY-RUN] Would add label $label to $pr_url" + else + gh pr edit "$pr_url" --add-label "$label" + fi + done + if [ "$DRY_RUN" == "true" ]; then + echo "[DRY-RUN] Would close PR #$number" + else + gh pr close "$number" --comment "Grouped into $pr_url." + fi + done + echo "Grouped PR created. Leaving branch $branch_name for now." + else + echo "Grouped PR was not created. Skipping closing of original PRs." + fi + done diff --git a/README.md b/README.md index 794067e5d..4d36ebf71 100644 --- a/README.md +++ b/README.md @@ -182,6 +182,12 @@ To review Cosmos DB configuration overview and steps, follow the link [here](doc ![Solution Architecture - Chat with your data CosmosDB](/docs/images/architecture_cdb.png) ### Deploy instructions +
+ +> ⚠️ **Important: Check Azure OpenAI Quota Availability** +
To ensure sufficient quota is available in your subscription, please follow [quota check instructions guide](./docs/QuotaCheck.md) before you deploy the solution. + +
The "Deploy to Azure" button offers a one-click deployment where you don’t have to clone the code. If you would like a developer experience instead, follow the [local deployment instructions](./docs/LOCAL_DEPLOYMENT.md). Once you deploy to Azure, you will have the option to select PostgreSQL or Cosmos DB, see screenshot below. diff --git a/code/backend/batch/utilities/chat_history/database_factory.py b/code/backend/batch/utilities/chat_history/database_factory.py index 980c2cf82..5482581c6 100644 --- a/code/backend/batch/utilities/chat_history/database_factory.py +++ b/code/backend/batch/utilities/chat_history/database_factory.py @@ -2,7 +2,7 @@ from ..helpers.env_helper import EnvHelper from .cosmosdb import CosmosConversationClient from .postgresdbservice import PostgresConversationClient -from azure.identity import DefaultAzureCredential +from ..helpers.azure_credential_utils import get_azure_credential from ..helpers.config.database_type import DatabaseType @@ -25,7 +25,7 @@ def get_conversation_client(): f"https://{env_helper.AZURE_COSMOSDB_ACCOUNT}.documents.azure.com:443/" ) credential = ( - DefaultAzureCredential() + get_azure_credential() if not env_helper.AZURE_COSMOSDB_ACCOUNT_KEY else env_helper.AZURE_COSMOSDB_ACCOUNT_KEY ) diff --git a/code/backend/batch/utilities/chat_history/postgresdbservice.py b/code/backend/batch/utilities/chat_history/postgresdbservice.py index a758bb20c..bb53fb190 100644 --- a/code/backend/batch/utilities/chat_history/postgresdbservice.py +++ b/code/backend/batch/utilities/chat_history/postgresdbservice.py @@ -1,7 +1,7 @@ import logging import asyncpg from datetime import datetime, timezone -from azure.identity import DefaultAzureCredential +from ..helpers.azure_credential_utils import get_azure_credential from .database_client_base import DatabaseClientBase @@ -21,7 +21,7 @@ def __init__( async def connect(self): try: - credential = DefaultAzureCredential() + credential = get_azure_credential() token = credential.get_token( 
"https://ossrdbms-aad.database.windows.net/.default" ).token diff --git a/code/backend/batch/utilities/helpers/azure_blob_storage_client.py b/code/backend/batch/utilities/helpers/azure_blob_storage_client.py index fe53dfd23..39b41a9de 100644 --- a/code/backend/batch/utilities/helpers/azure_blob_storage_client.py +++ b/code/backend/batch/utilities/helpers/azure_blob_storage_client.py @@ -12,7 +12,7 @@ from azure.storage.queue import QueueClient, BinaryBase64EncodePolicy import chardet from .env_helper import EnvHelper -from azure.identity import DefaultAzureCredential +from .azure_credential_utils import get_azure_credential def connection_string(account_name: str, account_key: str): @@ -25,7 +25,7 @@ def create_queue_client(): return QueueClient( account_url=f"https://{env_helper.AZURE_BLOB_ACCOUNT_NAME}.queue.core.windows.net/", queue_name=env_helper.DOCUMENT_PROCESSING_QUEUE_NAME, - credential=DefaultAzureCredential(), + credential=get_azure_credential(), message_encode_policy=BinaryBase64EncodePolicy(), ) @@ -56,7 +56,7 @@ def __init__( if self.auth_type == "rbac": self.account_key = None self.blob_service_client = BlobServiceClient( - account_url=self.endpoint, credential=DefaultAzureCredential() + account_url=self.endpoint, credential=get_azure_credential() ) self.user_delegation_key = self.request_user_delegation_key( blob_service_client=self.blob_service_client diff --git a/code/backend/batch/utilities/helpers/azure_computer_vision_client.py b/code/backend/batch/utilities/helpers/azure_computer_vision_client.py index 6ab0733f3..d838f9d9e 100644 --- a/code/backend/batch/utilities/helpers/azure_computer_vision_client.py +++ b/code/backend/batch/utilities/helpers/azure_computer_vision_client.py @@ -1,6 +1,7 @@ import logging from urllib.parse import urljoin -from azure.identity import DefaultAzureCredential, get_bearer_token_provider +from azure.identity import get_bearer_token_provider +from .azure_credential_utils import get_azure_credential import requests 
from requests import Response @@ -56,7 +57,7 @@ def __make_request(self, path: str, body) -> Response: headers["Ocp-Apim-Subscription-Key"] = self.key else: token_provider = get_bearer_token_provider( - DefaultAzureCredential(), self.__TOKEN_SCOPE + get_azure_credential(), self.__TOKEN_SCOPE ) headers["Authorization"] = "Bearer " + token_provider() diff --git a/code/backend/batch/utilities/helpers/azure_credential_utils.py b/code/backend/batch/utilities/helpers/azure_credential_utils.py new file mode 100644 index 000000000..e8d9d7051 --- /dev/null +++ b/code/backend/batch/utilities/helpers/azure_credential_utils.py @@ -0,0 +1,48 @@ +import os +from azure.identity import ManagedIdentityCredential, DefaultAzureCredential +from azure.identity.aio import ( + ManagedIdentityCredential as AioManagedIdentityCredential, + DefaultAzureCredential as AioDefaultAzureCredential, +) + + +async def get_azure_credential_async(client_id=None): + """ + Returns an Azure credential asynchronously based on the application environment. + + If the environment is 'dev', it uses AioDefaultAzureCredential. + Otherwise, it uses AioManagedIdentityCredential. + + Args: + client_id (str, optional): The client ID for the Managed Identity Credential. + + Returns: + Credential object: Either AioDefaultAzureCredential or AioManagedIdentityCredential. + """ + if os.getenv("APP_ENV", "prod").lower() == "dev": + return ( + AioDefaultAzureCredential() + ) # CodeQL [SM05139] Okay use of DefaultAzureCredential as it is only used in development + else: + return AioManagedIdentityCredential(client_id=client_id) + + +def get_azure_credential(client_id=None): + """ + Returns an Azure credential based on the application environment. + + If the environment is 'dev', it uses DefaultAzureCredential. + Otherwise, it uses ManagedIdentityCredential. + + Args: + client_id (str, optional): The client ID for the Managed Identity Credential. 
+ + Returns: + Credential object: Either DefaultAzureCredential or ManagedIdentityCredential. + """ + if os.getenv("APP_ENV", "prod").lower() == "dev": + return ( + DefaultAzureCredential() + ) # CodeQL [SM05139] Okay use of DefaultAzureCredential as it is only used in development + else: + return ManagedIdentityCredential(client_id=client_id) diff --git a/code/backend/batch/utilities/helpers/azure_form_recognizer_helper.py b/code/backend/batch/utilities/helpers/azure_form_recognizer_helper.py index 5abb54d15..a00f07340 100644 --- a/code/backend/batch/utilities/helpers/azure_form_recognizer_helper.py +++ b/code/backend/batch/utilities/helpers/azure_form_recognizer_helper.py @@ -1,7 +1,7 @@ import logging from azure.core.credentials import AzureKeyCredential from azure.ai.formrecognizer import DocumentAnalysisClient -from azure.identity import DefaultAzureCredential +from .azure_credential_utils import get_azure_credential import html import traceback from .env_helper import EnvHelper @@ -19,7 +19,7 @@ def __init__(self) -> None: if env_helper.AZURE_AUTH_TYPE == "rbac": self.document_analysis_client = DocumentAnalysisClient( endpoint=self.AZURE_FORM_RECOGNIZER_ENDPOINT, - credential=DefaultAzureCredential(), + credential=get_azure_credential(), headers={ "x-ms-useragent": "chat-with-your-data-solution-accelerator/1.0.0" }, diff --git a/code/backend/batch/utilities/helpers/azure_postgres_helper.py b/code/backend/batch/utilities/helpers/azure_postgres_helper.py index 674ba166a..d5ca7263f 100644 --- a/code/backend/batch/utilities/helpers/azure_postgres_helper.py +++ b/code/backend/batch/utilities/helpers/azure_postgres_helper.py @@ -1,7 +1,7 @@ import logging import psycopg2 from psycopg2.extras import execute_values, RealDictCursor -from azure.identity import DefaultAzureCredential +from .azure_credential_utils import get_azure_credential from .llm_helper import LLMHelper from .env_helper import EnvHelper @@ -24,7 +24,7 @@ def _create_search_client(self): dbname = 
self.env_helper.POSTGRESQL_DATABASE

         # Acquire the access token
-        credential = DefaultAzureCredential()
+        credential = get_azure_credential()
         access_token = credential.get_token(
             "https://ossrdbms-aad.database.windows.net/.default"
         )
diff --git a/code/backend/batch/utilities/helpers/azure_search_helper.py b/code/backend/batch/utilities/helpers/azure_search_helper.py
index c0314645d..090494e50 100644
--- a/code/backend/batch/utilities/helpers/azure_search_helper.py
+++ b/code/backend/batch/utilities/helpers/azure_search_helper.py
@@ -2,7 +2,7 @@ from typing import Union
 from langchain_community.vectorstores import AzureSearch
-from azure.core.credentials import AzureKeyCredential
-from azure.identity import DefaultAzureCredential
+from azure.core.credentials import AzureKeyCredential, TokenCredential
+from .azure_credential_utils import get_azure_credential
 from azure.search.documents import SearchClient
 from azure.search.documents.indexes import SearchIndexClient
 from azure.search.documents.indexes.models import (
@@ -49,10 +49,10 @@ def _search_credential(self):
         if self.env_helper.is_auth_type_keys():
             return AzureKeyCredential(self.env_helper.AZURE_SEARCH_KEY)
         else:
-            return DefaultAzureCredential()
+            return get_azure_credential()

     def _create_search_client(
-        self, search_credential: Union[AzureKeyCredential, DefaultAzureCredential]
+        self, search_credential: Union[AzureKeyCredential, TokenCredential]
     ) -> SearchClient:
         return SearchClient(
             endpoint=self.env_helper.AZURE_SEARCH_SERVICE,
@@ -61,7 +61,7 @@ def _create_search_client(
     )

     def _create_search_index_client(
-        self, search_credential: Union[AzureKeyCredential, DefaultAzureCredential]
+        self, search_credential: Union[AzureKeyCredential, TokenCredential]
     ):
         return SearchIndexClient(
             endpoint=self.env_helper.AZURE_SEARCH_SERVICE, credential=search_credential
@@ -132,6 +132,25 @@ def create_index(self):
                 filterable=True,
             ),
         ]
+        if self.env_helper.AZURE_SEARCH_USE_INTEGRATED_VECTORIZATION:
+            logger.info("Adding 'text' field for integrated vectorization.")
+            fields.append(
+
SearchableField( + name=self.env_helper.AZURE_SEARCH_TEXT_COLUMN, + type=SearchFieldDataType.String, + filterable=False, + sortable=False, + ) + ) + logger.info("Adding 'layoutText' field for integrated vectorization.") + fields.append( + SearchableField( + name=self.env_helper.AZURE_SEARCH_LAYOUT_TEXT_COLUMN, + type=SearchFieldDataType.String, + filterable=False, + sortable=False, + ) + ) if self.env_helper.USE_ADVANCED_IMAGE_PROCESSING: logger.info("Adding image_vector field to index") @@ -266,7 +285,7 @@ def get_conversation_logger(self): ] if self.env_helper.AZURE_AUTH_TYPE == "rbac": - credential = DefaultAzureCredential() + credential = get_azure_credential() return AzureSearch( azure_search_endpoint=self.env_helper.AZURE_SEARCH_SERVICE, azure_search_key=None, # Remove API key @@ -274,7 +293,7 @@ def get_conversation_logger(self): embedding_function=self.llm_helper.get_embedding_model().embed_query, fields=fields, user_agent="langchain chatwithyourdata-sa", - credential=credential # Add token credential or send none so it is auto handled by AzureSearch library + credential=credential, # Add token credential or send none so it is auto handled by AzureSearch library ) else: return AzureSearch( diff --git a/code/backend/batch/utilities/helpers/env_helper.py b/code/backend/batch/utilities/helpers/env_helper.py index 58ad0b484..390f7b48c 100644 --- a/code/backend/batch/utilities/helpers/env_helper.py +++ b/code/backend/batch/utilities/helpers/env_helper.py @@ -3,7 +3,8 @@ import logging import threading from dotenv import load_dotenv -from azure.identity import DefaultAzureCredential, get_bearer_token_provider +from azure.identity import get_bearer_token_provider +from .azure_credential_utils import get_azure_credential from azure.keyvault.secrets import SecretClient from ..orchestrator.orchestration_strategy import OrchestrationStrategy @@ -76,6 +77,10 @@ def __load_config(self, **kwargs) -> None: self.AZURE_SEARCH_SOURCE_COLUMN = os.getenv( 
"AZURE_SEARCH_SOURCE_COLUMN", "source" ) + self.AZURE_SEARCH_TEXT_COLUMN = os.getenv("AZURE_SEARCH_TEXT_COLUMN", "text") + self.AZURE_SEARCH_LAYOUT_TEXT_COLUMN = os.getenv( + "AZURE_SEARCH_LAYOUT_TEXT_COLUMN", "layoutText" + ) self.AZURE_SEARCH_CHUNK_COLUMN = os.getenv("AZURE_SEARCH_CHUNK_COLUMN", "chunk") self.AZURE_SEARCH_OFFSET_COLUMN = os.getenv( "AZURE_SEARCH_OFFSET_COLUMN", "offset" @@ -173,9 +178,7 @@ def __load_config(self, **kwargs) -> None: self.AZURE_OPENAI_MODEL_NAME = azure_openai_model_info.get("modelName", "") else: # Otherwise, fallback to individual environment variables - self.AZURE_OPENAI_MODEL = os.getenv( - "AZURE_OPENAI_MODEL", "gpt-4.1" - ) + self.AZURE_OPENAI_MODEL = os.getenv("AZURE_OPENAI_MODEL", "gpt-4.1") self.AZURE_OPENAI_MODEL_NAME = os.getenv( "AZURE_OPENAI_MODEL_NAME", "gpt-4.1" ) @@ -214,7 +217,7 @@ def __load_config(self, **kwargs) -> None: ) self.AZURE_TOKEN_PROVIDER = get_bearer_token_provider( - DefaultAzureCredential(), "https://cognitiveservices.azure.com/.default" + get_azure_credential(), "https://cognitiveservices.azure.com/.default" ) self.ADVANCED_IMAGE_PROCESSING_MAX_IMAGES = self.get_env_var_int( "ADVANCED_IMAGE_PROCESSING_MAX_IMAGES", 1 @@ -359,8 +362,8 @@ def __load_config(self, **kwargs) -> None: self.OPEN_AI_FUNCTIONS_SYSTEM_PROMPT = os.getenv( "OPEN_AI_FUNCTIONS_SYSTEM_PROMPT", "" ) - self.SEMENTIC_KERNEL_SYSTEM_PROMPT = os.getenv( - "SEMENTIC_KERNEL_SYSTEM_PROMPT", "" + self.SEMANTIC_KERNEL_SYSTEM_PROMPT = os.getenv( + "SEMANTIC_KERNEL_SYSTEM_PROMPT", "" ) self.ENFORCE_AUTH = self.get_env_var_bool("ENFORCE_AUTH", "True") @@ -414,7 +417,7 @@ def __init__(self) -> None: The constructor sets the USE_KEY_VAULT attribute based on the value of the USE_KEY_VAULT environment variable. If USE_KEY_VAULT is set to "true" (case-insensitive), it initializes a SecretClient object using the - AZURE_KEY_VAULT_ENDPOINT environment variable and the DefaultAzureCredential. 
+ AZURE_KEY_VAULT_ENDPOINT environment variable and the get_azure_credential. Args: None @@ -426,7 +429,7 @@ def __init__(self) -> None: self.secret_client = None if self.USE_KEY_VAULT: self.secret_client = SecretClient( - os.environ.get("AZURE_KEY_VAULT_ENDPOINT"), DefaultAzureCredential() + os.environ.get("AZURE_KEY_VAULT_ENDPOINT"), get_azure_credential() ) def get_secret(self, secret_name: str) -> str: diff --git a/code/backend/batch/utilities/helpers/llm_helper.py b/code/backend/batch/utilities/helpers/llm_helper.py index 7517fb575..3edd0913c 100644 --- a/code/backend/batch/utilities/helpers/llm_helper.py +++ b/code/backend/batch/utilities/helpers/llm_helper.py @@ -8,7 +8,7 @@ AzureChatPromptExecutionSettings, ) from azure.ai.ml import MLClient -from azure.identity import DefaultAzureCredential +from .azure_credential_utils import get_azure_credential from .env_helper import EnvHelper logger = logging.getLogger(__name__) @@ -166,7 +166,7 @@ def get_sk_service_settings(self, service: AzureChatCompletion): def get_ml_client(self): if not hasattr(self, "_ml_client"): self._ml_client = MLClient( - DefaultAzureCredential(), + get_azure_credential(), self.env_helper.AZURE_SUBSCRIPTION_ID, self.env_helper.AZURE_RESOURCE_GROUP, self.env_helper.AZURE_ML_WORKSPACE_NAME, diff --git a/code/backend/batch/utilities/integrated_vectorization/azure_search_datasource.py b/code/backend/batch/utilities/integrated_vectorization/azure_search_datasource.py index 60ab35729..af4931998 100644 --- a/code/backend/batch/utilities/integrated_vectorization/azure_search_datasource.py +++ b/code/backend/batch/utilities/integrated_vectorization/azure_search_datasource.py @@ -7,7 +7,7 @@ ) from azure.search.documents.indexes import SearchIndexerClient from ..helpers.env_helper import EnvHelper -from azure.identity import DefaultAzureCredential +from ..helpers.azure_credential_utils import get_azure_credential from azure.core.credentials import AzureKeyCredential @@ -19,7 +19,7 @@ def 
__init__(self, env_helper: EnvHelper): ( AzureKeyCredential(self.env_helper.AZURE_SEARCH_KEY) if self.env_helper.is_auth_type_keys() - else DefaultAzureCredential() + else get_azure_credential() ), ) diff --git a/code/backend/batch/utilities/integrated_vectorization/azure_search_index.py b/code/backend/batch/utilities/integrated_vectorization/azure_search_index.py index 413082460..8c95b927e 100644 --- a/code/backend/batch/utilities/integrated_vectorization/azure_search_index.py +++ b/code/backend/batch/utilities/integrated_vectorization/azure_search_index.py @@ -21,7 +21,7 @@ SearchIndex, ) from ..helpers.env_helper import EnvHelper -from azure.identity import DefaultAzureCredential +from ..helpers.azure_credential_utils import get_azure_credential from azure.core.credentials import AzureKeyCredential from ..helpers.llm_helper import LLMHelper @@ -39,7 +39,7 @@ def __init__(self, env_helper: EnvHelper, llm_helper: LLMHelper): ( AzureKeyCredential(self.env_helper.AZURE_SEARCH_KEY) if self.env_helper.is_auth_type_keys() - else DefaultAzureCredential() + else get_azure_credential() ), ) @@ -99,6 +99,26 @@ def create_or_update_index(self): ), ] + if self.env_helper.AZURE_SEARCH_USE_INTEGRATED_VECTORIZATION: + logger.info("Adding `text` field for integrated vectorization.") + fields.append( + SearchableField( + name="text", + type=SearchFieldDataType.String, + filterable=False, + sortable=False, + ) + ) + logger.info("Adding `layoutText` field for integrated vectorization.") + fields.append( + SearchableField( + name="layoutText", + type=SearchFieldDataType.String, + filterable=False, + sortable=False, + ) + ) + vector_search = self.get_vector_search_config() semantic_search = self.get_semantic_search_config() diff --git a/code/backend/batch/utilities/integrated_vectorization/azure_search_indexer.py b/code/backend/batch/utilities/integrated_vectorization/azure_search_indexer.py index 9be9fb858..20e1fe8a2 100644 --- 
a/code/backend/batch/utilities/integrated_vectorization/azure_search_indexer.py +++ b/code/backend/batch/utilities/integrated_vectorization/azure_search_indexer.py @@ -2,7 +2,7 @@ from azure.search.documents.indexes.models import SearchIndexer, FieldMapping from azure.search.documents.indexes import SearchIndexerClient from ..helpers.env_helper import EnvHelper -from azure.identity import DefaultAzureCredential +from ..helpers.azure_credential_utils import get_azure_credential from azure.core.credentials import AzureKeyCredential logger = logging.getLogger(__name__) @@ -16,7 +16,7 @@ def __init__(self, env_helper: EnvHelper): ( AzureKeyCredential(self.env_helper.AZURE_SEARCH_KEY) if self.env_helper.is_auth_type_keys() - else DefaultAzureCredential() + else get_azure_credential() ), ) diff --git a/code/backend/batch/utilities/integrated_vectorization/azure_search_skillset.py b/code/backend/batch/utilities/integrated_vectorization/azure_search_skillset.py index 622fa3152..ec1fdac40 100644 --- a/code/backend/batch/utilities/integrated_vectorization/azure_search_skillset.py +++ b/code/backend/batch/utilities/integrated_vectorization/azure_search_skillset.py @@ -15,7 +15,7 @@ from azure.search.documents.indexes import SearchIndexerClient from ..helpers.config.config_helper import IntegratedVectorizationConfig from ..helpers.env_helper import EnvHelper -from azure.identity import DefaultAzureCredential +from ..helpers.azure_credential_utils import get_azure_credential from azure.core.credentials import AzureKeyCredential logger = logging.getLogger(__name__) @@ -33,7 +33,7 @@ def __init__( ( AzureKeyCredential(self.env_helper.AZURE_SEARCH_KEY) if self.env_helper.is_auth_type_keys() - else DefaultAzureCredential() + else get_azure_credential() ), ) self.integrated_vectorization_config = integrated_vectorization_config diff --git a/code/backend/batch/utilities/orchestrator/semantic_kernel.py b/code/backend/batch/utilities/orchestrator/semantic_kernel.py index 
8cc743c0d..44bc57057 100644 --- a/code/backend/batch/utilities/orchestrator/semantic_kernel.py +++ b/code/backend/batch/utilities/orchestrator/semantic_kernel.py @@ -41,7 +41,7 @@ async def orchestrate( if response := self.call_content_safety_input(user_message): return response - system_message = self.env_helper.SEMENTIC_KERNEL_SYSTEM_PROMPT + system_message = self.env_helper.SEMANTIC_KERNEL_SYSTEM_PROMPT if not system_message: system_message = """You help employees to navigate only private information sources. You must prioritize the function call over your general knowledge for any question by calling the search_documents function. diff --git a/code/backend/batch/utilities/search/integrated_vectorization_search_handler.py b/code/backend/batch/utilities/search/integrated_vectorization_search_handler.py index d9470a6a0..5179e4c6c 100644 --- a/code/backend/batch/utilities/search/integrated_vectorization_search_handler.py +++ b/code/backend/batch/utilities/search/integrated_vectorization_search_handler.py @@ -5,7 +5,7 @@ from azure.search.documents.indexes import SearchIndexClient from azure.search.documents.models import VectorizableTextQuery from azure.core.credentials import AzureKeyCredential -from azure.identity import DefaultAzureCredential +from ..helpers.azure_credential_utils import get_azure_credential from ..common.source_document import SourceDocument import re @@ -21,7 +21,7 @@ def create_search_client(self): credential=( AzureKeyCredential(self.env_helper.AZURE_SEARCH_KEY) if self.env_helper.is_auth_type_keys() - else DefaultAzureCredential() + else get_azure_credential() ), ) @@ -170,7 +170,7 @@ def _check_index_exists(self) -> bool: credential=( AzureKeyCredential(self.env_helper.AZURE_SEARCH_KEY) if self.env_helper.is_auth_type_keys() - else DefaultAzureCredential() + else get_azure_credential() ), ) diff --git a/code/backend/batch/utilities/tools/content_safety_checker.py b/code/backend/batch/utilities/tools/content_safety_checker.py index 
efba3a4c4..89ab1c30a 100644 --- a/code/backend/batch/utilities/tools/content_safety_checker.py +++ b/code/backend/batch/utilities/tools/content_safety_checker.py @@ -1,7 +1,7 @@ import logging from azure.ai.contentsafety import ContentSafetyClient from azure.core.credentials import AzureKeyCredential -from azure.identity import DefaultAzureCredential +from ..helpers.azure_credential_utils import get_azure_credential from azure.core.exceptions import HttpResponseError from azure.ai.contentsafety.models import AnalyzeTextOptions from ..helpers.env_helper import EnvHelper @@ -19,7 +19,7 @@ def __init__(self): logger.info("Initializing ContentSafetyClient with RBAC authentication.") self.content_safety_client = ContentSafetyClient( env_helper.AZURE_CONTENT_SAFETY_ENDPOINT, - DefaultAzureCredential(), + get_azure_credential(), ) else: logger.info( diff --git a/code/create_app.py b/code/create_app.py index c32da6cec..e1cade2ab 100644 --- a/code/create_app.py +++ b/code/create_app.py @@ -22,7 +22,7 @@ from backend.batch.utilities.helpers.config.conversation_flow import ConversationFlow from backend.api.chat_history import bp_chat_history_response from azure.mgmt.cognitiveservices import CognitiveServicesManagementClient -from azure.identity import DefaultAzureCredential +from backend.batch.utilities.helpers.azure_credential_utils import get_azure_credential from backend.batch.utilities.helpers.azure_blob_storage_client import ( AzureBlobStorageClient, ) @@ -376,7 +376,7 @@ def get_speech_key(env_helper: EnvHelper): This is required to generate short-lived tokens when using RBAC. 
""" client = CognitiveServicesManagementClient( - credential=DefaultAzureCredential(), + credential=get_azure_credential(), subscription_id=env_helper.AZURE_SUBSCRIPTION_ID, ) keys = client.accounts.list_keys( diff --git a/code/tests/chat_history/test_database_factory.py b/code/tests/chat_history/test_database_factory.py index 0a1734171..a487fb8a5 100644 --- a/code/tests/chat_history/test_database_factory.py +++ b/code/tests/chat_history/test_database_factory.py @@ -8,7 +8,7 @@ ) -@patch("backend.batch.utilities.chat_history.database_factory.DefaultAzureCredential") +@patch("backend.batch.utilities.chat_history.database_factory.get_azure_credential") @patch("backend.batch.utilities.chat_history.database_factory.EnvHelper") @patch( "backend.batch.utilities.chat_history.database_factory.CosmosConversationClient", @@ -50,7 +50,7 @@ def test_get_conversation_client_cosmos( assert client == mock_cosmos_instance -@patch("backend.batch.utilities.chat_history.database_factory.DefaultAzureCredential") +@patch("backend.batch.utilities.chat_history.database_factory.get_azure_credential") @patch("backend.batch.utilities.chat_history.database_factory.EnvHelper") @patch( "backend.batch.utilities.chat_history.database_factory.PostgresConversationClient", diff --git a/code/tests/chat_history/test_postgresdbservice.py b/code/tests/chat_history/test_postgresdbservice.py index e160e4a7b..7710f423e 100644 --- a/code/tests/chat_history/test_postgresdbservice.py +++ b/code/tests/chat_history/test_postgresdbservice.py @@ -21,10 +21,10 @@ def mock_connection(): @patch("backend.batch.utilities.chat_history.postgresdbservice.asyncpg.connect") -@patch("backend.batch.utilities.chat_history.postgresdbservice.DefaultAzureCredential") +@patch("backend.batch.utilities.chat_history.postgresdbservice.get_azure_credential") @pytest.mark.asyncio async def test_connect(mock_credential, mock_connect, postgres_client, mock_connection): - # Mock DefaultAzureCredential + # Mock get_azure_credential 
mock_credential.return_value.get_token.return_value.token = "mock_token" # Mock asyncpg connection diff --git a/code/tests/functional/tests/functions/integrated_vectorization/test_integrated_vectorization_resource_creation.py b/code/tests/functional/tests/functions/integrated_vectorization/test_integrated_vectorization_resource_creation.py index 32be05562..99f0c42e4 100644 --- a/code/tests/functional/tests/functions/integrated_vectorization/test_integrated_vectorization_resource_creation.py +++ b/code/tests/functional/tests/functions/integrated_vectorization/test_integrated_vectorization_resource_creation.py @@ -99,7 +99,9 @@ def test_integrated_vectorization_datasouce_created( "credentials": { "connectionString": f"DefaultEndpointsProtocol=https;AccountName={app_config.get_from_json('AZURE_BLOB_STORAGE_INFO','accountName')};AccountKey={app_config.get_from_json('AZURE_BLOB_STORAGE_INFO','accountKey')};EndpointSuffix=core.windows.net" }, - "container": {"name": f"{app_config.get_from_json('AZURE_BLOB_STORAGE_INFO','containerName')}"}, + "container": { + "name": f"{app_config.get_from_json('AZURE_BLOB_STORAGE_INFO','containerName')}" + }, "dataDeletionDetectionPolicy": { "@odata.type": "#Microsoft.Azure.Search.NativeBlobSoftDeleteDeletionDetectionPolicy" }, @@ -210,6 +212,26 @@ def test_integrated_vectorization_index_created( "facetable": True, "analyzer": "keyword", }, + { + "name": "text", + "type": "Edm.String", + "key": False, + "retrievable": True, + "searchable": True, + "filterable": False, + "sortable": False, + "facetable": False, + }, + { + "name": "layoutText", + "type": "Edm.String", + "key": False, + "retrievable": True, + "searchable": True, + "filterable": False, + "sortable": False, + "facetable": False, + }, ], "semantic": { "configurations": [ diff --git a/code/tests/test_app.py b/code/tests/test_app.py index e6ff82c34..0ffa9af9e 100644 --- a/code/tests/test_app.py +++ b/code/tests/test_app.py @@ -25,6 +25,9 @@ AZURE_SEARCH_CONTENT_COLUMN = 
"field1|field2" AZURE_SEARCH_CONTENT_VECTOR_COLUMN = "vector-column" AZURE_SEARCH_TITLE_COLUMN = "title" +AZURE_SEARCH_SOURCE_COLUMN = "source" +AZURE_SEARCH_TEXT_COLUMN = "text" +AZURE_SEARCH_LAYOUT_TEXT_COLUMN = "layoutText" AZURE_SEARCH_FILENAME_COLUMN = "filename" AZURE_SEARCH_URL_COLUMN = "metadata" AZURE_SEARCH_FILTER = "filter" @@ -73,6 +76,9 @@ def env_helper_mock(): AZURE_SEARCH_CONTENT_VECTOR_COLUMN ) env_helper.AZURE_SEARCH_TITLE_COLUMN = AZURE_SEARCH_TITLE_COLUMN + env_helper.AZURE_SEARCH_SOURCE_COLUMN = AZURE_SEARCH_SOURCE_COLUMN + env_helper.AZURE_SEARCH_TEXT_COLUMN = AZURE_SEARCH_TEXT_COLUMN + env_helper.AZURE_SEARCH_LAYOUT_TEXT_COLUMN = AZURE_SEARCH_LAYOUT_TEXT_COLUMN env_helper.AZURE_SEARCH_FILENAME_COLUMN = AZURE_SEARCH_FILENAME_COLUMN env_helper.AZURE_SEARCH_URL_COLUMN = AZURE_SEARCH_URL_COLUMN env_helper.AZURE_SEARCH_FILTER = AZURE_SEARCH_FILTER diff --git a/code/tests/utilities/helpers/test_azure_computer_vision_client.py b/code/tests/utilities/helpers/test_azure_computer_vision_client.py index ff8c70b9e..506542028 100644 --- a/code/tests/utilities/helpers/test_azure_computer_vision_client.py +++ b/code/tests/utilities/helpers/test_azure_computer_vision_client.py @@ -94,7 +94,7 @@ def test_vectorize_image_calls_computer_vision_with_key_based_authentication( @mock.patch( - "backend.batch.utilities.helpers.azure_computer_vision_client.DefaultAzureCredential" + "backend.batch.utilities.helpers.azure_computer_vision_client.get_azure_credential" ) @mock.patch( "backend.batch.utilities.helpers.azure_computer_vision_client.get_bearer_token_provider" diff --git a/code/tests/utilities/helpers/test_azure_credential_utils.py b/code/tests/utilities/helpers/test_azure_credential_utils.py new file mode 100644 index 000000000..879fc7146 --- /dev/null +++ b/code/tests/utilities/helpers/test_azure_credential_utils.py @@ -0,0 +1,105 @@ +import sys +import os +from unittest.mock import patch, MagicMock +import pytest +import 
backend.batch.utilities.helpers.azure_credential_utils as azure_credential_utils + +sys.path.insert(0, os.path.abspath(os.path.join(os.path.dirname(__file__), "..", ".."))) + +# Synchronous tests + + +@patch("backend.batch.utilities.helpers.azure_credential_utils.os.getenv") +@patch("backend.batch.utilities.helpers.azure_credential_utils.DefaultAzureCredential") +@patch( + "backend.batch.utilities.helpers.azure_credential_utils.ManagedIdentityCredential" +) +def test_get_azure_credential_dev_env( + mock_managed_identity_credential, mock_default_azure_credential, mock_getenv +): + """Test get_azure_credential in dev environment.""" + mock_getenv.return_value = "dev" + mock_default_credential = MagicMock() + mock_default_azure_credential.return_value = mock_default_credential + + credential = azure_credential_utils.get_azure_credential() + + mock_getenv.assert_called_once_with("APP_ENV", "prod") + mock_default_azure_credential.assert_called_once() + mock_managed_identity_credential.assert_not_called() + assert credential == mock_default_credential + + +@patch("backend.batch.utilities.helpers.azure_credential_utils.os.getenv") +@patch("backend.batch.utilities.helpers.azure_credential_utils.DefaultAzureCredential") +@patch( + "backend.batch.utilities.helpers.azure_credential_utils.ManagedIdentityCredential" +) +def test_get_azure_credential_non_dev_env( + mock_managed_identity_credential, mock_default_azure_credential, mock_getenv +): + """Test get_azure_credential in non-dev environment.""" + mock_getenv.return_value = "prod" + mock_managed_credential = MagicMock() + mock_managed_identity_credential.return_value = mock_managed_credential + credential = azure_credential_utils.get_azure_credential(client_id="test-client-id") + + mock_getenv.assert_called_once_with("APP_ENV", "prod") + mock_managed_identity_credential.assert_called_once_with(client_id="test-client-id") + mock_default_azure_credential.assert_not_called() + assert credential == mock_managed_credential + + 
+# Asynchronous tests + + +@pytest.mark.asyncio +@patch("backend.batch.utilities.helpers.azure_credential_utils.os.getenv") +@patch( + "backend.batch.utilities.helpers.azure_credential_utils.AioDefaultAzureCredential" +) +@patch( + "backend.batch.utilities.helpers.azure_credential_utils.AioManagedIdentityCredential" +) +async def test_get_azure_credential_async_dev_env( + mock_aio_managed_identity_credential, mock_aio_default_azure_credential, mock_getenv +): + """Test get_azure_credential_async in dev environment.""" + mock_getenv.return_value = "dev" + mock_aio_default_credential = MagicMock() + mock_aio_default_azure_credential.return_value = mock_aio_default_credential + + credential = await azure_credential_utils.get_azure_credential_async() + + mock_getenv.assert_called_once_with("APP_ENV", "prod") + mock_aio_default_azure_credential.assert_called_once() + mock_aio_managed_identity_credential.assert_not_called() + assert credential == mock_aio_default_credential + + +@pytest.mark.asyncio +@patch("backend.batch.utilities.helpers.azure_credential_utils.os.getenv") +@patch( + "backend.batch.utilities.helpers.azure_credential_utils.AioDefaultAzureCredential" +) +@patch( + "backend.batch.utilities.helpers.azure_credential_utils.AioManagedIdentityCredential" +) +async def test_get_azure_credential_async_non_dev_env( + mock_aio_managed_identity_credential, mock_aio_default_azure_credential, mock_getenv +): + """Test get_azure_credential_async in non-dev environment.""" + mock_getenv.return_value = "prod" + mock_aio_managed_credential = MagicMock() + mock_aio_managed_identity_credential.return_value = mock_aio_managed_credential + + credential = await azure_credential_utils.get_azure_credential_async( + client_id="test-client-id" + ) + + mock_getenv.assert_called_once_with("APP_ENV", "prod") + mock_aio_managed_identity_credential.assert_called_once_with( + client_id="test-client-id" + ) + mock_aio_default_azure_credential.assert_not_called() + assert credential == 
mock_aio_managed_credential diff --git a/code/tests/utilities/helpers/test_azure_postgres_helper.py b/code/tests/utilities/helpers/test_azure_postgres_helper.py index 7fc10fcec..fb908acab 100644 --- a/code/tests/utilities/helpers/test_azure_postgres_helper.py +++ b/code/tests/utilities/helpers/test_azure_postgres_helper.py @@ -5,9 +5,7 @@ class TestAzurePostgresHelper(unittest.TestCase): - @patch( - "backend.batch.utilities.helpers.azure_postgres_helper.DefaultAzureCredential" - ) + @patch("backend.batch.utilities.helpers.azure_postgres_helper.get_azure_credential") @patch("backend.batch.utilities.helpers.azure_postgres_helper.psycopg2.connect") def test_create_search_client_success(self, mock_connect, mock_credential): # Arrange @@ -52,14 +50,10 @@ def test_get_search_client_reuses_connection(self, mock_connect): self.assertEqual(connection, mock_connection) mock_connect.assert_not_called() # Ensure no new connection is created - @patch( - "backend.batch.utilities.helpers.azure_postgres_helper.DefaultAzureCredential" - ) + @patch("backend.batch.utilities.helpers.azure_postgres_helper.get_azure_credential") @patch("backend.batch.utilities.helpers.azure_postgres_helper.psycopg2.connect") @patch("backend.batch.utilities.helpers.azure_postgres_helper.RealDictCursor") - def test_get_vector_store_success( - self, mock_cursor, mock_connect, mock_credential - ): + def test_get_vector_store_success(self, mock_cursor, mock_connect, mock_credential): # Arrange # Mock the EnvHelper and set required attributes mock_env_helper = MagicMock() @@ -101,9 +95,7 @@ def test_get_vector_store_success( "host=mock_host user=mock_user dbname=mock_database password=mock-access-token" ) - @patch( - "backend.batch.utilities.helpers.azure_postgres_helper.DefaultAzureCredential" - ) + @patch("backend.batch.utilities.helpers.azure_postgres_helper.get_azure_credential") @patch("backend.batch.utilities.helpers.azure_postgres_helper.psycopg2.connect") def test_get_vector_store_query_error(self, 
mock_connect, mock_credential): # Arrange @@ -142,9 +134,7 @@ def raise_exception(*args, **kwargs): self.assertEqual(str(context.exception), "Query execution error") - @patch( - "backend.batch.utilities.helpers.azure_postgres_helper.DefaultAzureCredential" - ) + @patch("backend.batch.utilities.helpers.azure_postgres_helper.get_azure_credential") @patch("backend.batch.utilities.helpers.azure_postgres_helper.psycopg2.connect") def test_create_search_client_connection_error(self, mock_connect, mock_credential): # Arrange @@ -174,9 +164,7 @@ def raise_exception(*args, **kwargs): self.assertEqual(str(context.exception), "Connection error") - @patch( - "backend.batch.utilities.helpers.azure_postgres_helper.DefaultAzureCredential" - ) + @patch("backend.batch.utilities.helpers.azure_postgres_helper.get_azure_credential") @patch("backend.batch.utilities.helpers.azure_postgres_helper.psycopg2.connect") @patch("backend.batch.utilities.helpers.azure_postgres_helper.EnvHelper") def test_get_files_success(self, mock_env_helper, mock_connect, mock_credential): @@ -215,9 +203,7 @@ def test_get_files_success(self, mock_env_helper, mock_connect, mock_credential) ) mock_connection.close.assert_called_once() - @patch( - "backend.batch.utilities.helpers.azure_postgres_helper.DefaultAzureCredential" - ) + @patch("backend.batch.utilities.helpers.azure_postgres_helper.get_azure_credential") @patch("backend.batch.utilities.helpers.azure_postgres_helper.psycopg2.connect") @patch("backend.batch.utilities.helpers.azure_postgres_helper.EnvHelper") def test_get_files_no_results(self, mock_env_helper, mock_connect, mock_credential): @@ -251,9 +237,7 @@ def test_get_files_no_results(self, mock_env_helper, mock_connect, mock_credenti self.assertIsNone(result) mock_connection.close.assert_called_once() - @patch( - "backend.batch.utilities.helpers.azure_postgres_helper.DefaultAzureCredential" - ) + @patch("backend.batch.utilities.helpers.azure_postgres_helper.get_azure_credential") 
@patch("backend.batch.utilities.helpers.azure_postgres_helper.psycopg2.connect") @patch("backend.batch.utilities.helpers.azure_postgres_helper.EnvHelper") @patch("backend.batch.utilities.helpers.azure_postgres_helper.logger") @@ -292,9 +276,7 @@ def test_get_files_db_error( ) mock_connection.close.assert_called_once() - @patch( - "backend.batch.utilities.helpers.azure_postgres_helper.DefaultAzureCredential" - ) + @patch("backend.batch.utilities.helpers.azure_postgres_helper.get_azure_credential") @patch("backend.batch.utilities.helpers.azure_postgres_helper.psycopg2.connect") @patch("backend.batch.utilities.helpers.azure_postgres_helper.EnvHelper") @patch("backend.batch.utilities.helpers.azure_postgres_helper.logger") @@ -333,9 +315,7 @@ def test_get_files_unexpected_error( ) mock_connection.close.assert_called_once() - @patch( - "backend.batch.utilities.helpers.azure_postgres_helper.DefaultAzureCredential" - ) + @patch("backend.batch.utilities.helpers.azure_postgres_helper.get_azure_credential") @patch("backend.batch.utilities.helpers.azure_postgres_helper.psycopg2.connect") @patch("backend.batch.utilities.helpers.azure_postgres_helper.logger") @patch("backend.batch.utilities.helpers.azure_postgres_helper.EnvHelper") @@ -377,9 +357,7 @@ def test_delete_documents_success( mock_connection.close.assert_called_once() mock_logger.info.assert_called_with("Deleted 3 documents.") - @patch( - "backend.batch.utilities.helpers.azure_postgres_helper.DefaultAzureCredential" - ) + @patch("backend.batch.utilities.helpers.azure_postgres_helper.get_azure_credential") @patch("backend.batch.utilities.helpers.azure_postgres_helper.psycopg2.connect") @patch("backend.batch.utilities.helpers.azure_postgres_helper.logger") @patch("backend.batch.utilities.helpers.azure_postgres_helper.EnvHelper") @@ -417,9 +395,7 @@ def test_delete_documents_no_ids( mock_logger.warning.assert_called_with("No IDs provided for deletion.") mock_connection.close.assert_called_once() - @patch( - 
"backend.batch.utilities.helpers.azure_postgres_helper.DefaultAzureCredential" - ) + @patch("backend.batch.utilities.helpers.azure_postgres_helper.get_azure_credential") @patch("backend.batch.utilities.helpers.azure_postgres_helper.psycopg2.connect") @patch("backend.batch.utilities.helpers.azure_postgres_helper.logger") @patch("backend.batch.utilities.helpers.azure_postgres_helper.EnvHelper") @@ -461,9 +437,7 @@ def test_delete_documents_db_error( mock_connection.rollback.assert_called_once() mock_connection.close.assert_called_once() - @patch( - "backend.batch.utilities.helpers.azure_postgres_helper.DefaultAzureCredential" - ) + @patch("backend.batch.utilities.helpers.azure_postgres_helper.get_azure_credential") @patch("backend.batch.utilities.helpers.azure_postgres_helper.psycopg2.connect") @patch("backend.batch.utilities.helpers.azure_postgres_helper.logger") @patch("backend.batch.utilities.helpers.azure_postgres_helper.EnvHelper") @@ -505,9 +479,7 @@ def test_delete_documents_unexpected_error( mock_connection.rollback.assert_called_once() mock_connection.close.assert_called_once() - @patch( - "backend.batch.utilities.helpers.azure_postgres_helper.DefaultAzureCredential" - ) + @patch("backend.batch.utilities.helpers.azure_postgres_helper.get_azure_credential") @patch("backend.batch.utilities.helpers.azure_postgres_helper.psycopg2.connect") @patch("backend.batch.utilities.helpers.azure_postgres_helper.logger") @patch("backend.batch.utilities.helpers.azure_postgres_helper.EnvHelper") @@ -558,9 +530,7 @@ def test_perform_search_success( mock_connection.close.assert_called_once() mock_logger.info.assert_called_with("Retrieved 1 search result(s).") - @patch( - "backend.batch.utilities.helpers.azure_postgres_helper.DefaultAzureCredential" - ) + @patch("backend.batch.utilities.helpers.azure_postgres_helper.get_azure_credential") @patch("backend.batch.utilities.helpers.azure_postgres_helper.psycopg2.connect") 
@patch("backend.batch.utilities.helpers.azure_postgres_helper.logger") @patch("backend.batch.utilities.helpers.azure_postgres_helper.EnvHelper") @@ -602,9 +572,7 @@ def test_perform_search_no_results( mock_connection.close.assert_called_once() mock_logger.info.assert_called_with("Retrieved 0 search result(s).") - @patch( - "backend.batch.utilities.helpers.azure_postgres_helper.DefaultAzureCredential" - ) + @patch("backend.batch.utilities.helpers.azure_postgres_helper.get_azure_credential") @patch("backend.batch.utilities.helpers.azure_postgres_helper.psycopg2.connect") @patch("backend.batch.utilities.helpers.azure_postgres_helper.logger") @patch("backend.batch.utilities.helpers.azure_postgres_helper.EnvHelper") @@ -645,9 +613,7 @@ def test_perform_search_error( ) mock_connection.close.assert_called_once() - @patch( - "backend.batch.utilities.helpers.azure_postgres_helper.DefaultAzureCredential" - ) + @patch("backend.batch.utilities.helpers.azure_postgres_helper.get_azure_credential") @patch("backend.batch.utilities.helpers.azure_postgres_helper.psycopg2.connect") @patch("backend.batch.utilities.helpers.azure_postgres_helper.logger") @patch("backend.batch.utilities.helpers.azure_postgres_helper.EnvHelper") @@ -692,9 +658,7 @@ def test_get_unique_files_success( mock_connection.close.assert_called_once() mock_logger.info.assert_called_with("Retrieved 2 unique title(s).") - @patch( - "backend.batch.utilities.helpers.azure_postgres_helper.DefaultAzureCredential" - ) + @patch("backend.batch.utilities.helpers.azure_postgres_helper.get_azure_credential") @patch("backend.batch.utilities.helpers.azure_postgres_helper.psycopg2.connect") @patch("backend.batch.utilities.helpers.azure_postgres_helper.logger") @patch("backend.batch.utilities.helpers.azure_postgres_helper.EnvHelper") @@ -734,9 +698,7 @@ def test_get_unique_files_no_results( mock_connection.close.assert_called_once() mock_logger.info.assert_called_with("Retrieved 0 unique title(s).") - @patch( - 
"backend.batch.utilities.helpers.azure_postgres_helper.DefaultAzureCredential" - ) + @patch("backend.batch.utilities.helpers.azure_postgres_helper.get_azure_credential") @patch("backend.batch.utilities.helpers.azure_postgres_helper.psycopg2.connect") @patch("backend.batch.utilities.helpers.azure_postgres_helper.logger") @patch("backend.batch.utilities.helpers.azure_postgres_helper.EnvHelper") @@ -775,9 +737,7 @@ def test_get_unique_files_error( ) mock_connection.close.assert_called_once() - @patch( - "backend.batch.utilities.helpers.azure_postgres_helper.DefaultAzureCredential" - ) + @patch("backend.batch.utilities.helpers.azure_postgres_helper.get_azure_credential") @patch("backend.batch.utilities.helpers.azure_postgres_helper.psycopg2.connect") @patch("backend.batch.utilities.helpers.azure_postgres_helper.logger") @patch("backend.batch.utilities.helpers.azure_postgres_helper.EnvHelper") @@ -823,9 +783,7 @@ def test_search_by_blob_url_success( mock_connection.close.assert_called_once() mock_logger.info.assert_called_with("Retrieved 2 unique title(s).") - @patch( - "backend.batch.utilities.helpers.azure_postgres_helper.DefaultAzureCredential" - ) + @patch("backend.batch.utilities.helpers.azure_postgres_helper.get_azure_credential") @patch("backend.batch.utilities.helpers.azure_postgres_helper.psycopg2.connect") @patch("backend.batch.utilities.helpers.azure_postgres_helper.logger") @patch("backend.batch.utilities.helpers.azure_postgres_helper.EnvHelper") @@ -866,9 +824,7 @@ def test_search_by_blob_url_no_results( mock_connection.close.assert_called_once() mock_logger.info.assert_called_with("Retrieved 0 unique title(s).") - @patch( - "backend.batch.utilities.helpers.azure_postgres_helper.DefaultAzureCredential" - ) + @patch("backend.batch.utilities.helpers.azure_postgres_helper.get_azure_credential") @patch("backend.batch.utilities.helpers.azure_postgres_helper.psycopg2.connect") @patch("backend.batch.utilities.helpers.azure_postgres_helper.logger") 
@patch("backend.batch.utilities.helpers.azure_postgres_helper.EnvHelper") diff --git a/code/tests/utilities/helpers/test_azure_search_helper.py b/code/tests/utilities/helpers/test_azure_search_helper.py index 4d246c021..53e71db78 100644 --- a/code/tests/utilities/helpers/test_azure_search_helper.py +++ b/code/tests/utilities/helpers/test_azure_search_helper.py @@ -32,6 +32,8 @@ AZURE_SEARCH_TITLE_COLUMN = "mock-title" AZURE_SEARCH_FIELDS_METADATA = "mock-metadata" AZURE_SEARCH_SOURCE_COLUMN = "mock-source" +AZURE_SEARCH_TEXT_COLUMN = "mock-text" +AZURE_SEARCH_LAYOUT_TEXT_COLUMN = "mock-layout-text" AZURE_SEARCH_CHUNK_COLUMN = "mock-chunk" AZURE_SEARCH_OFFSET_COLUMN = "mock-offset" AZURE_SEARCH_SEMANTIC_SEARCH_CONFIG = "default" @@ -78,6 +80,8 @@ def env_helper_mock(): env_helper.AZURE_SEARCH_TITLE_COLUMN = AZURE_SEARCH_TITLE_COLUMN env_helper.AZURE_SEARCH_FIELDS_METADATA = AZURE_SEARCH_FIELDS_METADATA env_helper.AZURE_SEARCH_SOURCE_COLUMN = AZURE_SEARCH_SOURCE_COLUMN + env_helper.AZURE_SEARCH_TEXT_COLUMN = AZURE_SEARCH_TEXT_COLUMN + env_helper.AZURE_SEARCH_LAYOUT_TEXT_COLUMN = AZURE_SEARCH_LAYOUT_TEXT_COLUMN env_helper.AZURE_SEARCH_CHUNK_COLUMN = AZURE_SEARCH_CHUNK_COLUMN env_helper.AZURE_SEARCH_OFFSET_COLUMN = AZURE_SEARCH_OFFSET_COLUMN env_helper.AZURE_SEARCH_SEMANTIC_SEARCH_CONFIG = ( @@ -138,7 +142,7 @@ def test_creates_search_clients_with_keys( @patch("backend.batch.utilities.helpers.azure_search_helper.SearchClient") @patch("backend.batch.utilities.helpers.azure_search_helper.SearchIndexClient") -@patch("backend.batch.utilities.helpers.azure_search_helper.DefaultAzureCredential") +@patch("backend.batch.utilities.helpers.azure_search_helper.get_azure_credential") def test_creates_search_clients_with_rabc( default_azure_credential_mock: MagicMock, search_index_client_mock: MagicMock, @@ -232,6 +236,16 @@ def test_creates_search_index_if_not_exists( type=SearchFieldDataType.Int32, filterable=True, ), + SearchableField( + name=AZURE_SEARCH_TEXT_COLUMN, + 
type=SearchFieldDataType.String, + filterable=False, + ), + SearchableField( + name=AZURE_SEARCH_LAYOUT_TEXT_COLUMN, + type=SearchFieldDataType.String, + filterable=False, + ), ] expected_index = SearchIndex( diff --git a/code/tests/utilities/helpers/test_llm_helper.py b/code/tests/utilities/helpers/test_llm_helper.py index 6608ccc83..57a783a18 100644 --- a/code/tests/utilities/helpers/test_llm_helper.py +++ b/code/tests/utilities/helpers/test_llm_helper.py @@ -139,7 +139,7 @@ def test_generate_embeddings_returns_embeddings(azure_openai_mock): assert actual_embeddings == expected_embeddings -@patch("backend.batch.utilities.helpers.llm_helper.DefaultAzureCredential") +@patch("backend.batch.utilities.helpers.llm_helper.get_azure_credential") @patch("backend.batch.utilities.helpers.llm_helper.MLClient") def test_get_ml_client_initializes_with_expected_parameters( mock_ml_client, mock_default_credential, env_helper_mock diff --git a/docs/LOCAL_DEPLOYMENT.md b/docs/LOCAL_DEPLOYMENT.md index 7e8af57e3..2bcd1c0d8 100644 --- a/docs/LOCAL_DEPLOYMENT.md +++ b/docs/LOCAL_DEPLOYMENT.md @@ -108,6 +108,8 @@ provisioned resources. This file can be created manually at the root of the proj provisioned using `azd provision` or `azd up`, a `.env` file is automatically generated in the `.azure//.env` file. To get your `` run `azd env list` to see which env is default. +Set APP_ENV in your `.env` file to control Azure authentication. Set the environment variable to dev to use Azure CLI credentials, or to prod to use Managed Identity for production. Ensure you're logged in via az login when using dev in local. To configure your environment, ensure that APP_ENV is set to **"dev"** in your .env file. + The `AzureWebJobsStorage` needs to be added to your `.env` file manually. This can be retrieved from the function settings via the Azure Portal. @@ -186,6 +188,9 @@ Execute the above [shell command](#L81) to run the function locally. 
You may nee |AZURE_SEARCH_FIELDS_ID|id|`AZURE_SEARCH_FIELDS_ID`: Field from your Azure AI Search index that gives a unique idenitfier of the document chunk. `id` if you don't have a specific requirement.| |AZURE_SEARCH_FILENAME_COLUMN||`AZURE_SEARCH_FILENAME_COLUMN`: Field from your Azure AI Search index that gives a unique idenitfier of the source of your data to display in the UI.| |AZURE_SEARCH_TITLE_COLUMN||Field from your Azure AI Search index that gives a relevant title or header for your data content to display in the UI.| +|AZURE_SEARCH_SOURCE_COLUMN|source|Field from your Azure AI Search index that identifies the source of your data. `source` if you don't have a specific requirement.| +|AZURE_SEARCH_TEXT_COLUMN|text|Field from your Azure AI Search index that contains the main text content of your documents. `text` if you don't have a specific requirement.| +|AZURE_SEARCH_LAYOUT_TEXT_COLUMN|layoutText|Field from your Azure AI Search index that contains the layout-aware text content of your documents. `layoutText` if you don't have a specific requirement.| |AZURE_SEARCH_URL_COLUMN||Field from your Azure AI Search index that contains a URL for the document, e.g. an Azure Blob Storage URI. This value is not currently used.| |AZURE_SEARCH_FIELDS_TAG|tag|Field from your Azure AI Search index that contains tags for the document. `tag` if you don't have a specific requirement.| |AZURE_SEARCH_FIELDS_METADATA|metadata|Field from your Azure AI Search index that contains metadata for the document. `metadata` if you don't have a specific requirement.| diff --git a/docs/QuotaCheck.md b/docs/QuotaCheck.md new file mode 100644 index 000000000..f0657912f --- /dev/null +++ b/docs/QuotaCheck.md @@ -0,0 +1,101 @@ +## Check Quota Availability Before Deployment + +Before deploying the accelerator, **ensure sufficient quota availability** for the required model. 
+ +> **For Global Standard |GPT-4.1- the capacity to at least 150k tokens post-deployment for optimal performance.** + +### Login if you have not done so already +``` +azd auth login +``` + + +### πŸ“Œ Default Models & Capacities: +``` +gpt4.1:30, text-embedding-ada-002:30 +``` +### πŸ“Œ Default Regions: +``` +francecentral, australiaeast, uksouth, eastus2, northcentralus, swedencentral, westus, westus2, southcentralus +``` +### Usage Scenarios: +- No parameters passed β†’ Default models and capacities will be checked in default regions. +- Only model(s) provided β†’ The script will check for those models in the default regions. +- Only region(s) provided β†’ The script will check default models in the specified regions. +- Both models and regions provided β†’ The script will check those models in the specified regions. +- `--verbose` passed β†’ Enables detailed logging output for debugging and traceability. + +### **Input Formats** +> Use the --models, --regions, and --verbose options for parameter handling: + +βœ”οΈ Run without parameters to check default models & regions without verbose logging: + ``` + ./quota_check_params.sh + ``` +βœ”οΈ Enable verbose logging: + ``` + ./quota_check_params.sh --verbose + ``` +βœ”οΈ Check specific model(s) in default regions: + ``` + ./quota_check_params.sh --models gpt4.1:30,text-embedding-ada-002:30 + ``` +βœ”οΈ Check default models in specific region(s): + ``` +./quota_check_params.sh --regions eastus2,westus + ``` +βœ”οΈ Passing Both models and regions: + ``` + ./quota_check_params.sh --models gpt4.1:30 --regions eastus2,westus + ``` +βœ”οΈ All parameters combined: + ``` + ./quota_check_params.sh --models gpt4.1:30,text-embedding-ada-002:30 --regions eastus2,westus --verbose + ``` + +### **Sample Output** +The final table lists regions with available quota. You can select any of these regions for deployment. 
+ +![quota-check-ouput](images/quota-check-output.png) + +--- +### **If using Azure Portal and Cloud Shell** + +1. Navigate to the [Azure Portal](https://portal.azure.com). +2. Click on **Azure Cloud Shell** in the top right navigation menu. +3. Run the appropriate command based on your requirement: + + **To check quota for the deployment** + + ```sh + curl -L -o quota_check_params.sh "https://raw.githubusercontent.com/Azure-Samples/chat-with-your-data-solution-accelerator/main/scripts/quota_check_params.sh" + chmod +x quota_check_params.sh + ./quota_check_params.sh + ``` + - Refer to [Input Formats](#input-formats) for detailed commands. + +### **If using VS Code or Codespaces** +1. Open the terminal in VS Code or Codespaces. +2. If you're using VS Code, click the dropdown on the right side of the terminal window, and select `Git Bash`. + ![git_bash](images/git_bash.png) +3. Navigate to the `scripts` folder where the script files are located and make the script as executable: + ```sh + cd scripts + chmod +x quota_check_params.sh + ``` +4. Run the appropriate script based on your requirement: + + **To check quota for the deployment** + + ```sh + ./quota_check_params.sh + ``` + - Refer to [Input Formats](#input-formats) for detailed commands. + +5. If you see the error `_bash: az: command not found_`, install Azure CLI: + + ```sh + curl -sL https://aka.ms/InstallAzureCLIDeb | sudo bash + az login + ``` +6. Rerun the script after installing Azure CLI. diff --git a/docs/TEAMS_LOCAL_DEPLOYMENT.md b/docs/TEAMS_LOCAL_DEPLOYMENT.md index 0530a73ab..8e62c37c3 100644 --- a/docs/TEAMS_LOCAL_DEPLOYMENT.md +++ b/docs/TEAMS_LOCAL_DEPLOYMENT.md @@ -59,6 +59,9 @@ Or use the [Azure Functions VS Code extension](https://marketplace.visualstudio. |AZURE_SEARCH_FIELDS_ID|id|`AZURE_SEARCH_FIELDS_ID`: Field from your Azure AI Search index that gives a unique idenitfier of the document chunk. 
`id` if you don't have a specific requirement.| |AZURE_SEARCH_FILENAME_COLUMN||`AZURE_SEARCH_FILENAME_COLUMN`: Field from your Azure AI Search index that gives a unique idenitfier of the source of your data to display in the UI.| |AZURE_SEARCH_TITLE_COLUMN||Field from your Azure AI Search index that gives a relevant title or header for your data content to display in the UI.| +|AZURE_SEARCH_SOURCE_COLUMN|source|Field from your Azure AI Search index that identifies the source of your data. `source` if you don't have a specific requirement.| +|AZURE_SEARCH_TEXT_COLUMN|text|Field from your Azure AI Search index that contains the main text content of your documents. `text` if you don't have a specific requirement.| +|AZURE_SEARCH_LAYOUT_TEXT_COLUMN|layoutText|Field from your Azure AI Search index that contains the layout-aware text content of your documents. `layoutText` if you don't have a specific requirement.| |AZURE_SEARCH_URL_COLUMN||Field from your Azure AI Search index that contains a URL for the document, e.g. an Azure Blob Storage URI. This value is not currently used.| |AZURE_SEARCH_FIELDS_TAG|tag|Field from your Azure AI Search index that contains tags for the document. `tag` if you don't have a specific requirement.| |AZURE_SEARCH_FIELDS_METADATA|metadata|Field from your Azure AI Search index that contains metadata for the document. 
`metadata` if you don't have a specific requirement.| diff --git a/docs/images/git_bash.png b/docs/images/git_bash.png new file mode 100644 index 000000000..0e9f53a12 Binary files /dev/null and b/docs/images/git_bash.png differ diff --git a/docs/images/quota-check-output.png b/docs/images/quota-check-output.png new file mode 100644 index 000000000..9c80e3298 Binary files /dev/null and b/docs/images/quota-check-output.png differ diff --git a/docs/prompt_flow.md b/docs/prompt_flow.md index f0870d7f2..df045ecb8 100644 --- a/docs/prompt_flow.md +++ b/docs/prompt_flow.md @@ -45,7 +45,7 @@ This is a known issue and can be resolved by initiating a compute session. Once Follow these steps to update the flow: -1. Navigate to the flow in Azure Machine Learning Studio and make the necessary changes +1. Navigate to the flow in Azure Machine Learning Studio and make the necessary changes. 1. Download the updated flow files to the [../infra/prompt-flow/cwyd/](../infra/prompt-flow/cwyd/) directory by clicking the download button in the Files tab ![Download](images/prompt-flow-download.png) 1. 
Replace the content of `flow.dag.template.yaml` with your new flow from the `flow.dag.yaml` file diff --git a/infra/app/function.bicep b/infra/app/function.bicep index 16e0464c3..10a9e6dd9 100644 --- a/infra/app/function.bicep +++ b/infra/app/function.bicep @@ -220,3 +220,4 @@ module functionaccess '../core/security/keyvault-access.bicep' = if (useKeyVault output FUNCTION_IDENTITY_PRINCIPAL_ID string = function.outputs.identityPrincipalId output functionName string = function.outputs.name +output AzureWebJobsStorage string = function.outputs.azureWebJobsStorage diff --git a/infra/core/host/functions.bicep b/infra/core/host/functions.bicep index fac21a7d6..edf2843cb 100644 --- a/infra/core/host/functions.bicep +++ b/infra/core/host/functions.bicep @@ -111,3 +111,6 @@ resource storage 'Microsoft.Storage/storageAccounts@2021-09-01' existing = { output identityPrincipalId string = managedIdentity ? functions.outputs.identityPrincipalId : '' output name string = functions.outputs.name output uri string = functions.outputs.uri +output azureWebJobsStorage string = useKeyVault + ? 
'DefaultEndpointsProtocol=https;AccountName=${storage.name};AccountKey=${storage.listKeys().keys[0].value};EndpointSuffix=${environment().suffixes.storage}' + : '' diff --git a/infra/main.bicep b/infra/main.bicep index d3d7c0fb9..21734bd54 100644 --- a/infra/main.bicep +++ b/infra/main.bicep @@ -9,7 +9,8 @@ var abbrs = loadJsonContent('./abbreviations.json') param resourceToken string = toLower(uniqueString(subscription().id, environmentName, location)) @description('Location for all resources, if you are using existing resource group provide the location of the resorce group.') -@metadata({azd: { +@metadata({ + azd: { type: 'location' } }) @@ -117,6 +118,12 @@ param azureSearchFieldsMetadata string = 'metadata' @description('Source column') param azureSearchSourceColumn string = 'source' +@description('Text column') +param azureSearchTextColumn string = 'text' + +@description('Layout Text column') +param azureSearchLayoutTextColumn string = 'layoutText' + @description('Chunk column') param azureSearchChunkColumn string = 'chunk' @@ -303,6 +310,9 @@ param useKeyVault bool = authType == 'rbac' ? false : true @description('Id of the user or app to assign application roles') param principalId string = '' +@description('Application Environment') +param appEnvironment string = 'Prod' + @description('Hosting model for the web apps. This value is fixed as "container", which uses prebuilt containers for faster deployment.') param hostingModel string = 'container' @@ -321,9 +331,6 @@ param recognizedLanguages string = 'en-US,fr-FR,de-DE,it-IT' @description('Azure Machine Learning Name') param azureMachineLearningName string = 'mlw-${resourceToken}' -@description('Resource ID of existing Log Analytics workspace. 
If not provided, a new one will be created.') -param existingLogAnalyticsResourceId string = '' - var blobContainerName = 'documents' var queueName = 'doc-processing' var clientKey = '${uniqueString(guid(subscription().id, deployment().name))}${newGuidString}' @@ -357,7 +364,9 @@ var semanticKernelSystemPrompt = '''You help employees to navigate only private resource rg 'Microsoft.Resources/resourceGroups@2021-04-01' = { name: rgName location: location - tags: tags + tags: union(tags, { + TemplateName: 'CWYD' + }) } // ========== Managed Identity ========== // @@ -428,7 +437,7 @@ var defaultOpenAiDeployments = [ version: azureOpenAIEmbeddingModelVersion } sku: { - name: 'Standard' + name: 'GlobalStandard' capacity: azureOpenAIEmbeddingModelCapacity } } @@ -672,7 +681,8 @@ module web './app/web.bicep' = if (hostingModel == 'code') { LOGLEVEL: logLevel DATABASE_TYPE: databaseType OPEN_AI_FUNCTIONS_SYSTEM_PROMPT: openAIFunctionsSystemPrompt - SEMENTIC_KERNEL_SYSTEM_PROMPT: semanticKernelSystemPrompt + SEMANTIC_KERNEL_SYSTEM_PROMPT: semanticKernelSystemPrompt + APP_ENV: appEnvironment }, // Conditionally add database-specific settings databaseType == 'CosmosDB' @@ -697,6 +707,8 @@ module web './app/web.bicep' = if (hostingModel == 'code') { AZURE_SEARCH_TITLE_COLUMN: azureSearchTitleColumn AZURE_SEARCH_FIELDS_METADATA: azureSearchFieldsMetadata AZURE_SEARCH_SOURCE_COLUMN: azureSearchSourceColumn + AZURE_SEARCH_TEXT_COLUMN: azureSearchUseIntegratedVectorization ? azureSearchTextColumn : '' + AZURE_SEARCH_LAYOUT_TEXT_COLUMN: azureSearchUseIntegratedVectorization ? 
azureSearchLayoutTextColumn : '' AZURE_SEARCH_CHUNK_COLUMN: azureSearchChunkColumn AZURE_SEARCH_OFFSET_COLUMN: azureSearchOffsetColumn AZURE_SEARCH_URL_COLUMN: azureSearchUrlColumn @@ -784,7 +796,8 @@ module web_docker './app/web.bicep' = if (hostingModel == 'container') { LOGLEVEL: logLevel DATABASE_TYPE: databaseType OPEN_AI_FUNCTIONS_SYSTEM_PROMPT: openAIFunctionsSystemPrompt - SEMENTIC_KERNEL_SYSTEM_PROMPT: semanticKernelSystemPrompt + SEMANTIC_KERNEL_SYSTEM_PROMPT: semanticKernelSystemPrompt + APP_ENV: appEnvironment }, // Conditionally add database-specific settings databaseType == 'CosmosDB' @@ -809,6 +822,8 @@ module web_docker './app/web.bicep' = if (hostingModel == 'container') { AZURE_SEARCH_TITLE_COLUMN: azureSearchTitleColumn AZURE_SEARCH_FIELDS_METADATA: azureSearchFieldsMetadata AZURE_SEARCH_SOURCE_COLUMN: azureSearchSourceColumn + AZURE_SEARCH_TEXT_COLUMN: azureSearchUseIntegratedVectorization ? azureSearchTextColumn : '' + AZURE_SEARCH_LAYOUT_TEXT_COLUMN: azureSearchUseIntegratedVectorization ? azureSearchLayoutTextColumn : '' AZURE_SEARCH_CHUNK_COLUMN: azureSearchChunkColumn AZURE_SEARCH_OFFSET_COLUMN: azureSearchOffsetColumn AZURE_SEARCH_URL_COLUMN: azureSearchUrlColumn @@ -886,6 +901,7 @@ module adminweb './app/adminweb.bicep' = if (hostingModel == 'code') { CONVERSATION_FLOW: conversationFlow LOGLEVEL: logLevel DATABASE_TYPE: databaseType + APP_ENV: appEnvironment }, // Conditionally add database-specific settings databaseType == 'CosmosDB' @@ -905,6 +921,8 @@ module adminweb './app/adminweb.bicep' = if (hostingModel == 'code') { AZURE_SEARCH_TITLE_COLUMN: azureSearchTitleColumn AZURE_SEARCH_FIELDS_METADATA: azureSearchFieldsMetadata AZURE_SEARCH_SOURCE_COLUMN: azureSearchSourceColumn + AZURE_SEARCH_TEXT_COLUMN: azureSearchUseIntegratedVectorization ? azureSearchTextColumn : '' + AZURE_SEARCH_LAYOUT_TEXT_COLUMN: azureSearchUseIntegratedVectorization ? 
azureSearchLayoutTextColumn : '' AZURE_SEARCH_CHUNK_COLUMN: azureSearchChunkColumn AZURE_SEARCH_OFFSET_COLUMN: azureSearchOffsetColumn AZURE_SEARCH_URL_COLUMN: azureSearchUrlColumn @@ -983,6 +1001,7 @@ module adminweb_docker './app/adminweb.bicep' = if (hostingModel == 'container') CONVERSATION_FLOW: conversationFlow LOGLEVEL: logLevel DATABASE_TYPE: databaseType + APP_ENV: appEnvironment }, // Conditionally add database-specific settings databaseType == 'CosmosDB' @@ -1002,6 +1021,8 @@ module adminweb_docker './app/adminweb.bicep' = if (hostingModel == 'container') AZURE_SEARCH_TITLE_COLUMN: azureSearchTitleColumn AZURE_SEARCH_FIELDS_METADATA: azureSearchFieldsMetadata AZURE_SEARCH_SOURCE_COLUMN: azureSearchSourceColumn + AZURE_SEARCH_TEXT_COLUMN: azureSearchUseIntegratedVectorization ? azureSearchTextColumn : '' + AZURE_SEARCH_LAYOUT_TEXT_COLUMN: azureSearchUseIntegratedVectorization ? azureSearchLayoutTextColumn : '' AZURE_SEARCH_CHUNK_COLUMN: azureSearchChunkColumn AZURE_SEARCH_OFFSET_COLUMN: azureSearchOffsetColumn AZURE_SEARCH_URL_COLUMN: azureSearchUrlColumn @@ -1109,6 +1130,7 @@ module function './app/function.bicep' = if (hostingModel == 'code') { LOGLEVEL: logLevel AZURE_OPENAI_SYSTEM_MESSAGE: azureOpenAISystemMessage DATABASE_TYPE: databaseType + APP_ENV: appEnvironment }, // Conditionally add database-specific settings databaseType == 'CosmosDB' @@ -1124,6 +1146,8 @@ module function './app/function.bicep' = if (hostingModel == 'code') { AZURE_SEARCH_TITLE_COLUMN: azureSearchTitleColumn AZURE_SEARCH_FIELDS_METADATA: azureSearchFieldsMetadata AZURE_SEARCH_SOURCE_COLUMN: azureSearchSourceColumn + AZURE_SEARCH_TEXT_COLUMN: azureSearchUseIntegratedVectorization ? azureSearchTextColumn : '' + AZURE_SEARCH_LAYOUT_TEXT_COLUMN: azureSearchUseIntegratedVectorization ? 
azureSearchLayoutTextColumn : '' AZURE_SEARCH_CHUNK_COLUMN: azureSearchChunkColumn AZURE_SEARCH_OFFSET_COLUMN: azureSearchOffsetColumn AZURE_SEARCH_TOP_K: azureSearchTopK @@ -1192,6 +1216,7 @@ module function_docker './app/function.bicep' = if (hostingModel == 'container') LOGLEVEL: logLevel AZURE_OPENAI_SYSTEM_MESSAGE: azureOpenAISystemMessage DATABASE_TYPE: databaseType + APP_ENV: appEnvironment }, // Conditionally add database-specific settings databaseType == 'CosmosDB' @@ -1207,6 +1232,8 @@ module function_docker './app/function.bicep' = if (hostingModel == 'container') AZURE_SEARCH_TITLE_COLUMN: azureSearchTitleColumn AZURE_SEARCH_FIELDS_METADATA: azureSearchFieldsMetadata AZURE_SEARCH_SOURCE_COLUMN: azureSearchSourceColumn + AZURE_SEARCH_TEXT_COLUMN: azureSearchUseIntegratedVectorization ? azureSearchTextColumn : '' + AZURE_SEARCH_LAYOUT_TEXT_COLUMN: azureSearchUseIntegratedVectorization ? azureSearchLayoutTextColumn : '' AZURE_SEARCH_CHUNK_COLUMN: azureSearchChunkColumn AZURE_SEARCH_OFFSET_COLUMN: azureSearchOffsetColumn AZURE_SEARCH_TOP_K: azureSearchTopK @@ -1439,6 +1466,10 @@ var azureSearchServiceInfo = databaseType == 'CosmosDB' filename_column: azureSearchFilenameColumn filter: azureSearchFilter title_column: azureSearchTitleColumn + fields_metadata: azureSearchFieldsMetadata + source_column: azureSearchSourceColumn + text_column: azureSearchTextColumn + layout_column: azureSearchLayoutTextColumn url_column: azureSearchUrlColumn use_integrated_vectorization: azureSearchUseIntegratedVectorization index: azureSearchIndex @@ -1448,7 +1479,7 @@ var azureSearchServiceInfo = databaseType == 'CosmosDB' : '' var azureComputerVisionInfo = string({ - service_name: speechServiceName + service_name: computerVisionName endpoint: useAdvancedImageProcessing ? computerVision.outputs.endpoint : '' location: useAdvancedImageProcessing ? computerVision.outputs.location : '' key: useKeyVault ? 
storekeys.outputs.COMPUTER_VISION_KEY_NAME : '' @@ -1479,8 +1510,11 @@ var azureContentSafetyInfo = string({ key: useKeyVault ? storekeys.outputs.CONTENT_SAFETY_KEY_NAME : '' }) +var backendUrl = 'https://${functionName}.azurewebsites.net' + output APPLICATIONINSIGHTS_CONNECTION_STRING string = monitoring.outputs.applicationInsightsConnectionString output AZURE_APP_SERVICE_HOSTING_MODEL string = hostingModel +output APP_ENV string = appEnvironment output AZURE_BLOB_STORAGE_INFO string = azureBlobStorageInfo output AZURE_COMPUTER_VISION_INFO string = azureComputerVisionInfo output AZURE_CONTENT_SAFETY_INFO string = azureContentSafetyInfo @@ -1498,6 +1532,11 @@ output DOCUMENT_PROCESSING_QUEUE_NAME string = queueName output ORCHESTRATION_STRATEGY string = orchestrationStrategy output USE_KEY_VAULT bool = useKeyVault output AZURE_AUTH_TYPE string = authType +output BACKEND_URL string = backendUrl +output AzureWebJobsStorage string = hostingModel == 'code' + ? function.outputs.AzureWebJobsStorage + : function_docker.outputs.AzureWebJobsStorage +output FUNCTION_KEY string = clientKey output FRONTEND_WEBSITE_NAME string = hostingModel == 'code' ? 
web.outputs.FRONTEND_API_URI : web_docker.outputs.FRONTEND_API_URI @@ -1517,4 +1556,4 @@ output AZURE_COSMOSDB_INFO string = azureCosmosDBInfo output AZURE_POSTGRESQL_INFO string = azurePostgresDBInfo output DATABASE_TYPE string = databaseType output OPEN_AI_FUNCTIONS_SYSTEM_PROMPT string = openAIFunctionsSystemPrompt -output SEMENTIC_KERNEL_SYSTEM_PROMPT string = semanticKernelSystemPrompt +output SEMANTIC_KERNEL_SYSTEM_PROMPT string = semanticKernelSystemPrompt diff --git a/infra/main.bicepparam b/infra/main.bicepparam index 6d1fe4ad3..0d683f40c 100644 --- a/infra/main.bicepparam +++ b/infra/main.bicepparam @@ -3,6 +3,7 @@ using './main.bicep' param environmentName = readEnvironmentVariable('AZURE_ENV_NAME', 'env_name') param location = readEnvironmentVariable('AZURE_LOCATION', 'location') param principalId = readEnvironmentVariable('AZURE_PRINCIPAL_ID', 'principal_id') +param appEnvironment = readEnvironmentVariable('APP_ENV', 'Prod') // Please make sure to set this value to false when using rbac with AZURE_AUTH_TYPE @@ -27,6 +28,8 @@ param azureSearchVectorColumn = readEnvironmentVariable('AZURE_SEARCH_CONTENT_VE param azureSearchTitleColumn = readEnvironmentVariable('AZURE_SEARCH_TITLE_COLUMN', 'title') param azureSearchFieldsMetadata = readEnvironmentVariable('AZURE_SEARCH_FIELDS_METADATA', 'metadata') param azureSearchSourceColumn = readEnvironmentVariable('AZURE_SEARCH_SOURCE_COLUMN', 'source') +param azureSearchTextColumn = readEnvironmentVariable('AZURE_SEARCH_TEXT_COLUMN', 'text') +param azureSearchLayoutTextColumn = readEnvironmentVariable('AZURE_SEARCH_LAYOUT_TEXT_COLUMN', 'layoutText') param azureSearchChunkColumn = readEnvironmentVariable('AZURE_SEARCH_CHUNK_COLUMN', 'chunk') param azureSearchOffsetColumn = readEnvironmentVariable('AZURE_SEARCH_OFFSET_COLUMN', 'offset') diff --git a/infra/main.json b/infra/main.json index 1f0b06768..e6e9060a5 100644 --- a/infra/main.json +++ b/infra/main.json @@ -4,8 +4,8 @@ "metadata": { "_generator": { "name": 
"bicep", - "version": "0.36.1.42791", - "templateHash": "1150018300409326962" + "version": "0.36.177.2456", + "templateHash": "16352542150707902119" } }, "parameters": { @@ -232,6 +232,20 @@ "description": "Source column" } }, + "azureSearchTextColumn": { + "type": "string", + "defaultValue": "text", + "metadata": { + "description": "Text column" + } + }, + "azureSearchLayoutTextColumn": { + "type": "string", + "defaultValue": "layoutText", + "metadata": { + "description": "Layout Text column" + } + }, "azureSearchChunkColumn": { "type": "string", "defaultValue": "chunk", @@ -618,6 +632,13 @@ "description": "Id of the user or app to assign application roles" } }, + "appEnvironment": { + "type": "string", + "defaultValue": "Prod", + "metadata": { + "description": "Application Environment" + } + }, "hostingModel": { "type": "string", "defaultValue": "container", @@ -649,13 +670,6 @@ "metadata": { "description": "Azure Machine Learning Name" } - }, - "existingLogAnalyticsResourceId": { - "type": "string", - "defaultValue": "", - "metadata": { - "description": "Resource ID of existing Log Analytics workspace. If not provided, a new one will be created." 
- } } }, "variables": { @@ -921,14 +935,15 @@ "version": "[parameters('azureOpenAIEmbeddingModelVersion')]" }, "sku": { - "name": "Standard", + "name": "GlobalStandard", "capacity": "[parameters('azureOpenAIEmbeddingModelCapacity')]" } } ], "openAiDeployments": "[concat(variables('defaultOpenAiDeployments'), if(parameters('useAdvancedImageProcessing'), createArray(createObject('name', parameters('azureOpenAIVisionModel'), 'model', createObject('format', 'OpenAI', 'name', parameters('azureOpenAIVisionModelName'), 'version', parameters('azureOpenAIVisionModelVersion')), 'sku', createObject('name', 'Standard', 'capacity', parameters('azureOpenAIVisionModelCapacity')))), createArray()))]", "azureOpenAIModelInfo": "[string(createObject('model', parameters('azureOpenAIModel'), 'model_name', parameters('azureOpenAIModelName'), 'model_version', parameters('azureOpenAIModelVersion')))]", - "azureOpenAIEmbeddingModelInfo": "[string(createObject('model', parameters('azureOpenAIEmbeddingModel'), 'model_name', parameters('azureOpenAIEmbeddingModelName'), 'model_version', parameters('azureOpenAIEmbeddingModelVersion')))]" + "azureOpenAIEmbeddingModelInfo": "[string(createObject('model', parameters('azureOpenAIEmbeddingModel'), 'model_name', parameters('azureOpenAIEmbeddingModelName'), 'model_version', parameters('azureOpenAIEmbeddingModelVersion')))]", + "backendUrl": "[format('https://{0}.azurewebsites.net', parameters('functionName'))]" }, "resources": [ { @@ -936,7 +951,7 @@ "apiVersion": "2021-04-01", "name": "[parameters('rgName')]", "location": "[parameters('location')]", - "tags": "[variables('tags')]" + "tags": "[union(variables('tags'), createObject('TemplateName', 'CWYD'))]" }, { "condition": "[equals(parameters('databaseType'), 'PostgreSQL')]", @@ -966,8 +981,8 @@ "metadata": { "_generator": { "name": "bicep", - "version": "0.36.1.42791", - "templateHash": "13552365542706136811" + "version": "0.36.177.2456", + "templateHash": "5332626978409423867" } }, "parameters": { 
@@ -1058,8 +1073,8 @@ "metadata": { "_generator": { "name": "bicep", - "version": "0.36.1.42791", - "templateHash": "16238674825125616801" + "version": "0.36.177.2456", + "templateHash": "2450648193631066644" } }, "parameters": { @@ -1230,8 +1245,8 @@ "metadata": { "_generator": { "name": "bicep", - "version": "0.36.1.42791", - "templateHash": "18258663885754684875" + "version": "0.36.177.2456", + "templateHash": "8099859803038218986" } }, "parameters": { @@ -1460,8 +1475,8 @@ "metadata": { "_generator": { "name": "bicep", - "version": "0.36.1.42791", - "templateHash": "13654700215438528863" + "version": "0.36.177.2456", + "templateHash": "10950365812388137062" }, "description": "Creates an Azure Key Vault." }, @@ -1562,8 +1577,8 @@ "metadata": { "_generator": { "name": "bicep", - "version": "0.36.1.42791", - "templateHash": "5396502874055092713" + "version": "0.36.177.2456", + "templateHash": "10797488511727118382" }, "description": "Creates an Azure Cognitive Services instance." }, @@ -1721,8 +1736,8 @@ "metadata": { "_generator": { "name": "bicep", - "version": "0.36.1.42791", - "templateHash": "5396502874055092713" + "version": "0.36.177.2456", + "templateHash": "10797488511727118382" }, "description": "Creates an Azure Cognitive Services instance." }, @@ -1874,8 +1889,8 @@ "metadata": { "_generator": { "name": "bicep", - "version": "0.36.1.42791", - "templateHash": "1525080529756490231" + "version": "0.36.177.2456", + "templateHash": "14223167216489085881" }, "description": "Creates a role assignment for a service principal." }, @@ -1945,8 +1960,8 @@ "metadata": { "_generator": { "name": "bicep", - "version": "0.36.1.42791", - "templateHash": "1525080529756490231" + "version": "0.36.177.2456", + "templateHash": "14223167216489085881" }, "description": "Creates a role assignment for a service principal." 
}, @@ -2016,8 +2031,8 @@ "metadata": { "_generator": { "name": "bicep", - "version": "0.36.1.42791", - "templateHash": "1525080529756490231" + "version": "0.36.177.2456", + "templateHash": "14223167216489085881" }, "description": "Creates a role assignment for a service principal." }, @@ -2087,8 +2102,8 @@ "metadata": { "_generator": { "name": "bicep", - "version": "0.36.1.42791", - "templateHash": "1525080529756490231" + "version": "0.36.177.2456", + "templateHash": "14223167216489085881" }, "description": "Creates a role assignment for a service principal." }, @@ -2162,8 +2177,8 @@ "metadata": { "_generator": { "name": "bicep", - "version": "0.36.1.42791", - "templateHash": "5396502874055092713" + "version": "0.36.177.2456", + "templateHash": "10797488511727118382" }, "description": "Creates an Azure Cognitive Services instance." }, @@ -2333,8 +2348,8 @@ "metadata": { "_generator": { "name": "bicep", - "version": "0.36.1.42791", - "templateHash": "16435352906283381381" + "version": "0.36.177.2456", + "templateHash": "6318410411178559325" } }, "parameters": { @@ -2600,8 +2615,8 @@ "metadata": { "_generator": { "name": "bicep", - "version": "0.36.1.42791", - "templateHash": "7022850395133125583" + "version": "0.36.177.2456", + "templateHash": "16006377011416272456" }, "description": "Creates an Azure AI Search instance." }, @@ -2769,8 +2784,8 @@ "metadata": { "_generator": { "name": "bicep", - "version": "0.36.1.42791", - "templateHash": "8289034454652170240" + "version": "0.36.177.2456", + "templateHash": "1018313823442323683" }, "description": "Creates an Azure App Service plan." 
}, @@ -2900,7 +2915,7 @@ "value": "[parameters('authType')]" }, "appSettings": { - "value": "[union(createObject('AZURE_BLOB_ACCOUNT_NAME', parameters('storageAccountName'), 'AZURE_BLOB_CONTAINER_NAME', variables('blobContainerName'), 'AZURE_FORM_RECOGNIZER_ENDPOINT', reference(extensionResourceId(format('/subscriptions/{0}/resourceGroups/{1}', subscription().subscriptionId, parameters('rgName')), 'Microsoft.Resources/deployments', parameters('formRecognizerName')), '2022-09-01').outputs.endpoint.value, 'AZURE_COMPUTER_VISION_ENDPOINT', if(parameters('useAdvancedImageProcessing'), reference(extensionResourceId(format('/subscriptions/{0}/resourceGroups/{1}', subscription().subscriptionId, parameters('rgName')), 'Microsoft.Resources/deployments', 'computerVision'), '2022-09-01').outputs.endpoint.value, ''), 'AZURE_COMPUTER_VISION_VECTORIZE_IMAGE_API_VERSION', parameters('computerVisionVectorizeImageApiVersion'), 'AZURE_COMPUTER_VISION_VECTORIZE_IMAGE_MODEL_VERSION', parameters('computerVisionVectorizeImageModelVersion'), 'AZURE_CONTENT_SAFETY_ENDPOINT', reference(extensionResourceId(format('/subscriptions/{0}/resourceGroups/{1}', subscription().subscriptionId, parameters('rgName')), 'Microsoft.Resources/deployments', parameters('contentSafetyName')), '2022-09-01').outputs.endpoint.value, 'AZURE_OPENAI_RESOURCE', parameters('azureOpenAIResourceName'), 'AZURE_OPENAI_MODEL', parameters('azureOpenAIModel'), 'AZURE_OPENAI_MODEL_NAME', parameters('azureOpenAIModelName'), 'AZURE_OPENAI_MODEL_VERSION', parameters('azureOpenAIModelVersion'), 'AZURE_OPENAI_TEMPERATURE', parameters('azureOpenAITemperature'), 'AZURE_OPENAI_TOP_P', parameters('azureOpenAITopP'), 'AZURE_OPENAI_MAX_TOKENS', parameters('azureOpenAIMaxTokens'), 'AZURE_OPENAI_STOP_SEQUENCE', parameters('azureOpenAIStopSequence'), 'AZURE_OPENAI_SYSTEM_MESSAGE', parameters('azureOpenAISystemMessage'), 'AZURE_OPENAI_API_VERSION', parameters('azureOpenAIApiVersion'), 'AZURE_OPENAI_STREAM', parameters('azureOpenAIStream'), 
'AZURE_OPENAI_EMBEDDING_MODEL', parameters('azureOpenAIEmbeddingModel'), 'AZURE_OPENAI_EMBEDDING_MODEL_NAME', parameters('azureOpenAIEmbeddingModelName'), 'AZURE_OPENAI_EMBEDDING_MODEL_VERSION', parameters('azureOpenAIEmbeddingModelVersion'), 'AZURE_SPEECH_SERVICE_NAME', parameters('speechServiceName'), 'AZURE_SPEECH_SERVICE_REGION', parameters('location'), 'AZURE_SPEECH_RECOGNIZER_LANGUAGES', parameters('recognizedLanguages'), 'USE_ADVANCED_IMAGE_PROCESSING', parameters('useAdvancedImageProcessing'), 'ADVANCED_IMAGE_PROCESSING_MAX_IMAGES', parameters('advancedImageProcessingMaxImages'), 'ORCHESTRATION_STRATEGY', parameters('orchestrationStrategy'), 'CONVERSATION_FLOW', parameters('conversationFlow'), 'LOGLEVEL', parameters('logLevel'), 'DATABASE_TYPE', parameters('databaseType'), 'OPEN_AI_FUNCTIONS_SYSTEM_PROMPT', variables('openAIFunctionsSystemPrompt'), 'SEMENTIC_KERNEL_SYSTEM_PROMPT', variables('semanticKernelSystemPrompt')), if(equals(parameters('databaseType'), 'CosmosDB'), createObject('AZURE_COSMOSDB_ACCOUNT_NAME', reference(extensionResourceId(format('/subscriptions/{0}/resourceGroups/{1}', subscription().subscriptionId, parameters('rgName')), 'Microsoft.Resources/deployments', 'deploy_cosmos_db'), '2022-09-01').outputs.cosmosOutput.value.cosmosAccountName, 'AZURE_COSMOSDB_DATABASE_NAME', reference(extensionResourceId(format('/subscriptions/{0}/resourceGroups/{1}', subscription().subscriptionId, parameters('rgName')), 'Microsoft.Resources/deployments', 'deploy_cosmos_db'), '2022-09-01').outputs.cosmosOutput.value.cosmosDatabaseName, 'AZURE_COSMOSDB_CONVERSATIONS_CONTAINER_NAME', reference(extensionResourceId(format('/subscriptions/{0}/resourceGroups/{1}', subscription().subscriptionId, parameters('rgName')), 'Microsoft.Resources/deployments', 'deploy_cosmos_db'), '2022-09-01').outputs.cosmosOutput.value.cosmosContainerName, 'AZURE_COSMOSDB_ENABLE_FEEDBACK', true(), 'AZURE_SEARCH_USE_SEMANTIC_SEARCH', parameters('azureSearchUseSemanticSearch'), 
'AZURE_SEARCH_SERVICE', format('https://{0}.search.windows.net', parameters('azureAISearchName')), 'AZURE_SEARCH_INDEX', parameters('azureSearchIndex'), 'AZURE_SEARCH_CONVERSATIONS_LOG_INDEX', parameters('azureSearchConversationLogIndex'), 'AZURE_SEARCH_SEMANTIC_SEARCH_CONFIG', parameters('azureSearchSemanticSearchConfig'), 'AZURE_SEARCH_INDEX_IS_PRECHUNKED', parameters('azureSearchIndexIsPrechunked'), 'AZURE_SEARCH_TOP_K', parameters('azureSearchTopK'), 'AZURE_SEARCH_ENABLE_IN_DOMAIN', parameters('azureSearchEnableInDomain'), 'AZURE_SEARCH_FILENAME_COLUMN', parameters('azureSearchFilenameColumn'), 'AZURE_SEARCH_FILTER', parameters('azureSearchFilter'), 'AZURE_SEARCH_FIELDS_ID', parameters('azureSearchFieldId'), 'AZURE_SEARCH_CONTENT_COLUMN', parameters('azureSearchContentColumn'), 'AZURE_SEARCH_CONTENT_VECTOR_COLUMN', parameters('azureSearchVectorColumn'), 'AZURE_SEARCH_TITLE_COLUMN', parameters('azureSearchTitleColumn'), 'AZURE_SEARCH_FIELDS_METADATA', parameters('azureSearchFieldsMetadata'), 'AZURE_SEARCH_SOURCE_COLUMN', parameters('azureSearchSourceColumn'), 'AZURE_SEARCH_CHUNK_COLUMN', parameters('azureSearchChunkColumn'), 'AZURE_SEARCH_OFFSET_COLUMN', parameters('azureSearchOffsetColumn'), 'AZURE_SEARCH_URL_COLUMN', parameters('azureSearchUrlColumn'), 'AZURE_SEARCH_USE_INTEGRATED_VECTORIZATION', parameters('azureSearchUseIntegratedVectorization')), if(equals(parameters('databaseType'), 'PostgreSQL'), createObject('AZURE_POSTGRESQL_HOST_NAME', reference(extensionResourceId(format('/subscriptions/{0}/resourceGroups/{1}', subscription().subscriptionId, parameters('rgName')), 'Microsoft.Resources/deployments', 'deploy_postgres_sql'), '2022-09-01').outputs.postgresDbOutput.value.postgreSQLServerName, 'AZURE_POSTGRESQL_DATABASE_NAME', reference(extensionResourceId(format('/subscriptions/{0}/resourceGroups/{1}', subscription().subscriptionId, parameters('rgName')), 'Microsoft.Resources/deployments', 'deploy_postgres_sql'), 
'2022-09-01').outputs.postgresDbOutput.value.postgreSQLDatabaseName, 'AZURE_POSTGRESQL_USER', parameters('websiteName')), createObject())))]" + "value": "[union(createObject('AZURE_BLOB_ACCOUNT_NAME', parameters('storageAccountName'), 'AZURE_BLOB_CONTAINER_NAME', variables('blobContainerName'), 'AZURE_FORM_RECOGNIZER_ENDPOINT', reference(extensionResourceId(format('/subscriptions/{0}/resourceGroups/{1}', subscription().subscriptionId, parameters('rgName')), 'Microsoft.Resources/deployments', parameters('formRecognizerName')), '2022-09-01').outputs.endpoint.value, 'AZURE_COMPUTER_VISION_ENDPOINT', if(parameters('useAdvancedImageProcessing'), reference(extensionResourceId(format('/subscriptions/{0}/resourceGroups/{1}', subscription().subscriptionId, parameters('rgName')), 'Microsoft.Resources/deployments', 'computerVision'), '2022-09-01').outputs.endpoint.value, ''), 'AZURE_COMPUTER_VISION_VECTORIZE_IMAGE_API_VERSION', parameters('computerVisionVectorizeImageApiVersion'), 'AZURE_COMPUTER_VISION_VECTORIZE_IMAGE_MODEL_VERSION', parameters('computerVisionVectorizeImageModelVersion'), 'AZURE_CONTENT_SAFETY_ENDPOINT', reference(extensionResourceId(format('/subscriptions/{0}/resourceGroups/{1}', subscription().subscriptionId, parameters('rgName')), 'Microsoft.Resources/deployments', parameters('contentSafetyName')), '2022-09-01').outputs.endpoint.value, 'AZURE_OPENAI_RESOURCE', parameters('azureOpenAIResourceName'), 'AZURE_OPENAI_MODEL', parameters('azureOpenAIModel'), 'AZURE_OPENAI_MODEL_NAME', parameters('azureOpenAIModelName'), 'AZURE_OPENAI_MODEL_VERSION', parameters('azureOpenAIModelVersion'), 'AZURE_OPENAI_TEMPERATURE', parameters('azureOpenAITemperature'), 'AZURE_OPENAI_TOP_P', parameters('azureOpenAITopP'), 'AZURE_OPENAI_MAX_TOKENS', parameters('azureOpenAIMaxTokens'), 'AZURE_OPENAI_STOP_SEQUENCE', parameters('azureOpenAIStopSequence'), 'AZURE_OPENAI_SYSTEM_MESSAGE', parameters('azureOpenAISystemMessage'), 'AZURE_OPENAI_API_VERSION', 
parameters('azureOpenAIApiVersion'), 'AZURE_OPENAI_STREAM', parameters('azureOpenAIStream'), 'AZURE_OPENAI_EMBEDDING_MODEL', parameters('azureOpenAIEmbeddingModel'), 'AZURE_OPENAI_EMBEDDING_MODEL_NAME', parameters('azureOpenAIEmbeddingModelName'), 'AZURE_OPENAI_EMBEDDING_MODEL_VERSION', parameters('azureOpenAIEmbeddingModelVersion'), 'AZURE_SPEECH_SERVICE_NAME', parameters('speechServiceName'), 'AZURE_SPEECH_SERVICE_REGION', parameters('location'), 'AZURE_SPEECH_RECOGNIZER_LANGUAGES', parameters('recognizedLanguages'), 'USE_ADVANCED_IMAGE_PROCESSING', parameters('useAdvancedImageProcessing'), 'ADVANCED_IMAGE_PROCESSING_MAX_IMAGES', parameters('advancedImageProcessingMaxImages'), 'ORCHESTRATION_STRATEGY', parameters('orchestrationStrategy'), 'CONVERSATION_FLOW', parameters('conversationFlow'), 'LOGLEVEL', parameters('logLevel'), 'DATABASE_TYPE', parameters('databaseType'), 'OPEN_AI_FUNCTIONS_SYSTEM_PROMPT', variables('openAIFunctionsSystemPrompt'), 'SEMANTIC_KERNEL_SYSTEM_PROMPT', variables('semanticKernelSystemPrompt'), 'APP_ENV', parameters('appEnvironment')), if(equals(parameters('databaseType'), 'CosmosDB'), createObject('AZURE_COSMOSDB_ACCOUNT_NAME', reference(extensionResourceId(format('/subscriptions/{0}/resourceGroups/{1}', subscription().subscriptionId, parameters('rgName')), 'Microsoft.Resources/deployments', 'deploy_cosmos_db'), '2022-09-01').outputs.cosmosOutput.value.cosmosAccountName, 'AZURE_COSMOSDB_DATABASE_NAME', reference(extensionResourceId(format('/subscriptions/{0}/resourceGroups/{1}', subscription().subscriptionId, parameters('rgName')), 'Microsoft.Resources/deployments', 'deploy_cosmos_db'), '2022-09-01').outputs.cosmosOutput.value.cosmosDatabaseName, 'AZURE_COSMOSDB_CONVERSATIONS_CONTAINER_NAME', reference(extensionResourceId(format('/subscriptions/{0}/resourceGroups/{1}', subscription().subscriptionId, parameters('rgName')), 'Microsoft.Resources/deployments', 'deploy_cosmos_db'), '2022-09-01').outputs.cosmosOutput.value.cosmosContainerName, 
'AZURE_COSMOSDB_ENABLE_FEEDBACK', true(), 'AZURE_SEARCH_USE_SEMANTIC_SEARCH', parameters('azureSearchUseSemanticSearch'), 'AZURE_SEARCH_SERVICE', format('https://{0}.search.windows.net', parameters('azureAISearchName')), 'AZURE_SEARCH_INDEX', parameters('azureSearchIndex'), 'AZURE_SEARCH_CONVERSATIONS_LOG_INDEX', parameters('azureSearchConversationLogIndex'), 'AZURE_SEARCH_SEMANTIC_SEARCH_CONFIG', parameters('azureSearchSemanticSearchConfig'), 'AZURE_SEARCH_INDEX_IS_PRECHUNKED', parameters('azureSearchIndexIsPrechunked'), 'AZURE_SEARCH_TOP_K', parameters('azureSearchTopK'), 'AZURE_SEARCH_ENABLE_IN_DOMAIN', parameters('azureSearchEnableInDomain'), 'AZURE_SEARCH_FILENAME_COLUMN', parameters('azureSearchFilenameColumn'), 'AZURE_SEARCH_FILTER', parameters('azureSearchFilter'), 'AZURE_SEARCH_FIELDS_ID', parameters('azureSearchFieldId'), 'AZURE_SEARCH_CONTENT_COLUMN', parameters('azureSearchContentColumn'), 'AZURE_SEARCH_CONTENT_VECTOR_COLUMN', parameters('azureSearchVectorColumn'), 'AZURE_SEARCH_TITLE_COLUMN', parameters('azureSearchTitleColumn'), 'AZURE_SEARCH_FIELDS_METADATA', parameters('azureSearchFieldsMetadata'), 'AZURE_SEARCH_SOURCE_COLUMN', parameters('azureSearchSourceColumn'), 'AZURE_SEARCH_TEXT_COLUMN', if(parameters('azureSearchUseIntegratedVectorization'), parameters('azureSearchTextColumn'), ''), 'AZURE_SEARCH_LAYOUT_TEXT_COLUMN', if(parameters('azureSearchUseIntegratedVectorization'), parameters('azureSearchLayoutTextColumn'), ''), 'AZURE_SEARCH_CHUNK_COLUMN', parameters('azureSearchChunkColumn'), 'AZURE_SEARCH_OFFSET_COLUMN', parameters('azureSearchOffsetColumn'), 'AZURE_SEARCH_URL_COLUMN', parameters('azureSearchUrlColumn'), 'AZURE_SEARCH_USE_INTEGRATED_VECTORIZATION', parameters('azureSearchUseIntegratedVectorization')), if(equals(parameters('databaseType'), 'PostgreSQL'), createObject('AZURE_POSTGRESQL_HOST_NAME', reference(extensionResourceId(format('/subscriptions/{0}/resourceGroups/{1}', subscription().subscriptionId, parameters('rgName')), 
'Microsoft.Resources/deployments', 'deploy_postgres_sql'), '2022-09-01').outputs.postgresDbOutput.value.postgreSQLServerName, 'AZURE_POSTGRESQL_DATABASE_NAME', reference(extensionResourceId(format('/subscriptions/{0}/resourceGroups/{1}', subscription().subscriptionId, parameters('rgName')), 'Microsoft.Resources/deployments', 'deploy_postgres_sql'), '2022-09-01').outputs.postgresDbOutput.value.postgreSQLDatabaseName, 'AZURE_POSTGRESQL_USER', parameters('websiteName')), createObject())))]" } }, "template": { @@ -2909,8 +2924,8 @@ "metadata": { "_generator": { "name": "bicep", - "version": "0.36.1.42791", - "templateHash": "16642907920540663373" + "version": "0.36.177.2456", + "templateHash": "13467324760773914188" } }, "parameters": { @@ -3098,8 +3113,8 @@ "metadata": { "_generator": { "name": "bicep", - "version": "0.36.1.42791", - "templateHash": "7056589812564712804" + "version": "0.36.177.2456", + "templateHash": "5520933625191059047" }, "description": "Creates an Azure App Service in an existing Azure App Service plan." }, @@ -3325,8 +3340,8 @@ "metadata": { "_generator": { "name": "bicep", - "version": "0.36.1.42791", - "templateHash": "8872422051335608470" + "version": "0.36.177.2456", + "templateHash": "2114937881746412139" }, "description": "Updates app settings for an Azure App Service." }, @@ -3403,8 +3418,8 @@ "metadata": { "_generator": { "name": "bicep", - "version": "0.36.1.42791", - "templateHash": "1525080529756490231" + "version": "0.36.177.2456", + "templateHash": "14223167216489085881" }, "description": "Creates a role assignment for a service principal." }, @@ -3472,8 +3487,8 @@ "metadata": { "_generator": { "name": "bicep", - "version": "0.36.1.42791", - "templateHash": "1525080529756490231" + "version": "0.36.177.2456", + "templateHash": "14223167216489085881" }, "description": "Creates a role assignment for a service principal." 
}, @@ -3541,8 +3556,8 @@ "metadata": { "_generator": { "name": "bicep", - "version": "0.36.1.42791", - "templateHash": "1525080529756490231" + "version": "0.36.177.2456", + "templateHash": "14223167216489085881" }, "description": "Creates a role assignment for a service principal." }, @@ -3610,8 +3625,8 @@ "metadata": { "_generator": { "name": "bicep", - "version": "0.36.1.42791", - "templateHash": "1525080529756490231" + "version": "0.36.177.2456", + "templateHash": "14223167216489085881" }, "description": "Creates a role assignment for a service principal." }, @@ -3676,8 +3691,8 @@ "metadata": { "_generator": { "name": "bicep", - "version": "0.36.1.42791", - "templateHash": "13097350302282890335" + "version": "0.36.177.2456", + "templateHash": "15649900872986233495" }, "description": "Assigns an Azure Key Vault access policy." }, @@ -3751,8 +3766,8 @@ "metadata": { "_generator": { "name": "bicep", - "version": "0.36.1.42791", - "templateHash": "399023243105742355" + "version": "0.36.177.2456", + "templateHash": "9287160422728403181" }, "description": "Creates a SQL role assignment under an Azure Cosmos DB account." 
}, @@ -3889,7 +3904,7 @@ "value": "[parameters('authType')]" }, "appSettings": { - "value": "[union(createObject('AZURE_BLOB_ACCOUNT_NAME', parameters('storageAccountName'), 'AZURE_BLOB_CONTAINER_NAME', variables('blobContainerName'), 'AZURE_FORM_RECOGNIZER_ENDPOINT', reference(extensionResourceId(format('/subscriptions/{0}/resourceGroups/{1}', subscription().subscriptionId, parameters('rgName')), 'Microsoft.Resources/deployments', parameters('formRecognizerName')), '2022-09-01').outputs.endpoint.value, 'AZURE_COMPUTER_VISION_ENDPOINT', if(parameters('useAdvancedImageProcessing'), reference(extensionResourceId(format('/subscriptions/{0}/resourceGroups/{1}', subscription().subscriptionId, parameters('rgName')), 'Microsoft.Resources/deployments', 'computerVision'), '2022-09-01').outputs.endpoint.value, ''), 'AZURE_COMPUTER_VISION_VECTORIZE_IMAGE_API_VERSION', parameters('computerVisionVectorizeImageApiVersion'), 'AZURE_COMPUTER_VISION_VECTORIZE_IMAGE_MODEL_VERSION', parameters('computerVisionVectorizeImageModelVersion'), 'AZURE_CONTENT_SAFETY_ENDPOINT', reference(extensionResourceId(format('/subscriptions/{0}/resourceGroups/{1}', subscription().subscriptionId, parameters('rgName')), 'Microsoft.Resources/deployments', parameters('contentSafetyName')), '2022-09-01').outputs.endpoint.value, 'AZURE_OPENAI_RESOURCE', parameters('azureOpenAIResourceName'), 'AZURE_OPENAI_MODEL', parameters('azureOpenAIModel'), 'AZURE_OPENAI_MODEL_NAME', parameters('azureOpenAIModelName'), 'AZURE_OPENAI_MODEL_VERSION', parameters('azureOpenAIModelVersion'), 'AZURE_OPENAI_TEMPERATURE', parameters('azureOpenAITemperature'), 'AZURE_OPENAI_TOP_P', parameters('azureOpenAITopP'), 'AZURE_OPENAI_MAX_TOKENS', parameters('azureOpenAIMaxTokens'), 'AZURE_OPENAI_STOP_SEQUENCE', parameters('azureOpenAIStopSequence'), 'AZURE_OPENAI_SYSTEM_MESSAGE', parameters('azureOpenAISystemMessage'), 'AZURE_OPENAI_API_VERSION', parameters('azureOpenAIApiVersion'), 'AZURE_OPENAI_STREAM', parameters('azureOpenAIStream'), 
'AZURE_OPENAI_EMBEDDING_MODEL', parameters('azureOpenAIEmbeddingModel'), 'AZURE_OPENAI_EMBEDDING_MODEL_NAME', parameters('azureOpenAIEmbeddingModelName'), 'AZURE_OPENAI_EMBEDDING_MODEL_VERSION', parameters('azureOpenAIEmbeddingModelVersion'), 'AZURE_SPEECH_SERVICE_NAME', parameters('speechServiceName'), 'AZURE_SPEECH_SERVICE_REGION', parameters('location'), 'AZURE_SPEECH_RECOGNIZER_LANGUAGES', parameters('recognizedLanguages'), 'USE_ADVANCED_IMAGE_PROCESSING', parameters('useAdvancedImageProcessing'), 'ADVANCED_IMAGE_PROCESSING_MAX_IMAGES', parameters('advancedImageProcessingMaxImages'), 'ORCHESTRATION_STRATEGY', parameters('orchestrationStrategy'), 'CONVERSATION_FLOW', parameters('conversationFlow'), 'LOGLEVEL', parameters('logLevel'), 'DATABASE_TYPE', parameters('databaseType'), 'OPEN_AI_FUNCTIONS_SYSTEM_PROMPT', variables('openAIFunctionsSystemPrompt'), 'SEMENTIC_KERNEL_SYSTEM_PROMPT', variables('semanticKernelSystemPrompt')), if(equals(parameters('databaseType'), 'CosmosDB'), createObject('AZURE_COSMOSDB_ACCOUNT_NAME', reference(extensionResourceId(format('/subscriptions/{0}/resourceGroups/{1}', subscription().subscriptionId, parameters('rgName')), 'Microsoft.Resources/deployments', 'deploy_cosmos_db'), '2022-09-01').outputs.cosmosOutput.value.cosmosAccountName, 'AZURE_COSMOSDB_DATABASE_NAME', reference(extensionResourceId(format('/subscriptions/{0}/resourceGroups/{1}', subscription().subscriptionId, parameters('rgName')), 'Microsoft.Resources/deployments', 'deploy_cosmos_db'), '2022-09-01').outputs.cosmosOutput.value.cosmosDatabaseName, 'AZURE_COSMOSDB_CONVERSATIONS_CONTAINER_NAME', reference(extensionResourceId(format('/subscriptions/{0}/resourceGroups/{1}', subscription().subscriptionId, parameters('rgName')), 'Microsoft.Resources/deployments', 'deploy_cosmos_db'), '2022-09-01').outputs.cosmosOutput.value.cosmosContainerName, 'AZURE_COSMOSDB_ENABLE_FEEDBACK', true(), 'AZURE_SEARCH_USE_SEMANTIC_SEARCH', parameters('azureSearchUseSemanticSearch'), 
'AZURE_SEARCH_SERVICE', format('https://{0}.search.windows.net', parameters('azureAISearchName')), 'AZURE_SEARCH_INDEX', parameters('azureSearchIndex'), 'AZURE_SEARCH_CONVERSATIONS_LOG_INDEX', parameters('azureSearchConversationLogIndex'), 'AZURE_SEARCH_SEMANTIC_SEARCH_CONFIG', parameters('azureSearchSemanticSearchConfig'), 'AZURE_SEARCH_INDEX_IS_PRECHUNKED', parameters('azureSearchIndexIsPrechunked'), 'AZURE_SEARCH_TOP_K', parameters('azureSearchTopK'), 'AZURE_SEARCH_ENABLE_IN_DOMAIN', parameters('azureSearchEnableInDomain'), 'AZURE_SEARCH_FILENAME_COLUMN', parameters('azureSearchFilenameColumn'), 'AZURE_SEARCH_FILTER', parameters('azureSearchFilter'), 'AZURE_SEARCH_FIELDS_ID', parameters('azureSearchFieldId'), 'AZURE_SEARCH_CONTENT_COLUMN', parameters('azureSearchContentColumn'), 'AZURE_SEARCH_CONTENT_VECTOR_COLUMN', parameters('azureSearchVectorColumn'), 'AZURE_SEARCH_TITLE_COLUMN', parameters('azureSearchTitleColumn'), 'AZURE_SEARCH_FIELDS_METADATA', parameters('azureSearchFieldsMetadata'), 'AZURE_SEARCH_SOURCE_COLUMN', parameters('azureSearchSourceColumn'), 'AZURE_SEARCH_CHUNK_COLUMN', parameters('azureSearchChunkColumn'), 'AZURE_SEARCH_OFFSET_COLUMN', parameters('azureSearchOffsetColumn'), 'AZURE_SEARCH_URL_COLUMN', parameters('azureSearchUrlColumn'), 'AZURE_SEARCH_USE_INTEGRATED_VECTORIZATION', parameters('azureSearchUseIntegratedVectorization')), if(equals(parameters('databaseType'), 'PostgreSQL'), createObject('AZURE_POSTGRESQL_HOST_NAME', reference(extensionResourceId(format('/subscriptions/{0}/resourceGroups/{1}', subscription().subscriptionId, parameters('rgName')), 'Microsoft.Resources/deployments', 'deploy_postgres_sql'), '2022-09-01').outputs.postgresDbOutput.value.postgreSQLServerName, 'AZURE_POSTGRESQL_DATABASE_NAME', reference(extensionResourceId(format('/subscriptions/{0}/resourceGroups/{1}', subscription().subscriptionId, parameters('rgName')), 'Microsoft.Resources/deployments', 'deploy_postgres_sql'), 
'2022-09-01').outputs.postgresDbOutput.value.postgreSQLDatabaseName, 'AZURE_POSTGRESQL_USER', format('{0}-docker', parameters('websiteName'))), createObject())))]" + "value": "[union(createObject('AZURE_BLOB_ACCOUNT_NAME', parameters('storageAccountName'), 'AZURE_BLOB_CONTAINER_NAME', variables('blobContainerName'), 'AZURE_FORM_RECOGNIZER_ENDPOINT', reference(extensionResourceId(format('/subscriptions/{0}/resourceGroups/{1}', subscription().subscriptionId, parameters('rgName')), 'Microsoft.Resources/deployments', parameters('formRecognizerName')), '2022-09-01').outputs.endpoint.value, 'AZURE_COMPUTER_VISION_ENDPOINT', if(parameters('useAdvancedImageProcessing'), reference(extensionResourceId(format('/subscriptions/{0}/resourceGroups/{1}', subscription().subscriptionId, parameters('rgName')), 'Microsoft.Resources/deployments', 'computerVision'), '2022-09-01').outputs.endpoint.value, ''), 'AZURE_COMPUTER_VISION_VECTORIZE_IMAGE_API_VERSION', parameters('computerVisionVectorizeImageApiVersion'), 'AZURE_COMPUTER_VISION_VECTORIZE_IMAGE_MODEL_VERSION', parameters('computerVisionVectorizeImageModelVersion'), 'AZURE_CONTENT_SAFETY_ENDPOINT', reference(extensionResourceId(format('/subscriptions/{0}/resourceGroups/{1}', subscription().subscriptionId, parameters('rgName')), 'Microsoft.Resources/deployments', parameters('contentSafetyName')), '2022-09-01').outputs.endpoint.value, 'AZURE_OPENAI_RESOURCE', parameters('azureOpenAIResourceName'), 'AZURE_OPENAI_MODEL', parameters('azureOpenAIModel'), 'AZURE_OPENAI_MODEL_NAME', parameters('azureOpenAIModelName'), 'AZURE_OPENAI_MODEL_VERSION', parameters('azureOpenAIModelVersion'), 'AZURE_OPENAI_TEMPERATURE', parameters('azureOpenAITemperature'), 'AZURE_OPENAI_TOP_P', parameters('azureOpenAITopP'), 'AZURE_OPENAI_MAX_TOKENS', parameters('azureOpenAIMaxTokens'), 'AZURE_OPENAI_STOP_SEQUENCE', parameters('azureOpenAIStopSequence'), 'AZURE_OPENAI_SYSTEM_MESSAGE', parameters('azureOpenAISystemMessage'), 'AZURE_OPENAI_API_VERSION', 
parameters('azureOpenAIApiVersion'), 'AZURE_OPENAI_STREAM', parameters('azureOpenAIStream'), 'AZURE_OPENAI_EMBEDDING_MODEL', parameters('azureOpenAIEmbeddingModel'), 'AZURE_OPENAI_EMBEDDING_MODEL_NAME', parameters('azureOpenAIEmbeddingModelName'), 'AZURE_OPENAI_EMBEDDING_MODEL_VERSION', parameters('azureOpenAIEmbeddingModelVersion'), 'AZURE_SPEECH_SERVICE_NAME', parameters('speechServiceName'), 'AZURE_SPEECH_SERVICE_REGION', parameters('location'), 'AZURE_SPEECH_RECOGNIZER_LANGUAGES', parameters('recognizedLanguages'), 'USE_ADVANCED_IMAGE_PROCESSING', parameters('useAdvancedImageProcessing'), 'ADVANCED_IMAGE_PROCESSING_MAX_IMAGES', parameters('advancedImageProcessingMaxImages'), 'ORCHESTRATION_STRATEGY', parameters('orchestrationStrategy'), 'CONVERSATION_FLOW', parameters('conversationFlow'), 'LOGLEVEL', parameters('logLevel'), 'DATABASE_TYPE', parameters('databaseType'), 'OPEN_AI_FUNCTIONS_SYSTEM_PROMPT', variables('openAIFunctionsSystemPrompt'), 'SEMANTIC_KERNEL_SYSTEM_PROMPT', variables('semanticKernelSystemPrompt'), 'APP_ENV', parameters('appEnvironment')), if(equals(parameters('databaseType'), 'CosmosDB'), createObject('AZURE_COSMOSDB_ACCOUNT_NAME', reference(extensionResourceId(format('/subscriptions/{0}/resourceGroups/{1}', subscription().subscriptionId, parameters('rgName')), 'Microsoft.Resources/deployments', 'deploy_cosmos_db'), '2022-09-01').outputs.cosmosOutput.value.cosmosAccountName, 'AZURE_COSMOSDB_DATABASE_NAME', reference(extensionResourceId(format('/subscriptions/{0}/resourceGroups/{1}', subscription().subscriptionId, parameters('rgName')), 'Microsoft.Resources/deployments', 'deploy_cosmos_db'), '2022-09-01').outputs.cosmosOutput.value.cosmosDatabaseName, 'AZURE_COSMOSDB_CONVERSATIONS_CONTAINER_NAME', reference(extensionResourceId(format('/subscriptions/{0}/resourceGroups/{1}', subscription().subscriptionId, parameters('rgName')), 'Microsoft.Resources/deployments', 'deploy_cosmos_db'), '2022-09-01').outputs.cosmosOutput.value.cosmosContainerName, 
'AZURE_COSMOSDB_ENABLE_FEEDBACK', true(), 'AZURE_SEARCH_USE_SEMANTIC_SEARCH', parameters('azureSearchUseSemanticSearch'), 'AZURE_SEARCH_SERVICE', format('https://{0}.search.windows.net', parameters('azureAISearchName')), 'AZURE_SEARCH_INDEX', parameters('azureSearchIndex'), 'AZURE_SEARCH_CONVERSATIONS_LOG_INDEX', parameters('azureSearchConversationLogIndex'), 'AZURE_SEARCH_SEMANTIC_SEARCH_CONFIG', parameters('azureSearchSemanticSearchConfig'), 'AZURE_SEARCH_INDEX_IS_PRECHUNKED', parameters('azureSearchIndexIsPrechunked'), 'AZURE_SEARCH_TOP_K', parameters('azureSearchTopK'), 'AZURE_SEARCH_ENABLE_IN_DOMAIN', parameters('azureSearchEnableInDomain'), 'AZURE_SEARCH_FILENAME_COLUMN', parameters('azureSearchFilenameColumn'), 'AZURE_SEARCH_FILTER', parameters('azureSearchFilter'), 'AZURE_SEARCH_FIELDS_ID', parameters('azureSearchFieldId'), 'AZURE_SEARCH_CONTENT_COLUMN', parameters('azureSearchContentColumn'), 'AZURE_SEARCH_CONTENT_VECTOR_COLUMN', parameters('azureSearchVectorColumn'), 'AZURE_SEARCH_TITLE_COLUMN', parameters('azureSearchTitleColumn'), 'AZURE_SEARCH_FIELDS_METADATA', parameters('azureSearchFieldsMetadata'), 'AZURE_SEARCH_SOURCE_COLUMN', parameters('azureSearchSourceColumn'), 'AZURE_SEARCH_TEXT_COLUMN', if(parameters('azureSearchUseIntegratedVectorization'), parameters('azureSearchTextColumn'), ''), 'AZURE_SEARCH_LAYOUT_TEXT_COLUMN', if(parameters('azureSearchUseIntegratedVectorization'), parameters('azureSearchLayoutTextColumn'), ''), 'AZURE_SEARCH_CHUNK_COLUMN', parameters('azureSearchChunkColumn'), 'AZURE_SEARCH_OFFSET_COLUMN', parameters('azureSearchOffsetColumn'), 'AZURE_SEARCH_URL_COLUMN', parameters('azureSearchUrlColumn'), 'AZURE_SEARCH_USE_INTEGRATED_VECTORIZATION', parameters('azureSearchUseIntegratedVectorization')), if(equals(parameters('databaseType'), 'PostgreSQL'), createObject('AZURE_POSTGRESQL_HOST_NAME', reference(extensionResourceId(format('/subscriptions/{0}/resourceGroups/{1}', subscription().subscriptionId, parameters('rgName')), 
'Microsoft.Resources/deployments', 'deploy_postgres_sql'), '2022-09-01').outputs.postgresDbOutput.value.postgreSQLServerName, 'AZURE_POSTGRESQL_DATABASE_NAME', reference(extensionResourceId(format('/subscriptions/{0}/resourceGroups/{1}', subscription().subscriptionId, parameters('rgName')), 'Microsoft.Resources/deployments', 'deploy_postgres_sql'), '2022-09-01').outputs.postgresDbOutput.value.postgreSQLDatabaseName, 'AZURE_POSTGRESQL_USER', format('{0}-docker', parameters('websiteName'))), createObject())))]" } }, "template": { @@ -3898,8 +3913,8 @@ "metadata": { "_generator": { "name": "bicep", - "version": "0.36.1.42791", - "templateHash": "16642907920540663373" + "version": "0.36.177.2456", + "templateHash": "13467324760773914188" } }, "parameters": { @@ -4087,8 +4102,8 @@ "metadata": { "_generator": { "name": "bicep", - "version": "0.36.1.42791", - "templateHash": "7056589812564712804" + "version": "0.36.177.2456", + "templateHash": "5520933625191059047" }, "description": "Creates an Azure App Service in an existing Azure App Service plan." }, @@ -4314,8 +4329,8 @@ "metadata": { "_generator": { "name": "bicep", - "version": "0.36.1.42791", - "templateHash": "8872422051335608470" + "version": "0.36.177.2456", + "templateHash": "2114937881746412139" }, "description": "Updates app settings for an Azure App Service." }, @@ -4392,8 +4407,8 @@ "metadata": { "_generator": { "name": "bicep", - "version": "0.36.1.42791", - "templateHash": "1525080529756490231" + "version": "0.36.177.2456", + "templateHash": "14223167216489085881" }, "description": "Creates a role assignment for a service principal." }, @@ -4461,8 +4476,8 @@ "metadata": { "_generator": { "name": "bicep", - "version": "0.36.1.42791", - "templateHash": "1525080529756490231" + "version": "0.36.177.2456", + "templateHash": "14223167216489085881" }, "description": "Creates a role assignment for a service principal." 
}, @@ -4530,8 +4545,8 @@ "metadata": { "_generator": { "name": "bicep", - "version": "0.36.1.42791", - "templateHash": "1525080529756490231" + "version": "0.36.177.2456", + "templateHash": "14223167216489085881" }, "description": "Creates a role assignment for a service principal." }, @@ -4599,8 +4614,8 @@ "metadata": { "_generator": { "name": "bicep", - "version": "0.36.1.42791", - "templateHash": "1525080529756490231" + "version": "0.36.177.2456", + "templateHash": "14223167216489085881" }, "description": "Creates a role assignment for a service principal." }, @@ -4665,8 +4680,8 @@ "metadata": { "_generator": { "name": "bicep", - "version": "0.36.1.42791", - "templateHash": "13097350302282890335" + "version": "0.36.177.2456", + "templateHash": "15649900872986233495" }, "description": "Assigns an Azure Key Vault access policy." }, @@ -4740,8 +4755,8 @@ "metadata": { "_generator": { "name": "bicep", - "version": "0.36.1.42791", - "templateHash": "399023243105742355" + "version": "0.36.177.2456", + "templateHash": "9287160422728403181" }, "description": "Creates a SQL role assignment under an Azure Cosmos DB account." 
}, @@ -4877,7 +4892,7 @@ "value": "[parameters('databaseType')]" }, "appSettings": { - "value": "[union(createObject('AZURE_BLOB_ACCOUNT_NAME', parameters('storageAccountName'), 'AZURE_BLOB_CONTAINER_NAME', variables('blobContainerName'), 'AZURE_FORM_RECOGNIZER_ENDPOINT', reference(extensionResourceId(format('/subscriptions/{0}/resourceGroups/{1}', subscription().subscriptionId, parameters('rgName')), 'Microsoft.Resources/deployments', parameters('formRecognizerName')), '2022-09-01').outputs.endpoint.value, 'AZURE_COMPUTER_VISION_ENDPOINT', if(parameters('useAdvancedImageProcessing'), reference(extensionResourceId(format('/subscriptions/{0}/resourceGroups/{1}', subscription().subscriptionId, parameters('rgName')), 'Microsoft.Resources/deployments', 'computerVision'), '2022-09-01').outputs.endpoint.value, ''), 'AZURE_COMPUTER_VISION_VECTORIZE_IMAGE_API_VERSION', parameters('computerVisionVectorizeImageApiVersion'), 'AZURE_COMPUTER_VISION_VECTORIZE_IMAGE_MODEL_VERSION', parameters('computerVisionVectorizeImageModelVersion'), 'AZURE_CONTENT_SAFETY_ENDPOINT', reference(extensionResourceId(format('/subscriptions/{0}/resourceGroups/{1}', subscription().subscriptionId, parameters('rgName')), 'Microsoft.Resources/deployments', parameters('contentSafetyName')), '2022-09-01').outputs.endpoint.value, 'AZURE_OPENAI_RESOURCE', parameters('azureOpenAIResourceName'), 'AZURE_OPENAI_MODEL', parameters('azureOpenAIModel'), 'AZURE_OPENAI_MODEL_NAME', parameters('azureOpenAIModelName'), 'AZURE_OPENAI_MODEL_VERSION', parameters('azureOpenAIModelVersion'), 'AZURE_OPENAI_TEMPERATURE', parameters('azureOpenAITemperature'), 'AZURE_OPENAI_TOP_P', parameters('azureOpenAITopP'), 'AZURE_OPENAI_MAX_TOKENS', parameters('azureOpenAIMaxTokens'), 'AZURE_OPENAI_STOP_SEQUENCE', parameters('azureOpenAIStopSequence'), 'AZURE_OPENAI_SYSTEM_MESSAGE', parameters('azureOpenAISystemMessage'), 'AZURE_OPENAI_API_VERSION', parameters('azureOpenAIApiVersion'), 'AZURE_OPENAI_STREAM', 
parameters('azureOpenAIStream'), 'AZURE_OPENAI_EMBEDDING_MODEL', parameters('azureOpenAIEmbeddingModel'), 'AZURE_OPENAI_EMBEDDING_MODEL_NAME', parameters('azureOpenAIEmbeddingModelName'), 'AZURE_OPENAI_EMBEDDING_MODEL_VERSION', parameters('azureOpenAIEmbeddingModelVersion'), 'USE_ADVANCED_IMAGE_PROCESSING', parameters('useAdvancedImageProcessing'), 'BACKEND_URL', format('https://{0}.azurewebsites.net', parameters('functionName')), 'DOCUMENT_PROCESSING_QUEUE_NAME', variables('queueName'), 'FUNCTION_KEY', variables('clientKey'), 'ORCHESTRATION_STRATEGY', parameters('orchestrationStrategy'), 'CONVERSATION_FLOW', parameters('conversationFlow'), 'LOGLEVEL', parameters('logLevel'), 'DATABASE_TYPE', parameters('databaseType')), if(equals(parameters('databaseType'), 'CosmosDB'), createObject('AZURE_SEARCH_SERVICE', format('https://{0}.search.windows.net', parameters('azureAISearchName')), 'AZURE_SEARCH_INDEX', parameters('azureSearchIndex'), 'AZURE_SEARCH_USE_SEMANTIC_SEARCH', parameters('azureSearchUseSemanticSearch'), 'AZURE_SEARCH_SEMANTIC_SEARCH_CONFIG', parameters('azureSearchSemanticSearchConfig'), 'AZURE_SEARCH_INDEX_IS_PRECHUNKED', parameters('azureSearchIndexIsPrechunked'), 'AZURE_SEARCH_TOP_K', parameters('azureSearchTopK'), 'AZURE_SEARCH_ENABLE_IN_DOMAIN', parameters('azureSearchEnableInDomain'), 'AZURE_SEARCH_FILENAME_COLUMN', parameters('azureSearchFilenameColumn'), 'AZURE_SEARCH_FILTER', parameters('azureSearchFilter'), 'AZURE_SEARCH_FIELDS_ID', parameters('azureSearchFieldId'), 'AZURE_SEARCH_CONTENT_COLUMN', parameters('azureSearchContentColumn'), 'AZURE_SEARCH_CONTENT_VECTOR_COLUMN', parameters('azureSearchVectorColumn'), 'AZURE_SEARCH_TITLE_COLUMN', parameters('azureSearchTitleColumn'), 'AZURE_SEARCH_FIELDS_METADATA', parameters('azureSearchFieldsMetadata'), 'AZURE_SEARCH_SOURCE_COLUMN', parameters('azureSearchSourceColumn'), 'AZURE_SEARCH_CHUNK_COLUMN', parameters('azureSearchChunkColumn'), 'AZURE_SEARCH_OFFSET_COLUMN', 
parameters('azureSearchOffsetColumn'), 'AZURE_SEARCH_URL_COLUMN', parameters('azureSearchUrlColumn'), 'AZURE_SEARCH_DATASOURCE_NAME', parameters('azureSearchDatasource'), 'AZURE_SEARCH_INDEXER_NAME', parameters('azureSearchIndexer'), 'AZURE_SEARCH_USE_INTEGRATED_VECTORIZATION', parameters('azureSearchUseIntegratedVectorization')), if(equals(parameters('databaseType'), 'PostgreSQL'), createObject('AZURE_POSTGRESQL_HOST_NAME', reference(extensionResourceId(format('/subscriptions/{0}/resourceGroups/{1}', subscription().subscriptionId, parameters('rgName')), 'Microsoft.Resources/deployments', 'deploy_postgres_sql'), '2022-09-01').outputs.postgresDbOutput.value.postgreSQLServerName, 'AZURE_POSTGRESQL_DATABASE_NAME', reference(extensionResourceId(format('/subscriptions/{0}/resourceGroups/{1}', subscription().subscriptionId, parameters('rgName')), 'Microsoft.Resources/deployments', 'deploy_postgres_sql'), '2022-09-01').outputs.postgresDbOutput.value.postgreSQLDatabaseName, 'AZURE_POSTGRESQL_USER', parameters('adminWebsiteName')), createObject())))]" + "value": "[union(createObject('AZURE_BLOB_ACCOUNT_NAME', parameters('storageAccountName'), 'AZURE_BLOB_CONTAINER_NAME', variables('blobContainerName'), 'AZURE_FORM_RECOGNIZER_ENDPOINT', reference(extensionResourceId(format('/subscriptions/{0}/resourceGroups/{1}', subscription().subscriptionId, parameters('rgName')), 'Microsoft.Resources/deployments', parameters('formRecognizerName')), '2022-09-01').outputs.endpoint.value, 'AZURE_COMPUTER_VISION_ENDPOINT', if(parameters('useAdvancedImageProcessing'), reference(extensionResourceId(format('/subscriptions/{0}/resourceGroups/{1}', subscription().subscriptionId, parameters('rgName')), 'Microsoft.Resources/deployments', 'computerVision'), '2022-09-01').outputs.endpoint.value, ''), 'AZURE_COMPUTER_VISION_VECTORIZE_IMAGE_API_VERSION', parameters('computerVisionVectorizeImageApiVersion'), 'AZURE_COMPUTER_VISION_VECTORIZE_IMAGE_MODEL_VERSION', 
parameters('computerVisionVectorizeImageModelVersion'), 'AZURE_CONTENT_SAFETY_ENDPOINT', reference(extensionResourceId(format('/subscriptions/{0}/resourceGroups/{1}', subscription().subscriptionId, parameters('rgName')), 'Microsoft.Resources/deployments', parameters('contentSafetyName')), '2022-09-01').outputs.endpoint.value, 'AZURE_OPENAI_RESOURCE', parameters('azureOpenAIResourceName'), 'AZURE_OPENAI_MODEL', parameters('azureOpenAIModel'), 'AZURE_OPENAI_MODEL_NAME', parameters('azureOpenAIModelName'), 'AZURE_OPENAI_MODEL_VERSION', parameters('azureOpenAIModelVersion'), 'AZURE_OPENAI_TEMPERATURE', parameters('azureOpenAITemperature'), 'AZURE_OPENAI_TOP_P', parameters('azureOpenAITopP'), 'AZURE_OPENAI_MAX_TOKENS', parameters('azureOpenAIMaxTokens'), 'AZURE_OPENAI_STOP_SEQUENCE', parameters('azureOpenAIStopSequence'), 'AZURE_OPENAI_SYSTEM_MESSAGE', parameters('azureOpenAISystemMessage'), 'AZURE_OPENAI_API_VERSION', parameters('azureOpenAIApiVersion'), 'AZURE_OPENAI_STREAM', parameters('azureOpenAIStream'), 'AZURE_OPENAI_EMBEDDING_MODEL', parameters('azureOpenAIEmbeddingModel'), 'AZURE_OPENAI_EMBEDDING_MODEL_NAME', parameters('azureOpenAIEmbeddingModelName'), 'AZURE_OPENAI_EMBEDDING_MODEL_VERSION', parameters('azureOpenAIEmbeddingModelVersion'), 'USE_ADVANCED_IMAGE_PROCESSING', parameters('useAdvancedImageProcessing'), 'BACKEND_URL', format('https://{0}.azurewebsites.net', parameters('functionName')), 'DOCUMENT_PROCESSING_QUEUE_NAME', variables('queueName'), 'FUNCTION_KEY', variables('clientKey'), 'ORCHESTRATION_STRATEGY', parameters('orchestrationStrategy'), 'CONVERSATION_FLOW', parameters('conversationFlow'), 'LOGLEVEL', parameters('logLevel'), 'DATABASE_TYPE', parameters('databaseType'), 'APP_ENV', parameters('appEnvironment')), if(equals(parameters('databaseType'), 'CosmosDB'), createObject('AZURE_SEARCH_SERVICE', format('https://{0}.search.windows.net', parameters('azureAISearchName')), 'AZURE_SEARCH_INDEX', parameters('azureSearchIndex'), 
'AZURE_SEARCH_USE_SEMANTIC_SEARCH', parameters('azureSearchUseSemanticSearch'), 'AZURE_SEARCH_SEMANTIC_SEARCH_CONFIG', parameters('azureSearchSemanticSearchConfig'), 'AZURE_SEARCH_INDEX_IS_PRECHUNKED', parameters('azureSearchIndexIsPrechunked'), 'AZURE_SEARCH_TOP_K', parameters('azureSearchTopK'), 'AZURE_SEARCH_ENABLE_IN_DOMAIN', parameters('azureSearchEnableInDomain'), 'AZURE_SEARCH_FILENAME_COLUMN', parameters('azureSearchFilenameColumn'), 'AZURE_SEARCH_FILTER', parameters('azureSearchFilter'), 'AZURE_SEARCH_FIELDS_ID', parameters('azureSearchFieldId'), 'AZURE_SEARCH_CONTENT_COLUMN', parameters('azureSearchContentColumn'), 'AZURE_SEARCH_CONTENT_VECTOR_COLUMN', parameters('azureSearchVectorColumn'), 'AZURE_SEARCH_TITLE_COLUMN', parameters('azureSearchTitleColumn'), 'AZURE_SEARCH_FIELDS_METADATA', parameters('azureSearchFieldsMetadata'), 'AZURE_SEARCH_SOURCE_COLUMN', parameters('azureSearchSourceColumn'), 'AZURE_SEARCH_TEXT_COLUMN', if(parameters('azureSearchUseIntegratedVectorization'), parameters('azureSearchTextColumn'), ''), 'AZURE_SEARCH_LAYOUT_TEXT_COLUMN', if(parameters('azureSearchUseIntegratedVectorization'), parameters('azureSearchLayoutTextColumn'), ''), 'AZURE_SEARCH_CHUNK_COLUMN', parameters('azureSearchChunkColumn'), 'AZURE_SEARCH_OFFSET_COLUMN', parameters('azureSearchOffsetColumn'), 'AZURE_SEARCH_URL_COLUMN', parameters('azureSearchUrlColumn'), 'AZURE_SEARCH_DATASOURCE_NAME', parameters('azureSearchDatasource'), 'AZURE_SEARCH_INDEXER_NAME', parameters('azureSearchIndexer'), 'AZURE_SEARCH_USE_INTEGRATED_VECTORIZATION', parameters('azureSearchUseIntegratedVectorization')), if(equals(parameters('databaseType'), 'PostgreSQL'), createObject('AZURE_POSTGRESQL_HOST_NAME', reference(extensionResourceId(format('/subscriptions/{0}/resourceGroups/{1}', subscription().subscriptionId, parameters('rgName')), 'Microsoft.Resources/deployments', 'deploy_postgres_sql'), '2022-09-01').outputs.postgresDbOutput.value.postgreSQLServerName, 
'AZURE_POSTGRESQL_DATABASE_NAME', reference(extensionResourceId(format('/subscriptions/{0}/resourceGroups/{1}', subscription().subscriptionId, parameters('rgName')), 'Microsoft.Resources/deployments', 'deploy_postgres_sql'), '2022-09-01').outputs.postgresDbOutput.value.postgreSQLDatabaseName, 'AZURE_POSTGRESQL_USER', parameters('adminWebsiteName')), createObject())))]" } }, "template": { @@ -4886,8 +4901,8 @@ "metadata": { "_generator": { "name": "bicep", - "version": "0.36.1.42791", - "templateHash": "6540240067745016665" + "version": "0.36.177.2456", + "templateHash": "1282743988054217370" } }, "parameters": { @@ -5064,8 +5079,8 @@ "metadata": { "_generator": { "name": "bicep", - "version": "0.36.1.42791", - "templateHash": "7056589812564712804" + "version": "0.36.177.2456", + "templateHash": "5520933625191059047" }, "description": "Creates an Azure App Service in an existing Azure App Service plan." }, @@ -5291,8 +5306,8 @@ "metadata": { "_generator": { "name": "bicep", - "version": "0.36.1.42791", - "templateHash": "8872422051335608470" + "version": "0.36.177.2456", + "templateHash": "2114937881746412139" }, "description": "Updates app settings for an Azure App Service." }, @@ -5369,8 +5384,8 @@ "metadata": { "_generator": { "name": "bicep", - "version": "0.36.1.42791", - "templateHash": "1525080529756490231" + "version": "0.36.177.2456", + "templateHash": "14223167216489085881" }, "description": "Creates a role assignment for a service principal." }, @@ -5438,8 +5453,8 @@ "metadata": { "_generator": { "name": "bicep", - "version": "0.36.1.42791", - "templateHash": "1525080529756490231" + "version": "0.36.177.2456", + "templateHash": "14223167216489085881" }, "description": "Creates a role assignment for a service principal." 
}, @@ -5507,8 +5522,8 @@ "metadata": { "_generator": { "name": "bicep", - "version": "0.36.1.42791", - "templateHash": "1525080529756490231" + "version": "0.36.177.2456", + "templateHash": "14223167216489085881" }, "description": "Creates a role assignment for a service principal." }, @@ -5576,8 +5591,8 @@ "metadata": { "_generator": { "name": "bicep", - "version": "0.36.1.42791", - "templateHash": "1525080529756490231" + "version": "0.36.177.2456", + "templateHash": "14223167216489085881" }, "description": "Creates a role assignment for a service principal." }, @@ -5642,8 +5657,8 @@ "metadata": { "_generator": { "name": "bicep", - "version": "0.36.1.42791", - "templateHash": "13097350302282890335" + "version": "0.36.177.2456", + "templateHash": "15649900872986233495" }, "description": "Assigns an Azure Key Vault access policy." }, @@ -5788,7 +5803,7 @@ "value": "[parameters('databaseType')]" }, "appSettings": { - "value": "[union(createObject('AZURE_BLOB_ACCOUNT_NAME', parameters('storageAccountName'), 'AZURE_BLOB_CONTAINER_NAME', variables('blobContainerName'), 'AZURE_FORM_RECOGNIZER_ENDPOINT', reference(extensionResourceId(format('/subscriptions/{0}/resourceGroups/{1}', subscription().subscriptionId, parameters('rgName')), 'Microsoft.Resources/deployments', parameters('formRecognizerName')), '2022-09-01').outputs.endpoint.value, 'AZURE_COMPUTER_VISION_ENDPOINT', if(parameters('useAdvancedImageProcessing'), reference(extensionResourceId(format('/subscriptions/{0}/resourceGroups/{1}', subscription().subscriptionId, parameters('rgName')), 'Microsoft.Resources/deployments', 'computerVision'), '2022-09-01').outputs.endpoint.value, ''), 'AZURE_COMPUTER_VISION_VECTORIZE_IMAGE_API_VERSION', parameters('computerVisionVectorizeImageApiVersion'), 'AZURE_COMPUTER_VISION_VECTORIZE_IMAGE_MODEL_VERSION', parameters('computerVisionVectorizeImageModelVersion'), 'AZURE_CONTENT_SAFETY_ENDPOINT', reference(extensionResourceId(format('/subscriptions/{0}/resourceGroups/{1}', 
subscription().subscriptionId, parameters('rgName')), 'Microsoft.Resources/deployments', parameters('contentSafetyName')), '2022-09-01').outputs.endpoint.value, 'AZURE_OPENAI_RESOURCE', parameters('azureOpenAIResourceName'), 'AZURE_OPENAI_MODEL', parameters('azureOpenAIModel'), 'AZURE_OPENAI_MODEL_NAME', parameters('azureOpenAIModelName'), 'AZURE_OPENAI_MODEL_VERSION', parameters('azureOpenAIModelVersion'), 'AZURE_OPENAI_TEMPERATURE', parameters('azureOpenAITemperature'), 'AZURE_OPENAI_TOP_P', parameters('azureOpenAITopP'), 'AZURE_OPENAI_MAX_TOKENS', parameters('azureOpenAIMaxTokens'), 'AZURE_OPENAI_STOP_SEQUENCE', parameters('azureOpenAIStopSequence'), 'AZURE_OPENAI_SYSTEM_MESSAGE', parameters('azureOpenAISystemMessage'), 'AZURE_OPENAI_API_VERSION', parameters('azureOpenAIApiVersion'), 'AZURE_OPENAI_STREAM', parameters('azureOpenAIStream'), 'AZURE_OPENAI_EMBEDDING_MODEL', parameters('azureOpenAIEmbeddingModel'), 'AZURE_OPENAI_EMBEDDING_MODEL_NAME', parameters('azureOpenAIEmbeddingModelName'), 'AZURE_OPENAI_EMBEDDING_MODEL_VERSION', parameters('azureOpenAIEmbeddingModelVersion'), 'USE_ADVANCED_IMAGE_PROCESSING', parameters('useAdvancedImageProcessing'), 'BACKEND_URL', format('https://{0}-docker.azurewebsites.net', parameters('functionName')), 'DOCUMENT_PROCESSING_QUEUE_NAME', variables('queueName'), 'FUNCTION_KEY', variables('clientKey'), 'ORCHESTRATION_STRATEGY', parameters('orchestrationStrategy'), 'CONVERSATION_FLOW', parameters('conversationFlow'), 'LOGLEVEL', parameters('logLevel'), 'DATABASE_TYPE', parameters('databaseType')), if(equals(parameters('databaseType'), 'CosmosDB'), createObject('AZURE_SEARCH_SERVICE', format('https://{0}.search.windows.net', parameters('azureAISearchName')), 'AZURE_SEARCH_INDEX', parameters('azureSearchIndex'), 'AZURE_SEARCH_USE_SEMANTIC_SEARCH', parameters('azureSearchUseSemanticSearch'), 'AZURE_SEARCH_SEMANTIC_SEARCH_CONFIG', parameters('azureSearchSemanticSearchConfig'), 'AZURE_SEARCH_INDEX_IS_PRECHUNKED', 
parameters('azureSearchIndexIsPrechunked'), 'AZURE_SEARCH_TOP_K', parameters('azureSearchTopK'), 'AZURE_SEARCH_ENABLE_IN_DOMAIN', parameters('azureSearchEnableInDomain'), 'AZURE_SEARCH_FILENAME_COLUMN', parameters('azureSearchFilenameColumn'), 'AZURE_SEARCH_FILTER', parameters('azureSearchFilter'), 'AZURE_SEARCH_FIELDS_ID', parameters('azureSearchFieldId'), 'AZURE_SEARCH_CONTENT_COLUMN', parameters('azureSearchContentColumn'), 'AZURE_SEARCH_CONTENT_VECTOR_COLUMN', parameters('azureSearchVectorColumn'), 'AZURE_SEARCH_TITLE_COLUMN', parameters('azureSearchTitleColumn'), 'AZURE_SEARCH_FIELDS_METADATA', parameters('azureSearchFieldsMetadata'), 'AZURE_SEARCH_SOURCE_COLUMN', parameters('azureSearchSourceColumn'), 'AZURE_SEARCH_CHUNK_COLUMN', parameters('azureSearchChunkColumn'), 'AZURE_SEARCH_OFFSET_COLUMN', parameters('azureSearchOffsetColumn'), 'AZURE_SEARCH_URL_COLUMN', parameters('azureSearchUrlColumn'), 'AZURE_SEARCH_DATASOURCE_NAME', parameters('azureSearchDatasource'), 'AZURE_SEARCH_INDEXER_NAME', parameters('azureSearchIndexer'), 'AZURE_SEARCH_USE_INTEGRATED_VECTORIZATION', parameters('azureSearchUseIntegratedVectorization')), if(equals(parameters('databaseType'), 'PostgreSQL'), createObject('AZURE_POSTGRESQL_HOST_NAME', reference(extensionResourceId(format('/subscriptions/{0}/resourceGroups/{1}', subscription().subscriptionId, parameters('rgName')), 'Microsoft.Resources/deployments', 'deploy_postgres_sql'), '2022-09-01').outputs.postgresDbOutput.value.postgreSQLServerName, 'AZURE_POSTGRESQL_DATABASE_NAME', reference(extensionResourceId(format('/subscriptions/{0}/resourceGroups/{1}', subscription().subscriptionId, parameters('rgName')), 'Microsoft.Resources/deployments', 'deploy_postgres_sql'), '2022-09-01').outputs.postgresDbOutput.value.postgreSQLDatabaseName, 'AZURE_POSTGRESQL_USER', format('{0}-docker', parameters('adminWebsiteName'))), createObject())))]" + "value": "[union(createObject('AZURE_BLOB_ACCOUNT_NAME', parameters('storageAccountName'), 
'AZURE_BLOB_CONTAINER_NAME', variables('blobContainerName'), 'AZURE_FORM_RECOGNIZER_ENDPOINT', reference(extensionResourceId(format('/subscriptions/{0}/resourceGroups/{1}', subscription().subscriptionId, parameters('rgName')), 'Microsoft.Resources/deployments', parameters('formRecognizerName')), '2022-09-01').outputs.endpoint.value, 'AZURE_COMPUTER_VISION_ENDPOINT', if(parameters('useAdvancedImageProcessing'), reference(extensionResourceId(format('/subscriptions/{0}/resourceGroups/{1}', subscription().subscriptionId, parameters('rgName')), 'Microsoft.Resources/deployments', 'computerVision'), '2022-09-01').outputs.endpoint.value, ''), 'AZURE_COMPUTER_VISION_VECTORIZE_IMAGE_API_VERSION', parameters('computerVisionVectorizeImageApiVersion'), 'AZURE_COMPUTER_VISION_VECTORIZE_IMAGE_MODEL_VERSION', parameters('computerVisionVectorizeImageModelVersion'), 'AZURE_CONTENT_SAFETY_ENDPOINT', reference(extensionResourceId(format('/subscriptions/{0}/resourceGroups/{1}', subscription().subscriptionId, parameters('rgName')), 'Microsoft.Resources/deployments', parameters('contentSafetyName')), '2022-09-01').outputs.endpoint.value, 'AZURE_OPENAI_RESOURCE', parameters('azureOpenAIResourceName'), 'AZURE_OPENAI_MODEL', parameters('azureOpenAIModel'), 'AZURE_OPENAI_MODEL_NAME', parameters('azureOpenAIModelName'), 'AZURE_OPENAI_MODEL_VERSION', parameters('azureOpenAIModelVersion'), 'AZURE_OPENAI_TEMPERATURE', parameters('azureOpenAITemperature'), 'AZURE_OPENAI_TOP_P', parameters('azureOpenAITopP'), 'AZURE_OPENAI_MAX_TOKENS', parameters('azureOpenAIMaxTokens'), 'AZURE_OPENAI_STOP_SEQUENCE', parameters('azureOpenAIStopSequence'), 'AZURE_OPENAI_SYSTEM_MESSAGE', parameters('azureOpenAISystemMessage'), 'AZURE_OPENAI_API_VERSION', parameters('azureOpenAIApiVersion'), 'AZURE_OPENAI_STREAM', parameters('azureOpenAIStream'), 'AZURE_OPENAI_EMBEDDING_MODEL', parameters('azureOpenAIEmbeddingModel'), 'AZURE_OPENAI_EMBEDDING_MODEL_NAME', parameters('azureOpenAIEmbeddingModelName'), 
'AZURE_OPENAI_EMBEDDING_MODEL_VERSION', parameters('azureOpenAIEmbeddingModelVersion'), 'USE_ADVANCED_IMAGE_PROCESSING', parameters('useAdvancedImageProcessing'), 'BACKEND_URL', format('https://{0}-docker.azurewebsites.net', parameters('functionName')), 'DOCUMENT_PROCESSING_QUEUE_NAME', variables('queueName'), 'FUNCTION_KEY', variables('clientKey'), 'ORCHESTRATION_STRATEGY', parameters('orchestrationStrategy'), 'CONVERSATION_FLOW', parameters('conversationFlow'), 'LOGLEVEL', parameters('logLevel'), 'DATABASE_TYPE', parameters('databaseType'), 'APP_ENV', parameters('appEnvironment')), if(equals(parameters('databaseType'), 'CosmosDB'), createObject('AZURE_SEARCH_SERVICE', format('https://{0}.search.windows.net', parameters('azureAISearchName')), 'AZURE_SEARCH_INDEX', parameters('azureSearchIndex'), 'AZURE_SEARCH_USE_SEMANTIC_SEARCH', parameters('azureSearchUseSemanticSearch'), 'AZURE_SEARCH_SEMANTIC_SEARCH_CONFIG', parameters('azureSearchSemanticSearchConfig'), 'AZURE_SEARCH_INDEX_IS_PRECHUNKED', parameters('azureSearchIndexIsPrechunked'), 'AZURE_SEARCH_TOP_K', parameters('azureSearchTopK'), 'AZURE_SEARCH_ENABLE_IN_DOMAIN', parameters('azureSearchEnableInDomain'), 'AZURE_SEARCH_FILENAME_COLUMN', parameters('azureSearchFilenameColumn'), 'AZURE_SEARCH_FILTER', parameters('azureSearchFilter'), 'AZURE_SEARCH_FIELDS_ID', parameters('azureSearchFieldId'), 'AZURE_SEARCH_CONTENT_COLUMN', parameters('azureSearchContentColumn'), 'AZURE_SEARCH_CONTENT_VECTOR_COLUMN', parameters('azureSearchVectorColumn'), 'AZURE_SEARCH_TITLE_COLUMN', parameters('azureSearchTitleColumn'), 'AZURE_SEARCH_FIELDS_METADATA', parameters('azureSearchFieldsMetadata'), 'AZURE_SEARCH_SOURCE_COLUMN', parameters('azureSearchSourceColumn'), 'AZURE_SEARCH_TEXT_COLUMN', if(parameters('azureSearchUseIntegratedVectorization'), parameters('azureSearchTextColumn'), ''), 'AZURE_SEARCH_LAYOUT_TEXT_COLUMN', if(parameters('azureSearchUseIntegratedVectorization'), parameters('azureSearchLayoutTextColumn'), ''), 
'AZURE_SEARCH_CHUNK_COLUMN', parameters('azureSearchChunkColumn'), 'AZURE_SEARCH_OFFSET_COLUMN', parameters('azureSearchOffsetColumn'), 'AZURE_SEARCH_URL_COLUMN', parameters('azureSearchUrlColumn'), 'AZURE_SEARCH_DATASOURCE_NAME', parameters('azureSearchDatasource'), 'AZURE_SEARCH_INDEXER_NAME', parameters('azureSearchIndexer'), 'AZURE_SEARCH_USE_INTEGRATED_VECTORIZATION', parameters('azureSearchUseIntegratedVectorization')), if(equals(parameters('databaseType'), 'PostgreSQL'), createObject('AZURE_POSTGRESQL_HOST_NAME', reference(extensionResourceId(format('/subscriptions/{0}/resourceGroups/{1}', subscription().subscriptionId, parameters('rgName')), 'Microsoft.Resources/deployments', 'deploy_postgres_sql'), '2022-09-01').outputs.postgresDbOutput.value.postgreSQLServerName, 'AZURE_POSTGRESQL_DATABASE_NAME', reference(extensionResourceId(format('/subscriptions/{0}/resourceGroups/{1}', subscription().subscriptionId, parameters('rgName')), 'Microsoft.Resources/deployments', 'deploy_postgres_sql'), '2022-09-01').outputs.postgresDbOutput.value.postgreSQLDatabaseName, 'AZURE_POSTGRESQL_USER', format('{0}-docker', parameters('adminWebsiteName'))), createObject())))]" } }, "template": { @@ -5797,8 +5812,8 @@ "metadata": { "_generator": { "name": "bicep", - "version": "0.36.1.42791", - "templateHash": "6540240067745016665" + "version": "0.36.177.2456", + "templateHash": "1282743988054217370" } }, "parameters": { @@ -5975,8 +5990,8 @@ "metadata": { "_generator": { "name": "bicep", - "version": "0.36.1.42791", - "templateHash": "7056589812564712804" + "version": "0.36.177.2456", + "templateHash": "5520933625191059047" }, "description": "Creates an Azure App Service in an existing Azure App Service plan." }, @@ -6202,8 +6217,8 @@ "metadata": { "_generator": { "name": "bicep", - "version": "0.36.1.42791", - "templateHash": "8872422051335608470" + "version": "0.36.177.2456", + "templateHash": "2114937881746412139" }, "description": "Updates app settings for an Azure App Service." 
}, @@ -6280,8 +6295,8 @@ "metadata": { "_generator": { "name": "bicep", - "version": "0.36.1.42791", - "templateHash": "1525080529756490231" + "version": "0.36.177.2456", + "templateHash": "14223167216489085881" }, "description": "Creates a role assignment for a service principal." }, @@ -6349,8 +6364,8 @@ "metadata": { "_generator": { "name": "bicep", - "version": "0.36.1.42791", - "templateHash": "1525080529756490231" + "version": "0.36.177.2456", + "templateHash": "14223167216489085881" }, "description": "Creates a role assignment for a service principal." }, @@ -6418,8 +6433,8 @@ "metadata": { "_generator": { "name": "bicep", - "version": "0.36.1.42791", - "templateHash": "1525080529756490231" + "version": "0.36.177.2456", + "templateHash": "14223167216489085881" }, "description": "Creates a role assignment for a service principal." }, @@ -6487,8 +6502,8 @@ "metadata": { "_generator": { "name": "bicep", - "version": "0.36.1.42791", - "templateHash": "1525080529756490231" + "version": "0.36.177.2456", + "templateHash": "14223167216489085881" }, "description": "Creates a role assignment for a service principal." }, @@ -6553,8 +6568,8 @@ "metadata": { "_generator": { "name": "bicep", - "version": "0.36.1.42791", - "templateHash": "13097350302282890335" + "version": "0.36.177.2456", + "templateHash": "15649900872986233495" }, "description": "Assigns an Azure Key Vault access policy." }, @@ -6672,8 +6687,8 @@ "metadata": { "_generator": { "name": "bicep", - "version": "0.36.1.42791", - "templateHash": "3170567318042083360" + "version": "0.36.177.2456", + "templateHash": "16408332100626942691" }, "description": "Creates an Application Insights instance and a Log Analytics workspace." }, @@ -6731,8 +6746,8 @@ "metadata": { "_generator": { "name": "bicep", - "version": "0.36.1.42791", - "templateHash": "18046713010447151328" + "version": "0.36.177.2456", + "templateHash": "9147911423401029465" }, "description": "Creates a Log Analytics workspace." 
}, @@ -6823,8 +6838,8 @@ "metadata": { "_generator": { "name": "bicep", - "version": "0.36.1.42791", - "templateHash": "9396713012578391259" + "version": "0.36.177.2456", + "templateHash": "2629382887193957945" }, "description": "Creates an Application Insights instance based on an existing Log Analytics workspace." }, @@ -6888,8 +6903,8 @@ "metadata": { "_generator": { "name": "bicep", - "version": "0.36.1.42791", - "templateHash": "4772814496944658769" + "version": "0.36.177.2456", + "templateHash": "18292703974674172388" }, "description": "Creates a dashboard for an Application Insights instance." }, @@ -8225,8 +8240,8 @@ "metadata": { "_generator": { "name": "bicep", - "version": "0.36.1.42791", - "templateHash": "14423036048920164527" + "version": "0.36.177.2456", + "templateHash": "1827052244678670127" } }, "parameters": { @@ -8308,8 +8323,8 @@ "metadata": { "_generator": { "name": "bicep", - "version": "0.36.1.42791", - "templateHash": "14924009146925222912" + "version": "0.36.177.2456", + "templateHash": "16459689312294949684" } }, "parameters": { @@ -8465,7 +8480,7 @@ "value": "[parameters('databaseType')]" }, "appSettings": { - "value": "[union(createObject('AZURE_BLOB_ACCOUNT_NAME', parameters('storageAccountName'), 'AZURE_BLOB_CONTAINER_NAME', variables('blobContainerName'), 'AZURE_FORM_RECOGNIZER_ENDPOINT', reference(extensionResourceId(format('/subscriptions/{0}/resourceGroups/{1}', subscription().subscriptionId, parameters('rgName')), 'Microsoft.Resources/deployments', parameters('formRecognizerName')), '2022-09-01').outputs.endpoint.value, 'AZURE_COMPUTER_VISION_ENDPOINT', if(parameters('useAdvancedImageProcessing'), reference(extensionResourceId(format('/subscriptions/{0}/resourceGroups/{1}', subscription().subscriptionId, parameters('rgName')), 'Microsoft.Resources/deployments', 'computerVision'), '2022-09-01').outputs.endpoint.value, ''), 'AZURE_COMPUTER_VISION_VECTORIZE_IMAGE_API_VERSION', parameters('computerVisionVectorizeImageApiVersion'), 
'AZURE_COMPUTER_VISION_VECTORIZE_IMAGE_MODEL_VERSION', parameters('computerVisionVectorizeImageModelVersion'), 'AZURE_CONTENT_SAFETY_ENDPOINT', reference(extensionResourceId(format('/subscriptions/{0}/resourceGroups/{1}', subscription().subscriptionId, parameters('rgName')), 'Microsoft.Resources/deployments', parameters('contentSafetyName')), '2022-09-01').outputs.endpoint.value, 'AZURE_OPENAI_MODEL', parameters('azureOpenAIModel'), 'AZURE_OPENAI_MODEL_NAME', parameters('azureOpenAIModelName'), 'AZURE_OPENAI_MODEL_VERSION', parameters('azureOpenAIModelVersion'), 'AZURE_OPENAI_EMBEDDING_MODEL', parameters('azureOpenAIEmbeddingModel'), 'AZURE_OPENAI_EMBEDDING_MODEL_NAME', parameters('azureOpenAIEmbeddingModelName'), 'AZURE_OPENAI_EMBEDDING_MODEL_VERSION', parameters('azureOpenAIEmbeddingModelVersion'), 'AZURE_OPENAI_RESOURCE', parameters('azureOpenAIResourceName'), 'AZURE_OPENAI_API_VERSION', parameters('azureOpenAIApiVersion'), 'USE_ADVANCED_IMAGE_PROCESSING', parameters('useAdvancedImageProcessing'), 'DOCUMENT_PROCESSING_QUEUE_NAME', variables('queueName'), 'ORCHESTRATION_STRATEGY', parameters('orchestrationStrategy'), 'LOGLEVEL', parameters('logLevel'), 'AZURE_OPENAI_SYSTEM_MESSAGE', parameters('azureOpenAISystemMessage'), 'DATABASE_TYPE', parameters('databaseType')), if(equals(parameters('databaseType'), 'CosmosDB'), createObject('AZURE_SEARCH_INDEX', parameters('azureSearchIndex'), 'AZURE_SEARCH_SERVICE', format('https://{0}.search.windows.net', parameters('azureAISearchName')), 'AZURE_SEARCH_DATASOURCE_NAME', parameters('azureSearchDatasource'), 'AZURE_SEARCH_INDEXER_NAME', parameters('azureSearchIndexer'), 'AZURE_SEARCH_USE_INTEGRATED_VECTORIZATION', parameters('azureSearchUseIntegratedVectorization'), 'AZURE_SEARCH_FIELDS_ID', parameters('azureSearchFieldId'), 'AZURE_SEARCH_CONTENT_COLUMN', parameters('azureSearchContentColumn'), 'AZURE_SEARCH_CONTENT_VECTOR_COLUMN', parameters('azureSearchVectorColumn'), 'AZURE_SEARCH_TITLE_COLUMN', 
parameters('azureSearchTitleColumn'), 'AZURE_SEARCH_FIELDS_METADATA', parameters('azureSearchFieldsMetadata'), 'AZURE_SEARCH_SOURCE_COLUMN', parameters('azureSearchSourceColumn'), 'AZURE_SEARCH_CHUNK_COLUMN', parameters('azureSearchChunkColumn'), 'AZURE_SEARCH_OFFSET_COLUMN', parameters('azureSearchOffsetColumn'), 'AZURE_SEARCH_TOP_K', parameters('azureSearchTopK')), if(equals(parameters('databaseType'), 'PostgreSQL'), createObject('AZURE_POSTGRESQL_HOST_NAME', reference(extensionResourceId(format('/subscriptions/{0}/resourceGroups/{1}', subscription().subscriptionId, parameters('rgName')), 'Microsoft.Resources/deployments', 'deploy_postgres_sql'), '2022-09-01').outputs.postgresDbOutput.value.postgreSQLServerName, 'AZURE_POSTGRESQL_DATABASE_NAME', reference(extensionResourceId(format('/subscriptions/{0}/resourceGroups/{1}', subscription().subscriptionId, parameters('rgName')), 'Microsoft.Resources/deployments', 'deploy_postgres_sql'), '2022-09-01').outputs.postgresDbOutput.value.postgreSQLDatabaseName, 'AZURE_POSTGRESQL_USER', parameters('functionName')), createObject())))]" + "value": "[union(createObject('AZURE_BLOB_ACCOUNT_NAME', parameters('storageAccountName'), 'AZURE_BLOB_CONTAINER_NAME', variables('blobContainerName'), 'AZURE_FORM_RECOGNIZER_ENDPOINT', reference(extensionResourceId(format('/subscriptions/{0}/resourceGroups/{1}', subscription().subscriptionId, parameters('rgName')), 'Microsoft.Resources/deployments', parameters('formRecognizerName')), '2022-09-01').outputs.endpoint.value, 'AZURE_COMPUTER_VISION_ENDPOINT', if(parameters('useAdvancedImageProcessing'), reference(extensionResourceId(format('/subscriptions/{0}/resourceGroups/{1}', subscription().subscriptionId, parameters('rgName')), 'Microsoft.Resources/deployments', 'computerVision'), '2022-09-01').outputs.endpoint.value, ''), 'AZURE_COMPUTER_VISION_VECTORIZE_IMAGE_API_VERSION', parameters('computerVisionVectorizeImageApiVersion'), 'AZURE_COMPUTER_VISION_VECTORIZE_IMAGE_MODEL_VERSION', 
parameters('computerVisionVectorizeImageModelVersion'), 'AZURE_CONTENT_SAFETY_ENDPOINT', reference(extensionResourceId(format('/subscriptions/{0}/resourceGroups/{1}', subscription().subscriptionId, parameters('rgName')), 'Microsoft.Resources/deployments', parameters('contentSafetyName')), '2022-09-01').outputs.endpoint.value, 'AZURE_OPENAI_MODEL', parameters('azureOpenAIModel'), 'AZURE_OPENAI_MODEL_NAME', parameters('azureOpenAIModelName'), 'AZURE_OPENAI_MODEL_VERSION', parameters('azureOpenAIModelVersion'), 'AZURE_OPENAI_EMBEDDING_MODEL', parameters('azureOpenAIEmbeddingModel'), 'AZURE_OPENAI_EMBEDDING_MODEL_NAME', parameters('azureOpenAIEmbeddingModelName'), 'AZURE_OPENAI_EMBEDDING_MODEL_VERSION', parameters('azureOpenAIEmbeddingModelVersion'), 'AZURE_OPENAI_RESOURCE', parameters('azureOpenAIResourceName'), 'AZURE_OPENAI_API_VERSION', parameters('azureOpenAIApiVersion'), 'USE_ADVANCED_IMAGE_PROCESSING', parameters('useAdvancedImageProcessing'), 'DOCUMENT_PROCESSING_QUEUE_NAME', variables('queueName'), 'ORCHESTRATION_STRATEGY', parameters('orchestrationStrategy'), 'LOGLEVEL', parameters('logLevel'), 'AZURE_OPENAI_SYSTEM_MESSAGE', parameters('azureOpenAISystemMessage'), 'DATABASE_TYPE', parameters('databaseType'), 'APP_ENV', parameters('appEnvironment')), if(equals(parameters('databaseType'), 'CosmosDB'), createObject('AZURE_SEARCH_INDEX', parameters('azureSearchIndex'), 'AZURE_SEARCH_SERVICE', format('https://{0}.search.windows.net', parameters('azureAISearchName')), 'AZURE_SEARCH_DATASOURCE_NAME', parameters('azureSearchDatasource'), 'AZURE_SEARCH_INDEXER_NAME', parameters('azureSearchIndexer'), 'AZURE_SEARCH_USE_INTEGRATED_VECTORIZATION', parameters('azureSearchUseIntegratedVectorization'), 'AZURE_SEARCH_FIELDS_ID', parameters('azureSearchFieldId'), 'AZURE_SEARCH_CONTENT_COLUMN', parameters('azureSearchContentColumn'), 'AZURE_SEARCH_CONTENT_VECTOR_COLUMN', parameters('azureSearchVectorColumn'), 'AZURE_SEARCH_TITLE_COLUMN', parameters('azureSearchTitleColumn'), 
'AZURE_SEARCH_FIELDS_METADATA', parameters('azureSearchFieldsMetadata'), 'AZURE_SEARCH_SOURCE_COLUMN', parameters('azureSearchSourceColumn'), 'AZURE_SEARCH_TEXT_COLUMN', if(parameters('azureSearchUseIntegratedVectorization'), parameters('azureSearchTextColumn'), ''), 'AZURE_SEARCH_LAYOUT_TEXT_COLUMN', if(parameters('azureSearchUseIntegratedVectorization'), parameters('azureSearchLayoutTextColumn'), ''), 'AZURE_SEARCH_CHUNK_COLUMN', parameters('azureSearchChunkColumn'), 'AZURE_SEARCH_OFFSET_COLUMN', parameters('azureSearchOffsetColumn'), 'AZURE_SEARCH_TOP_K', parameters('azureSearchTopK')), if(equals(parameters('databaseType'), 'PostgreSQL'), createObject('AZURE_POSTGRESQL_HOST_NAME', reference(extensionResourceId(format('/subscriptions/{0}/resourceGroups/{1}', subscription().subscriptionId, parameters('rgName')), 'Microsoft.Resources/deployments', 'deploy_postgres_sql'), '2022-09-01').outputs.postgresDbOutput.value.postgreSQLServerName, 'AZURE_POSTGRESQL_DATABASE_NAME', reference(extensionResourceId(format('/subscriptions/{0}/resourceGroups/{1}', subscription().subscriptionId, parameters('rgName')), 'Microsoft.Resources/deployments', 'deploy_postgres_sql'), '2022-09-01').outputs.postgresDbOutput.value.postgreSQLDatabaseName, 'AZURE_POSTGRESQL_USER', parameters('functionName')), createObject())))]" } }, "template": { @@ -8474,8 +8489,8 @@ "metadata": { "_generator": { "name": "bicep", - "version": "0.36.1.42791", - "templateHash": "15315839047770251730" + "version": "0.36.177.2456", + "templateHash": "4396932483281426377" } }, "parameters": { @@ -8672,8 +8687,8 @@ "metadata": { "_generator": { "name": "bicep", - "version": "0.36.1.42791", - "templateHash": "11161313400670591675" + "version": "0.36.177.2456", + "templateHash": "8452382259440518720" }, "description": "Creates an Azure Function in an existing Azure App Service plan." 
}, @@ -8883,8 +8898,8 @@ "metadata": { "_generator": { "name": "bicep", - "version": "0.36.1.42791", - "templateHash": "7056589812564712804" + "version": "0.36.177.2456", + "templateHash": "5520933625191059047" }, "description": "Creates an Azure App Service in an existing Azure App Service plan." }, @@ -9110,8 +9125,8 @@ "metadata": { "_generator": { "name": "bicep", - "version": "0.36.1.42791", - "templateHash": "8872422051335608470" + "version": "0.36.177.2456", + "templateHash": "2114937881746412139" }, "description": "Updates app settings for an Azure App Service." }, @@ -9187,8 +9202,8 @@ "metadata": { "_generator": { "name": "bicep", - "version": "0.36.1.42791", - "templateHash": "1525080529756490231" + "version": "0.36.177.2456", + "templateHash": "14223167216489085881" }, "description": "Creates a role assignment for a service principal." }, @@ -9242,6 +9257,10 @@ "uri": { "type": "string", "value": "[reference(resourceId('Microsoft.Resources/deployments', format('{0}-functions', parameters('name'))), '2022-09-01').outputs.uri.value]" + }, + "azureWebJobsStorage": { + "type": "string", + "value": "[if(parameters('useKeyVault'), format('DefaultEndpointsProtocol=https;AccountName={0};AccountKey={1};EndpointSuffix={2}', parameters('storageAccountName'), listKeys(resourceId('Microsoft.Storage/storageAccounts', parameters('storageAccountName')), '2021-09-01').keys[0].value, environment().suffixes.storage), '')]" } } } @@ -9274,8 +9293,8 @@ "metadata": { "_generator": { "name": "bicep", - "version": "0.36.1.42791", - "templateHash": "1525080529756490231" + "version": "0.36.177.2456", + "templateHash": "14223167216489085881" }, "description": "Creates a role assignment for a service principal." 
}, @@ -9343,8 +9362,8 @@ "metadata": { "_generator": { "name": "bicep", - "version": "0.36.1.42791", - "templateHash": "1525080529756490231" + "version": "0.36.177.2456", + "templateHash": "14223167216489085881" }, "description": "Creates a role assignment for a service principal." }, @@ -9412,8 +9431,8 @@ "metadata": { "_generator": { "name": "bicep", - "version": "0.36.1.42791", - "templateHash": "1525080529756490231" + "version": "0.36.177.2456", + "templateHash": "14223167216489085881" }, "description": "Creates a role assignment for a service principal." }, @@ -9481,8 +9500,8 @@ "metadata": { "_generator": { "name": "bicep", - "version": "0.36.1.42791", - "templateHash": "1525080529756490231" + "version": "0.36.177.2456", + "templateHash": "14223167216489085881" }, "description": "Creates a role assignment for a service principal." }, @@ -9550,8 +9569,8 @@ "metadata": { "_generator": { "name": "bicep", - "version": "0.36.1.42791", - "templateHash": "1525080529756490231" + "version": "0.36.177.2456", + "templateHash": "14223167216489085881" }, "description": "Creates a role assignment for a service principal." }, @@ -9616,8 +9635,8 @@ "metadata": { "_generator": { "name": "bicep", - "version": "0.36.1.42791", - "templateHash": "13097350302282890335" + "version": "0.36.177.2456", + "templateHash": "15649900872986233495" }, "description": "Assigns an Azure Key Vault access policy." 
}, @@ -9673,6 +9692,10 @@ "functionName": { "type": "string", "value": "[reference(resourceId('Microsoft.Resources/deployments', format('{0}-app-module', parameters('name'))), '2022-09-01').outputs.name.value]" + }, + "AzureWebJobsStorage": { + "type": "string", + "value": "[reference(resourceId('Microsoft.Resources/deployments', format('{0}-app-module', parameters('name'))), '2022-09-01').outputs.azureWebJobsStorage.value]" } } } @@ -9761,7 +9784,7 @@ "value": "[parameters('databaseType')]" }, "appSettings": { - "value": "[union(createObject('AZURE_BLOB_ACCOUNT_NAME', parameters('storageAccountName'), 'AZURE_BLOB_CONTAINER_NAME', variables('blobContainerName'), 'AZURE_FORM_RECOGNIZER_ENDPOINT', reference(extensionResourceId(format('/subscriptions/{0}/resourceGroups/{1}', subscription().subscriptionId, parameters('rgName')), 'Microsoft.Resources/deployments', parameters('formRecognizerName')), '2022-09-01').outputs.endpoint.value, 'AZURE_COMPUTER_VISION_ENDPOINT', if(parameters('useAdvancedImageProcessing'), reference(extensionResourceId(format('/subscriptions/{0}/resourceGroups/{1}', subscription().subscriptionId, parameters('rgName')), 'Microsoft.Resources/deployments', 'computerVision'), '2022-09-01').outputs.endpoint.value, ''), 'AZURE_COMPUTER_VISION_VECTORIZE_IMAGE_API_VERSION', parameters('computerVisionVectorizeImageApiVersion'), 'AZURE_COMPUTER_VISION_VECTORIZE_IMAGE_MODEL_VERSION', parameters('computerVisionVectorizeImageModelVersion'), 'AZURE_CONTENT_SAFETY_ENDPOINT', reference(extensionResourceId(format('/subscriptions/{0}/resourceGroups/{1}', subscription().subscriptionId, parameters('rgName')), 'Microsoft.Resources/deployments', parameters('contentSafetyName')), '2022-09-01').outputs.endpoint.value, 'AZURE_OPENAI_MODEL', parameters('azureOpenAIModel'), 'AZURE_OPENAI_MODEL_NAME', parameters('azureOpenAIModelName'), 'AZURE_OPENAI_MODEL_VERSION', parameters('azureOpenAIModelVersion'), 'AZURE_OPENAI_EMBEDDING_MODEL', 
parameters('azureOpenAIEmbeddingModel'), 'AZURE_OPENAI_EMBEDDING_MODEL_NAME', parameters('azureOpenAIEmbeddingModelName'), 'AZURE_OPENAI_EMBEDDING_MODEL_VERSION', parameters('azureOpenAIEmbeddingModelVersion'), 'AZURE_OPENAI_RESOURCE', parameters('azureOpenAIResourceName'), 'AZURE_OPENAI_API_VERSION', parameters('azureOpenAIApiVersion'), 'USE_ADVANCED_IMAGE_PROCESSING', parameters('useAdvancedImageProcessing'), 'DOCUMENT_PROCESSING_QUEUE_NAME', variables('queueName'), 'ORCHESTRATION_STRATEGY', parameters('orchestrationStrategy'), 'LOGLEVEL', parameters('logLevel'), 'AZURE_OPENAI_SYSTEM_MESSAGE', parameters('azureOpenAISystemMessage'), 'DATABASE_TYPE', parameters('databaseType')), if(equals(parameters('databaseType'), 'CosmosDB'), createObject('AZURE_SEARCH_INDEX', parameters('azureSearchIndex'), 'AZURE_SEARCH_SERVICE', format('https://{0}.search.windows.net', parameters('azureAISearchName')), 'AZURE_SEARCH_DATASOURCE_NAME', parameters('azureSearchDatasource'), 'AZURE_SEARCH_INDEXER_NAME', parameters('azureSearchIndexer'), 'AZURE_SEARCH_USE_INTEGRATED_VECTORIZATION', parameters('azureSearchUseIntegratedVectorization'), 'AZURE_SEARCH_FIELDS_ID', parameters('azureSearchFieldId'), 'AZURE_SEARCH_CONTENT_COLUMN', parameters('azureSearchContentColumn'), 'AZURE_SEARCH_CONTENT_VECTOR_COLUMN', parameters('azureSearchVectorColumn'), 'AZURE_SEARCH_TITLE_COLUMN', parameters('azureSearchTitleColumn'), 'AZURE_SEARCH_FIELDS_METADATA', parameters('azureSearchFieldsMetadata'), 'AZURE_SEARCH_SOURCE_COLUMN', parameters('azureSearchSourceColumn'), 'AZURE_SEARCH_CHUNK_COLUMN', parameters('azureSearchChunkColumn'), 'AZURE_SEARCH_OFFSET_COLUMN', parameters('azureSearchOffsetColumn'), 'AZURE_SEARCH_TOP_K', parameters('azureSearchTopK')), if(equals(parameters('databaseType'), 'PostgreSQL'), createObject('AZURE_POSTGRESQL_HOST_NAME', reference(extensionResourceId(format('/subscriptions/{0}/resourceGroups/{1}', subscription().subscriptionId, parameters('rgName')), 
'Microsoft.Resources/deployments', 'deploy_postgres_sql'), '2022-09-01').outputs.postgresDbOutput.value.postgreSQLServerName, 'AZURE_POSTGRESQL_DATABASE_NAME', reference(extensionResourceId(format('/subscriptions/{0}/resourceGroups/{1}', subscription().subscriptionId, parameters('rgName')), 'Microsoft.Resources/deployments', 'deploy_postgres_sql'), '2022-09-01').outputs.postgresDbOutput.value.postgreSQLDatabaseName, 'AZURE_POSTGRESQL_USER', format('{0}-docker', parameters('functionName'))), createObject())))]" + "value": "[union(createObject('AZURE_BLOB_ACCOUNT_NAME', parameters('storageAccountName'), 'AZURE_BLOB_CONTAINER_NAME', variables('blobContainerName'), 'AZURE_FORM_RECOGNIZER_ENDPOINT', reference(extensionResourceId(format('/subscriptions/{0}/resourceGroups/{1}', subscription().subscriptionId, parameters('rgName')), 'Microsoft.Resources/deployments', parameters('formRecognizerName')), '2022-09-01').outputs.endpoint.value, 'AZURE_COMPUTER_VISION_ENDPOINT', if(parameters('useAdvancedImageProcessing'), reference(extensionResourceId(format('/subscriptions/{0}/resourceGroups/{1}', subscription().subscriptionId, parameters('rgName')), 'Microsoft.Resources/deployments', 'computerVision'), '2022-09-01').outputs.endpoint.value, ''), 'AZURE_COMPUTER_VISION_VECTORIZE_IMAGE_API_VERSION', parameters('computerVisionVectorizeImageApiVersion'), 'AZURE_COMPUTER_VISION_VECTORIZE_IMAGE_MODEL_VERSION', parameters('computerVisionVectorizeImageModelVersion'), 'AZURE_CONTENT_SAFETY_ENDPOINT', reference(extensionResourceId(format('/subscriptions/{0}/resourceGroups/{1}', subscription().subscriptionId, parameters('rgName')), 'Microsoft.Resources/deployments', parameters('contentSafetyName')), '2022-09-01').outputs.endpoint.value, 'AZURE_OPENAI_MODEL', parameters('azureOpenAIModel'), 'AZURE_OPENAI_MODEL_NAME', parameters('azureOpenAIModelName'), 'AZURE_OPENAI_MODEL_VERSION', parameters('azureOpenAIModelVersion'), 'AZURE_OPENAI_EMBEDDING_MODEL', 
parameters('azureOpenAIEmbeddingModel'), 'AZURE_OPENAI_EMBEDDING_MODEL_NAME', parameters('azureOpenAIEmbeddingModelName'), 'AZURE_OPENAI_EMBEDDING_MODEL_VERSION', parameters('azureOpenAIEmbeddingModelVersion'), 'AZURE_OPENAI_RESOURCE', parameters('azureOpenAIResourceName'), 'AZURE_OPENAI_API_VERSION', parameters('azureOpenAIApiVersion'), 'USE_ADVANCED_IMAGE_PROCESSING', parameters('useAdvancedImageProcessing'), 'DOCUMENT_PROCESSING_QUEUE_NAME', variables('queueName'), 'ORCHESTRATION_STRATEGY', parameters('orchestrationStrategy'), 'LOGLEVEL', parameters('logLevel'), 'AZURE_OPENAI_SYSTEM_MESSAGE', parameters('azureOpenAISystemMessage'), 'DATABASE_TYPE', parameters('databaseType'), 'APP_ENV', parameters('appEnvironment')), if(equals(parameters('databaseType'), 'CosmosDB'), createObject('AZURE_SEARCH_INDEX', parameters('azureSearchIndex'), 'AZURE_SEARCH_SERVICE', format('https://{0}.search.windows.net', parameters('azureAISearchName')), 'AZURE_SEARCH_DATASOURCE_NAME', parameters('azureSearchDatasource'), 'AZURE_SEARCH_INDEXER_NAME', parameters('azureSearchIndexer'), 'AZURE_SEARCH_USE_INTEGRATED_VECTORIZATION', parameters('azureSearchUseIntegratedVectorization'), 'AZURE_SEARCH_FIELDS_ID', parameters('azureSearchFieldId'), 'AZURE_SEARCH_CONTENT_COLUMN', parameters('azureSearchContentColumn'), 'AZURE_SEARCH_CONTENT_VECTOR_COLUMN', parameters('azureSearchVectorColumn'), 'AZURE_SEARCH_TITLE_COLUMN', parameters('azureSearchTitleColumn'), 'AZURE_SEARCH_FIELDS_METADATA', parameters('azureSearchFieldsMetadata'), 'AZURE_SEARCH_SOURCE_COLUMN', parameters('azureSearchSourceColumn'), 'AZURE_SEARCH_TEXT_COLUMN', if(parameters('azureSearchUseIntegratedVectorization'), parameters('azureSearchTextColumn'), ''), 'AZURE_SEARCH_LAYOUT_TEXT_COLUMN', if(parameters('azureSearchUseIntegratedVectorization'), parameters('azureSearchLayoutTextColumn'), ''), 'AZURE_SEARCH_CHUNK_COLUMN', parameters('azureSearchChunkColumn'), 'AZURE_SEARCH_OFFSET_COLUMN', parameters('azureSearchOffsetColumn'), 
'AZURE_SEARCH_TOP_K', parameters('azureSearchTopK')), if(equals(parameters('databaseType'), 'PostgreSQL'), createObject('AZURE_POSTGRESQL_HOST_NAME', reference(extensionResourceId(format('/subscriptions/{0}/resourceGroups/{1}', subscription().subscriptionId, parameters('rgName')), 'Microsoft.Resources/deployments', 'deploy_postgres_sql'), '2022-09-01').outputs.postgresDbOutput.value.postgreSQLServerName, 'AZURE_POSTGRESQL_DATABASE_NAME', reference(extensionResourceId(format('/subscriptions/{0}/resourceGroups/{1}', subscription().subscriptionId, parameters('rgName')), 'Microsoft.Resources/deployments', 'deploy_postgres_sql'), '2022-09-01').outputs.postgresDbOutput.value.postgreSQLDatabaseName, 'AZURE_POSTGRESQL_USER', format('{0}-docker', parameters('functionName'))), createObject())))]" } }, "template": { @@ -9770,8 +9793,8 @@ "metadata": { "_generator": { "name": "bicep", - "version": "0.36.1.42791", - "templateHash": "15315839047770251730" + "version": "0.36.177.2456", + "templateHash": "4396932483281426377" } }, "parameters": { @@ -9968,8 +9991,8 @@ "metadata": { "_generator": { "name": "bicep", - "version": "0.36.1.42791", - "templateHash": "11161313400670591675" + "version": "0.36.177.2456", + "templateHash": "8452382259440518720" }, "description": "Creates an Azure Function in an existing Azure App Service plan." }, @@ -10179,8 +10202,8 @@ "metadata": { "_generator": { "name": "bicep", - "version": "0.36.1.42791", - "templateHash": "7056589812564712804" + "version": "0.36.177.2456", + "templateHash": "5520933625191059047" }, "description": "Creates an Azure App Service in an existing Azure App Service plan." }, @@ -10406,8 +10429,8 @@ "metadata": { "_generator": { "name": "bicep", - "version": "0.36.1.42791", - "templateHash": "8872422051335608470" + "version": "0.36.177.2456", + "templateHash": "2114937881746412139" }, "description": "Updates app settings for an Azure App Service." 
}, @@ -10483,8 +10506,8 @@ "metadata": { "_generator": { "name": "bicep", - "version": "0.36.1.42791", - "templateHash": "1525080529756490231" + "version": "0.36.177.2456", + "templateHash": "14223167216489085881" }, "description": "Creates a role assignment for a service principal." }, @@ -10538,6 +10561,10 @@ "uri": { "type": "string", "value": "[reference(resourceId('Microsoft.Resources/deployments', format('{0}-functions', parameters('name'))), '2022-09-01').outputs.uri.value]" + }, + "azureWebJobsStorage": { + "type": "string", + "value": "[if(parameters('useKeyVault'), format('DefaultEndpointsProtocol=https;AccountName={0};AccountKey={1};EndpointSuffix={2}', parameters('storageAccountName'), listKeys(resourceId('Microsoft.Storage/storageAccounts', parameters('storageAccountName')), '2021-09-01').keys[0].value, environment().suffixes.storage), '')]" } } } @@ -10570,8 +10597,8 @@ "metadata": { "_generator": { "name": "bicep", - "version": "0.36.1.42791", - "templateHash": "1525080529756490231" + "version": "0.36.177.2456", + "templateHash": "14223167216489085881" }, "description": "Creates a role assignment for a service principal." }, @@ -10639,8 +10666,8 @@ "metadata": { "_generator": { "name": "bicep", - "version": "0.36.1.42791", - "templateHash": "1525080529756490231" + "version": "0.36.177.2456", + "templateHash": "14223167216489085881" }, "description": "Creates a role assignment for a service principal." }, @@ -10708,8 +10735,8 @@ "metadata": { "_generator": { "name": "bicep", - "version": "0.36.1.42791", - "templateHash": "1525080529756490231" + "version": "0.36.177.2456", + "templateHash": "14223167216489085881" }, "description": "Creates a role assignment for a service principal." 
}, @@ -10777,8 +10804,8 @@ "metadata": { "_generator": { "name": "bicep", - "version": "0.36.1.42791", - "templateHash": "1525080529756490231" + "version": "0.36.177.2456", + "templateHash": "14223167216489085881" }, "description": "Creates a role assignment for a service principal." }, @@ -10846,8 +10873,8 @@ "metadata": { "_generator": { "name": "bicep", - "version": "0.36.1.42791", - "templateHash": "1525080529756490231" + "version": "0.36.177.2456", + "templateHash": "14223167216489085881" }, "description": "Creates a role assignment for a service principal." }, @@ -10912,8 +10939,8 @@ "metadata": { "_generator": { "name": "bicep", - "version": "0.36.1.42791", - "templateHash": "13097350302282890335" + "version": "0.36.177.2456", + "templateHash": "15649900872986233495" }, "description": "Assigns an Azure Key Vault access policy." }, @@ -10969,6 +10996,10 @@ "functionName": { "type": "string", "value": "[reference(resourceId('Microsoft.Resources/deployments', format('{0}-app-module', parameters('name'))), '2022-09-01').outputs.name.value]" + }, + "AzureWebJobsStorage": { + "type": "string", + "value": "[reference(resourceId('Microsoft.Resources/deployments', format('{0}-app-module', parameters('name'))), '2022-09-01').outputs.azureWebJobsStorage.value]" } } } @@ -11019,8 +11050,8 @@ "metadata": { "_generator": { "name": "bicep", - "version": "0.36.1.42791", - "templateHash": "5396502874055092713" + "version": "0.36.177.2456", + "templateHash": "10797488511727118382" }, "description": "Creates an Azure Cognitive Services instance." }, @@ -11174,8 +11205,8 @@ "metadata": { "_generator": { "name": "bicep", - "version": "0.36.1.42791", - "templateHash": "5396502874055092713" + "version": "0.36.177.2456", + "templateHash": "10797488511727118382" }, "description": "Creates an Azure Cognitive Services instance." 
}, @@ -11332,8 +11363,8 @@ "metadata": { "_generator": { "name": "bicep", - "version": "0.36.1.42791", - "templateHash": "8223498772551098397" + "version": "0.36.177.2456", + "templateHash": "6867239882926035710" } }, "parameters": { @@ -11465,8 +11496,8 @@ "metadata": { "_generator": { "name": "bicep", - "version": "0.36.1.42791", - "templateHash": "16260688333491233707" + "version": "0.36.177.2456", + "templateHash": "1888631454746920599" }, "description": "Creates an Azure storage account." }, @@ -11693,8 +11724,8 @@ "metadata": { "_generator": { "name": "bicep", - "version": "0.36.1.42791", - "templateHash": "1525080529756490231" + "version": "0.36.177.2456", + "templateHash": "14223167216489085881" }, "description": "Creates a role assignment for a service principal." }, @@ -11763,8 +11794,8 @@ "metadata": { "_generator": { "name": "bicep", - "version": "0.36.1.42791", - "templateHash": "1525080529756490231" + "version": "0.36.177.2456", + "templateHash": "14223167216489085881" }, "description": "Creates a role assignment for a service principal." }, @@ -11833,8 +11864,8 @@ "metadata": { "_generator": { "name": "bicep", - "version": "0.36.1.42791", - "templateHash": "1525080529756490231" + "version": "0.36.177.2456", + "templateHash": "14223167216489085881" }, "description": "Creates a role assignment for a service principal." }, @@ -11903,8 +11934,8 @@ "metadata": { "_generator": { "name": "bicep", - "version": "0.36.1.42791", - "templateHash": "1525080529756490231" + "version": "0.36.177.2456", + "templateHash": "14223167216489085881" }, "description": "Creates a role assignment for a service principal." 
}, @@ -11985,8 +12016,8 @@ "metadata": { "_generator": { "name": "bicep", - "version": "0.36.1.42791", - "templateHash": "2884445231517776593" + "version": "0.36.177.2456", + "templateHash": "6858844329670181355" } }, "parameters": { @@ -12131,8 +12162,8 @@ "metadata": { "_generator": { "name": "bicep", - "version": "0.36.1.42791", - "templateHash": "14404355068161542913" + "version": "0.36.177.2456", + "templateHash": "11131134664964433363" } }, "parameters": { @@ -12216,13 +12247,17 @@ "type": "string", "value": "[parameters('hostingModel')]" }, + "APP_ENV": { + "type": "string", + "value": "[parameters('appEnvironment')]" + }, "AZURE_BLOB_STORAGE_INFO": { "type": "string", "value": "[string(createObject('container_name', variables('blobContainerName'), 'account_name', parameters('storageAccountName'), 'account_key', if(parameters('useKeyVault'), reference(extensionResourceId(format('/subscriptions/{0}/resourceGroups/{1}', subscription().subscriptionId, parameters('rgName')), 'Microsoft.Resources/deployments', 'storekeys'), '2022-09-01').outputs.STORAGE_ACCOUNT_KEY_NAME.value, '')))]" }, "AZURE_COMPUTER_VISION_INFO": { "type": "string", - "value": "[string(createObject('service_name', parameters('speechServiceName'), 'endpoint', if(parameters('useAdvancedImageProcessing'), reference(extensionResourceId(format('/subscriptions/{0}/resourceGroups/{1}', subscription().subscriptionId, parameters('rgName')), 'Microsoft.Resources/deployments', 'computerVision'), '2022-09-01').outputs.endpoint.value, ''), 'location', if(parameters('useAdvancedImageProcessing'), reference(extensionResourceId(format('/subscriptions/{0}/resourceGroups/{1}', subscription().subscriptionId, parameters('rgName')), 'Microsoft.Resources/deployments', 'computerVision'), '2022-09-01').outputs.location.value, ''), 'key', if(parameters('useKeyVault'), reference(extensionResourceId(format('/subscriptions/{0}/resourceGroups/{1}', subscription().subscriptionId, parameters('rgName')), 
'Microsoft.Resources/deployments', 'storekeys'), '2022-09-01').outputs.COMPUTER_VISION_KEY_NAME.value, ''), 'vectorize_image_api_version', parameters('computerVisionVectorizeImageApiVersion'), 'vectorize_image_model_version', parameters('computerVisionVectorizeImageModelVersion')))]" + "value": "[string(createObject('service_name', parameters('computerVisionName'), 'endpoint', if(parameters('useAdvancedImageProcessing'), reference(extensionResourceId(format('/subscriptions/{0}/resourceGroups/{1}', subscription().subscriptionId, parameters('rgName')), 'Microsoft.Resources/deployments', 'computerVision'), '2022-09-01').outputs.endpoint.value, ''), 'location', if(parameters('useAdvancedImageProcessing'), reference(extensionResourceId(format('/subscriptions/{0}/resourceGroups/{1}', subscription().subscriptionId, parameters('rgName')), 'Microsoft.Resources/deployments', 'computerVision'), '2022-09-01').outputs.location.value, ''), 'key', if(parameters('useKeyVault'), reference(extensionResourceId(format('/subscriptions/{0}/resourceGroups/{1}', subscription().subscriptionId, parameters('rgName')), 'Microsoft.Resources/deployments', 'storekeys'), '2022-09-01').outputs.COMPUTER_VISION_KEY_NAME.value, ''), 'vectorize_image_api_version', parameters('computerVisionVectorizeImageApiVersion'), 'vectorize_image_model_version', parameters('computerVisionVectorizeImageModelVersion')))]" }, "AZURE_CONTENT_SAFETY_INFO": { "type": "string", @@ -12258,7 +12293,7 @@ }, "AZURE_SEARCH_SERVICE_INFO": { "type": "string", - "value": "[if(equals(parameters('databaseType'), 'CosmosDB'), string(createObject('service_name', parameters('azureAISearchName'), 'key', if(parameters('useKeyVault'), reference(extensionResourceId(format('/subscriptions/{0}/resourceGroups/{1}', subscription().subscriptionId, parameters('rgName')), 'Microsoft.Resources/deployments', 'storekeys'), '2022-09-01').outputs.SEARCH_KEY_NAME.value, ''), 'service', 
reference(extensionResourceId(format('/subscriptions/{0}/resourceGroups/{1}', subscription().subscriptionId, parameters('rgName')), 'Microsoft.Resources/deployments', parameters('azureAISearchName')), '2022-09-01').outputs.endpoint.value, 'use_semantic_search', parameters('azureSearchUseSemanticSearch'), 'semantic_search_config', parameters('azureSearchSemanticSearchConfig'), 'index_is_prechunked', parameters('azureSearchIndexIsPrechunked'), 'top_k', parameters('azureSearchTopK'), 'enable_in_domain', parameters('azureSearchEnableInDomain'), 'content_column', parameters('azureSearchContentColumn'), 'content_vector_column', parameters('azureSearchVectorColumn'), 'filename_column', parameters('azureSearchFilenameColumn'), 'filter', parameters('azureSearchFilter'), 'title_column', parameters('azureSearchTitleColumn'), 'url_column', parameters('azureSearchUrlColumn'), 'use_integrated_vectorization', parameters('azureSearchUseIntegratedVectorization'), 'index', parameters('azureSearchIndex'), 'indexer_name', parameters('azureSearchIndexer'), 'datasource_name', parameters('azureSearchDatasource'))), '')]" + "value": "[if(equals(parameters('databaseType'), 'CosmosDB'), string(createObject('service_name', parameters('azureAISearchName'), 'key', if(parameters('useKeyVault'), reference(extensionResourceId(format('/subscriptions/{0}/resourceGroups/{1}', subscription().subscriptionId, parameters('rgName')), 'Microsoft.Resources/deployments', 'storekeys'), '2022-09-01').outputs.SEARCH_KEY_NAME.value, ''), 'service', reference(extensionResourceId(format('/subscriptions/{0}/resourceGroups/{1}', subscription().subscriptionId, parameters('rgName')), 'Microsoft.Resources/deployments', parameters('azureAISearchName')), '2022-09-01').outputs.endpoint.value, 'use_semantic_search', parameters('azureSearchUseSemanticSearch'), 'semantic_search_config', parameters('azureSearchSemanticSearchConfig'), 'index_is_prechunked', parameters('azureSearchIndexIsPrechunked'), 'top_k', 
parameters('azureSearchTopK'), 'enable_in_domain', parameters('azureSearchEnableInDomain'), 'content_column', parameters('azureSearchContentColumn'), 'content_vector_column', parameters('azureSearchVectorColumn'), 'filename_column', parameters('azureSearchFilenameColumn'), 'filter', parameters('azureSearchFilter'), 'title_column', parameters('azureSearchTitleColumn'), 'fields_metadata', parameters('azureSearchFieldsMetadata'), 'source_column', parameters('azureSearchSourceColumn'), 'text_column', parameters('azureSearchTextColumn'), 'layout_column', parameters('azureSearchLayoutTextColumn'), 'url_column', parameters('azureSearchUrlColumn'), 'use_integrated_vectorization', parameters('azureSearchUseIntegratedVectorization'), 'index', parameters('azureSearchIndex'), 'indexer_name', parameters('azureSearchIndexer'), 'datasource_name', parameters('azureSearchDatasource'))), '')]" }, "AZURE_SPEECH_SERVICE_INFO": { "type": "string", @@ -12284,6 +12319,18 @@ "type": "string", "value": "[parameters('authType')]" }, + "BACKEND_URL": { + "type": "string", + "value": "[variables('backendUrl')]" + }, + "AzureWebJobsStorage": { + "type": "string", + "value": "[if(equals(parameters('hostingModel'), 'code'), reference(extensionResourceId(format('/subscriptions/{0}/resourceGroups/{1}', subscription().subscriptionId, parameters('rgName')), 'Microsoft.Resources/deployments', parameters('functionName')), '2022-09-01').outputs.AzureWebJobsStorage.value, reference(extensionResourceId(format('/subscriptions/{0}/resourceGroups/{1}', subscription().subscriptionId, parameters('rgName')), 'Microsoft.Resources/deployments', format('{0}-docker', parameters('functionName'))), '2022-09-01').outputs.AzureWebJobsStorage.value)]" + }, + "FUNCTION_KEY": { + "type": "string", + "value": "[variables('clientKey')]" + }, "FRONTEND_WEBSITE_NAME": { "type": "string", "value": "[if(equals(parameters('hostingModel'), 'code'), reference(extensionResourceId(format('/subscriptions/{0}/resourceGroups/{1}', 
subscription().subscriptionId, parameters('rgName')), 'Microsoft.Resources/deployments', parameters('websiteName')), '2022-09-01').outputs.FRONTEND_API_URI.value, reference(extensionResourceId(format('/subscriptions/{0}/resourceGroups/{1}', subscription().subscriptionId, parameters('rgName')), 'Microsoft.Resources/deployments', format('{0}-docker', parameters('websiteName'))), '2022-09-01').outputs.FRONTEND_API_URI.value)]" @@ -12336,7 +12383,7 @@ "type": "string", "value": "[variables('openAIFunctionsSystemPrompt')]" }, - "SEMENTIC_KERNEL_SYSTEM_PROMPT": { + "SEMANTIC_KERNEL_SYSTEM_PROMPT": { "type": "string", "value": "[variables('semanticKernelSystemPrompt')]" } diff --git a/scripts/checkquota.sh b/scripts/checkquota.sh index 4728b0d3d..4e388a6b5 100644 --- a/scripts/checkquota.sh +++ b/scripts/checkquota.sh @@ -33,7 +33,7 @@ echo "βœ… Azure subscription set successfully." # Define models and their minimum required capacities declare -A MIN_CAPACITY=( ["OpenAI.GlobalStandard.gpt4.1"]=$GPT_MIN_CAPACITY - ["OpenAI.Standard.text-embedding-ada-002"]=$TEXT_EMBEDDING_MIN_CAPACITY + ["OpenAI.GlobalStandard.text-embedding-ada-002"]=$TEXT_EMBEDDING_MIN_CAPACITY ) VALID_REGION="" diff --git a/scripts/data_scripts/azure_credential_utils.py b/scripts/data_scripts/azure_credential_utils.py new file mode 100644 index 000000000..e8d9d7051 --- /dev/null +++ b/scripts/data_scripts/azure_credential_utils.py @@ -0,0 +1,48 @@ +import os +from azure.identity import ManagedIdentityCredential, DefaultAzureCredential +from azure.identity.aio import ( + ManagedIdentityCredential as AioManagedIdentityCredential, + DefaultAzureCredential as AioDefaultAzureCredential, +) + + +async def get_azure_credential_async(client_id=None): + """ + Returns an Azure credential asynchronously based on the application environment. + + If the environment is 'dev', it uses AioDefaultAzureCredential. + Otherwise, it uses AioManagedIdentityCredential. 
+ + Args: + client_id (str, optional): The client ID for the Managed Identity Credential. + + Returns: + Credential object: Either AioDefaultAzureCredential or AioManagedIdentityCredential. + """ + if os.getenv("APP_ENV", "prod").lower() == "dev": + return ( + AioDefaultAzureCredential() + ) # CodeQL [SM05139] Okay use of DefaultAzureCredential as it is only used in development + else: + return AioManagedIdentityCredential(client_id=client_id) + + +def get_azure_credential(client_id=None): + """ + Returns an Azure credential based on the application environment. + + If the environment is 'dev', it uses DefaultAzureCredential. + Otherwise, it uses ManagedIdentityCredential. + + Args: + client_id (str, optional): The client ID for the Managed Identity Credential. + + Returns: + Credential object: Either DefaultAzureCredential or ManagedIdentityCredential. + """ + if os.getenv("APP_ENV", "prod").lower() == "dev": + return ( + DefaultAzureCredential() + ) # CodeQL [SM05139] Okay use of DefaultAzureCredential as it is only used in development + else: + return ManagedIdentityCredential(client_id=client_id) diff --git a/scripts/data_scripts/create_postgres_tables.py b/scripts/data_scripts/create_postgres_tables.py index 805fd7621..14e395d4f 100644 --- a/scripts/data_scripts/create_postgres_tables.py +++ b/scripts/data_scripts/create_postgres_tables.py @@ -1,4 +1,4 @@ -from azure.identity import DefaultAzureCredential +from azure_credential_utils import get_azure_credential import psycopg2 from psycopg2 import sql @@ -61,7 +61,7 @@ def grant_permissions(cursor, dbname, schema_name, principal_name): # Acquire the access token -cred = DefaultAzureCredential() +cred = get_azure_credential() access_token = cred.get_token("https://ossrdbms-aad.database.windows.net/.default") # Combine the token with the connection string to establish the connection. 
diff --git a/scripts/data_scripts/requirements.txt b/scripts/data_scripts/requirements.txt index 3cb4d1b3e..614c2c4bd 100644 --- a/scripts/data_scripts/requirements.txt +++ b/scripts/data_scripts/requirements.txt @@ -1,3 +1,3 @@ psycopg2-binary==2.9.10 -azure-identity==1.19.0 -azure-keyvault-secrets==4.9.0 +azure-identity==1.23.1 +azure-keyvault-secrets==4.7.0 diff --git a/scripts/quota_check_params.sh b/scripts/quota_check_params.sh new file mode 100644 index 000000000..896010f62 --- /dev/null +++ b/scripts/quota_check_params.sh @@ -0,0 +1,246 @@ +#!/bin/bash +# VERBOSE=false + +MODELS="" +REGIONS="" +VERBOSE=false + +while [[ $# -gt 0 ]]; do + case "$1" in + --models) + MODELS="$2" + shift 2 + ;; + --regions) + REGIONS="$2" + shift 2 + ;; + --verbose) + VERBOSE=true + shift + ;; + *) + echo "Unknown option: $1" + exit 1 + ;; + esac +done + +# Fallback to defaults if not provided +[[ -z "$MODELS" ]] +[[ -z "$REGIONS" ]] + +echo "Models: $MODELS" +echo "Regions: $REGIONS" +echo "Verbose: $VERBOSE" + +for arg in "$@"; do + if [ "$arg" = "--verbose" ]; then + VERBOSE=true + fi +done + +log_verbose() { + if [ "$VERBOSE" = true ]; then + echo "$1" + fi +} + +# Default Models and Capacities (Comma-separated in "model:capacity" format) +DEFAULT_MODEL_CAPACITY="gpt4.1:30,text-embedding-ada-002:30" + +# Convert the comma-separated string into an array +IFS=',' read -r -a MODEL_CAPACITY_PAIRS <<< "$DEFAULT_MODEL_CAPACITY" + +echo "πŸ”„ Fetching available Azure subscriptions..." +SUBSCRIPTIONS=$(az account list --query "[?state=='Enabled'].{Name:name, ID:id}" --output tsv) +SUB_COUNT=$(echo "$SUBSCRIPTIONS" | wc -l) + +if [ "$SUB_COUNT" -eq 0 ]; then + echo "❌ ERROR: No active Azure subscriptions found. Please log in using 'az login' and ensure you have an active subscription." 
+ exit 1 +elif [ "$SUB_COUNT" -eq 1 ]; then + # If only one subscription, automatically select it + AZURE_SUBSCRIPTION_ID=$(echo "$SUBSCRIPTIONS" | awk '{print $2}') + if [ -z "$AZURE_SUBSCRIPTION_ID" ]; then + echo "❌ ERROR: No active Azure subscriptions found. Please log in using 'az login' and ensure you have an active subscription." + exit 1 + fi + echo "βœ… Using the only available subscription: $AZURE_SUBSCRIPTION_ID" +else + # If multiple subscriptions exist, prompt the user to choose one + echo "Multiple subscriptions found:" + echo "$SUBSCRIPTIONS" | awk '{print NR")", $1, "-", $2}' + + while true; do + echo "Enter the number of the subscription to use:" + read SUB_INDEX + + # Validate user input + if [[ "$SUB_INDEX" =~ ^[0-9]+$ ]] && [ "$SUB_INDEX" -ge 1 ] && [ "$SUB_INDEX" -le "$SUB_COUNT" ]; then + AZURE_SUBSCRIPTION_ID=$(echo "$SUBSCRIPTIONS" | awk -v idx="$SUB_INDEX" 'NR==idx {print $2}') + echo "βœ… Selected Subscription: $AZURE_SUBSCRIPTION_ID" + break + else + echo "❌ Invalid selection. Please enter a valid number from the list." 
+ fi + done +fi + + +# Set the selected subscription +az account set --subscription "$AZURE_SUBSCRIPTION_ID" +echo "🎯 Active Subscription: $(az account show --query '[name, id]' --output tsv)" + +# Default Regions to check (Comma-separated, now configurable) +DEFAULT_REGIONS="francecentral,australiaeast,uksouth,eastus2,northcentralus,swedencentral,westus,westus2,southcentralus" +IFS=',' read -r -a DEFAULT_REGION_ARRAY <<< "$DEFAULT_REGIONS" + +# Read parameters (if any) +IFS=',' read -r -a USER_PROVIDED_PAIRS <<< "$MODELS" +USER_REGION="$REGIONS" + +IS_USER_PROVIDED_PAIRS=false + +if [ ${#USER_PROVIDED_PAIRS[@]} -lt 1 ]; then + echo "No parameters provided, using default model-capacity pairs: ${MODEL_CAPACITY_PAIRS[*]}" +else + echo "Using provided model and capacity pairs: ${USER_PROVIDED_PAIRS[*]}" + IS_USER_PROVIDED_PAIRS=true + MODEL_CAPACITY_PAIRS=("${USER_PROVIDED_PAIRS[@]}") +fi + +declare -a FINAL_MODEL_NAMES +declare -a FINAL_CAPACITIES +declare -a TABLE_ROWS + +for PAIR in "${MODEL_CAPACITY_PAIRS[@]}"; do + MODEL_NAME=$(echo "$PAIR" | cut -d':' -f1 | tr '[:upper:]' '[:lower:]') + CAPACITY=$(echo "$PAIR" | cut -d':' -f2) + + if [ -z "$MODEL_NAME" ] || [ -z "$CAPACITY" ]; then + echo "❌ ERROR: Invalid model and capacity pair '$PAIR'. Both model and capacity must be specified." + exit 1 + fi + + FINAL_MODEL_NAMES+=("$MODEL_NAME") + FINAL_CAPACITIES+=("$CAPACITY") + +done + +echo "πŸ”„ Using Models: ${FINAL_MODEL_NAMES[*]} with respective Capacities: ${FINAL_CAPACITIES[*]}" +echo "----------------------------------------" + +# Check if the user provided a region, if not, use the default regions +if [ -n "$USER_REGION" ]; then + echo "πŸ” User provided region: $USER_REGION" + IFS=',' read -r -a REGIONS <<< "$USER_REGION" +else + echo "No region specified, using default regions: ${DEFAULT_REGION_ARRAY[*]}" + REGIONS=("${DEFAULT_REGION_ARRAY[@]}") + APPLY_OR_CONDITION=true +fi + +echo "βœ… Retrieved Azure regions. Checking availability..." 
+INDEX=1 + +VALID_REGIONS=() +for REGION in "${REGIONS[@]}"; do + log_verbose "----------------------------------------" + log_verbose "πŸ” Checking region: $REGION" + + QUOTA_INFO=$(az cognitiveservices usage list --location "$REGION" --output json | tr '[:upper:]' '[:lower:]') + if [ -z "$QUOTA_INFO" ]; then + log_verbose "⚠️ WARNING: Failed to retrieve quota for region $REGION. Skipping." + continue + fi + + TEXT_EMBEDDING_AVAILABLE=false + AT_LEAST_ONE_MODEL_AVAILABLE=false + TEMP_TABLE_ROWS=() + + for index in "${!FINAL_MODEL_NAMES[@]}"; do + MODEL_NAME="${FINAL_MODEL_NAMES[$index]}" + REQUIRED_CAPACITY="${FINAL_CAPACITIES[$index]}" + FOUND=false + INSUFFICIENT_QUOTA=false + + MODEL_TYPES=("openai.standard.$MODEL_NAME" "openai.globalstandard.$MODEL_NAME") + + for MODEL_TYPE in "${MODEL_TYPES[@]}"; do + FOUND=false + INSUFFICIENT_QUOTA=false + log_verbose "πŸ” Checking model: $MODEL_NAME with required capacity: $REQUIRED_CAPACITY ($MODEL_TYPE)" + + MODEL_INFO=$(echo "$QUOTA_INFO" | awk -v model="\"value\": \"$MODEL_TYPE\"" ' + BEGIN { RS="},"; FS="," } + $0 ~ model { print $0 } + ') + + if [ -z "$MODEL_INFO" ]; then + FOUND=false + log_verbose "⚠️ WARNING: No quota information found for model: $MODEL_NAME in region: $REGION for model type: $MODEL_TYPE." + continue + fi + + if [ -n "$MODEL_INFO" ]; then + FOUND=true + CURRENT_VALUE=$(echo "$MODEL_INFO" | awk -F': ' '/"currentvalue"/ {print $2}' | tr -d ',' | tr -d ' ') + LIMIT=$(echo "$MODEL_INFO" | awk -F': ' '/"limit"/ {print $2}' | tr -d ',' | tr -d ' ') + + CURRENT_VALUE=${CURRENT_VALUE:-0} + LIMIT=${LIMIT:-0} + + CURRENT_VALUE=$(echo "$CURRENT_VALUE" | cut -d'.' -f1) + LIMIT=$(echo "$LIMIT" | cut -d'.' 
-f1) + + AVAILABLE=$((LIMIT - CURRENT_VALUE)) + log_verbose "βœ… Model: $MODEL_TYPE | Used: $CURRENT_VALUE | Limit: $LIMIT | Available: $AVAILABLE" + + if [ "$AVAILABLE" -ge "$REQUIRED_CAPACITY" ]; then + FOUND=true + if [ "$MODEL_NAME" = "text-embedding-ada-002" ]; then + TEXT_EMBEDDING_AVAILABLE=true + fi + AT_LEAST_ONE_MODEL_AVAILABLE=true + TEMP_TABLE_ROWS+=("$(printf "| %-4s | %-20s | %-43s | %-10s | %-10s | %-10s |" "$INDEX" "$REGION" "$MODEL_TYPE" "$LIMIT" "$CURRENT_VALUE" "$AVAILABLE")") + else + INSUFFICIENT_QUOTA=true + fi + fi + + if [ "$FOUND" = false ]; then + log_verbose "❌ No models found for model: $MODEL_NAME in region: $REGION (${MODEL_TYPES[*]})" + + elif [ "$INSUFFICIENT_QUOTA" = true ]; then + log_verbose "⚠️ Model $MODEL_NAME in region: $REGION has insufficient quota (${MODEL_TYPES[*]})." + fi + done + done + +if { [ "$IS_USER_PROVIDED_PAIRS" = true ] && [ "$INSUFFICIENT_QUOTA" = false ] && [ "$FOUND" = true ]; } || { [ "$TEXT_EMBEDDING_AVAILABLE" = true ] && { [ "$APPLY_OR_CONDITION" != true ] || [ "$AT_LEAST_ONE_MODEL_AVAILABLE" = true ]; }; }; then + VALID_REGIONS+=("$REGION") + TABLE_ROWS+=("${TEMP_TABLE_ROWS[@]}") + INDEX=$((INDEX + 1)) + elif [ ${#USER_PROVIDED_PAIRS[@]} -eq 0 ]; then + echo "🚫 Skipping $REGION as it does not meet quota requirements." + fi + +done + +if [ ${#TABLE_ROWS[@]} -eq 0 ]; then + echo "--------------------------------------------------------------------------------------------------------------------" + + echo "❌ No regions have sufficient quota for all required models. Please request a quota increase: https://aka.ms/oai/stuquotarequest" +else + echo "---------------------------------------------------------------------------------------------------------------------" + printf "| %-4s | %-20s | %-43s | %-10s | %-10s | %-10s |\n" "No." 
"Region" "Model Name" "Limit" "Used" "Available" + echo "---------------------------------------------------------------------------------------------------------------------" + for ROW in "${TABLE_ROWS[@]}"; do + echo "$ROW" + done + echo "---------------------------------------------------------------------------------------------------------------------" + echo "➑️ To request a quota increase, visit: https://aka.ms/oai/stuquotarequest" +fi + +echo "βœ… Script completed." diff --git a/scripts/run_create_table_script.sh b/scripts/run_create_table_script.sh index 8777ecbc5..90a20c50e 100644 --- a/scripts/run_create_table_script.sh +++ b/scripts/run_create_table_script.sh @@ -23,6 +23,7 @@ az postgres flexible-server firewall-rule create --resource-group $resourceGroup # Download the create table python file curl --output "create_postgres_tables.py" ${baseUrl}"scripts/data_scripts/create_postgres_tables.py" +curl --output "azure_credential_utils.py" ${baseUrl}"scripts/data_scripts/azure_credential_utils.py" # Download the requirement file curl --output "$requirementFile" "$requirementFileUrl" diff --git a/tests/e2e-test/pages/adminPage.py b/tests/e2e-test/pages/adminPage.py index e345c5633..ff3efc55d 100644 --- a/tests/e2e-test/pages/adminPage.py +++ b/tests/e2e-test/pages/adminPage.py @@ -28,3 +28,8 @@ def __init__(self, page): def click_delete_data_tab(self): self.page.locator(self.DELETE_DATA_TAB).click() self.page.wait_for_timeout(5000) + + def assert_admin_page_title(self, admin_page): + actual_title = self.page.locator(admin_page.ADMIN_PAGE_TITLE).text_content() + expected_title = admin_page.ADMIN_PAGE_TITLE + assert expected_title == actual_title, f"Expected title: {expected_title}, Found: {actual_title}" diff --git a/tests/e2e-test/pages/webUserPage.py b/tests/e2e-test/pages/webUserPage.py index 7abcc8ae6..1faabd089 100644 --- a/tests/e2e-test/pages/webUserPage.py +++ b/tests/e2e-test/pages/webUserPage.py @@ -1,7 +1,6 @@ +from asyncio.log import logger from 
base.base import BasePage from playwright.sync_api import expect - - class WebUserPage(BasePage): WEB_PAGE_TITLE = "//h3[text()='Azure AI']" TYPE_QUESTION_TEXT_AREA = "//textarea[contains(@placeholder,'Type a new question')]" @@ -22,6 +21,9 @@ class WebUserPage(BasePage): TOGGLE_CITATIONS_LIST = "[data-testid='toggle-citations-list']" CITATIONS_CONTAINER = "[data-testid='citations-container']" CITATION_BLOCK = "[data-testid='citation-block']" + SHOW_CHAT_HISTORY_BUTTON="//span[text()='Show Chat History']" + HIDE_CHAT_HISTORY_BUTTON = "//span[text()='Hide Chat History']" + CHAT_HISTORY_ITEM = "//div[@aria-label='chat history item']" def __init__(self, page): self.page = page @@ -53,15 +55,28 @@ def click_clear_chat_icon(self): self.page.locator(self.CLEAR_CHAT_ICON).click() def show_chat_history(self): - self.page.locator(self.SHOW_CHAT_HISTORY).click() - self.page.wait_for_load_state("networkidle") - self.page.wait_for_timeout(2000) - expect(self.page.locator(self.CHAT_HISTORY_NAME)).to_be_visible() + """Click to show chat history if the button is visible.""" + show_button = self.page.locator(self.SHOW_CHAT_HISTORY_BUTTON) + if show_button.is_visible(): + show_button.click() + self.page.wait_for_timeout(2000) + expect(self.page.locator(self.CHAT_HISTORY_ITEM)).to_be_visible() + else: + logger.info("'Show' button not visible β€” chat history may already be shown.") + + # def show_chat_history(self): + # self.page.wait_for_selector(self.SHOW_CHAT_HISTORY_BUTTON) + # self.page.locator(self.SHOW_CHAT_HISTORY_BUTTON).click() + # self.page.wait_for_timeout(1000) def close_chat_history(self): - self.page.locator(self.CHAT_CLOSE_ICON).click() - self.page.wait_for_load_state("networkidle") - self.page.wait_for_timeout(2000) + """Click to close chat history if visible.""" + hide_button = self.page.locator(self.HIDE_CHAT_HISTORY_BUTTON) + if hide_button.is_visible(): + hide_button.click() + self.page.wait_for_timeout(2000) + else: + logger.info("Hide button not visible. 
Chat history might already be closed.") def delete_chat_history(self): self.page.locator(self.SHOW_CHAT_HISTORY).click() @@ -69,8 +84,8 @@ def delete_chat_history(self): chat_history = self.page.locator("//span[contains(text(),'No chat history.')]") if chat_history.is_visible(): self.page.wait_for_load_state("networkidle") - self.page.wait_for_timeout(2000) - self.page.get_by_label("hide button").click() + self.page.locator("button[title='Hide']").wait_for(state="visible", timeout=5000) + self.page.locator("button[title='Hide']").click() else: self.page.locator(self.CHAT_HISTORY_OPTIONS).click() diff --git a/tests/e2e-test/pytest.ini b/tests/e2e-test/pytest.ini index 76eb64fc7..ead15695a 100644 --- a/tests/e2e-test/pytest.ini +++ b/tests/e2e-test/pytest.ini @@ -3,4 +3,4 @@ log_cli = true log_cli_level = INFO log_file = logs/tests.log log_file_level = INFO -addopts = -p no:warnings +addopts = -p no:warnings --tb=short diff --git a/tests/e2e-test/requirements.txt b/tests/e2e-test/requirements.txt index 7aad0cfb2..37159fb19 100644 --- a/tests/e2e-test/requirements.txt +++ b/tests/e2e-test/requirements.txt @@ -3,4 +3,5 @@ pytest-reporter-html1 python-dotenv pytest-check pytest-html -py \ No newline at end of file +py +beautifulsoup4 diff --git a/tests/e2e-test/tests/conftest.py b/tests/e2e-test/tests/conftest.py index 31a3bc295..55ed8f1d7 100644 --- a/tests/e2e-test/tests/conftest.py +++ b/tests/e2e-test/tests/conftest.py @@ -1,54 +1,92 @@ -import os - import pytest -from config.constants import * +import os +import io +import logging +import atexit +from bs4 import BeautifulSoup from playwright.sync_api import sync_playwright -from py.xml import html # type: ignore +from config.constants import * +log_streams = {} +# ---------- FIXTURE: Login and Logout Setup ---------- @pytest.fixture(scope="session") def login_logout(): - # perform login and browser close once in a session with sync_playwright() as p: browser = p.chromium.launch(headless=False, 
args=["--start-maximized"]) context = browser.new_context(no_viewport=True) context.set_default_timeout(80000) page = context.new_page() - # Navigate to the login URL + + # Load URL and wait page.goto(WEB_URL) - # Wait for the login form to appear page.wait_for_load_state("networkidle") page.wait_for_timeout(5000) - # login to web url with username and password - # login_page = LoginPage(page) + + # Uncomment if authentication is needed # load_dotenv() + # login_page = LoginPage(page) # login_page.authenticate(os.getenv('user_name'), os.getenv('pass_word')) + yield page browser.close() - +# ---------- HTML Report Title ---------- @pytest.hookimpl(tryfirst=True) def pytest_html_report_title(report): report.title = "Test_Automation_Chat_with_your_Data" +# ---------- Logging Setup per Test ---------- +@pytest.hookimpl(tryfirst=True) +def pytest_runtest_setup(item): + stream = io.StringIO() + handler = logging.StreamHandler(stream) + handler.setLevel(logging.INFO) + logger = logging.getLogger() + logger.addHandler(handler) + log_streams[item.nodeid] = (handler, stream) -# Add a column for descriptions -def pytest_html_results_table_header(cells): - cells.insert(1, html.th("Description")) - - -def pytest_html_results_table_row(report, cells): - cells.insert( - 1, html.td(report.description if hasattr(report, "description") else "") - ) - - -# Add logs and docstring to report +# ---------- Attach Logs to HTML Report ---------- @pytest.hookimpl(hookwrapper=True) def pytest_runtest_makereport(item, call): outcome = yield report = outcome.get_result() - report.description = str(item.function.__doc__) - os.makedirs("logs", exist_ok=True) - extra = getattr(report, "extra", []) - report.extra = extra + + if report.when == "call": + question_logs = getattr(item, "_question_logs", None) + if question_logs: + for i, (question, logs) in enumerate(question_logs.items(), start=1): + report.sections.append((f"Q{i:02d}: {question}", logs)) + else: + log = getattr(item, 
"_captured_log", None) + if log: + report.sections.append(("Captured Log", log)) + +# ---------- Optional: Clean Up Node IDs for Parametrized Prompts ---------- +def pytest_collection_modifyitems(items): + for item in items: + if hasattr(item, 'callspec') and "prompt" in item.callspec.params: + item._nodeid = item.callspec.params["prompt"] + +# ---------- Rename Duration Column in HTML Report ---------- +def rename_duration_column(): + report_path = os.path.abspath("report.html") + if not os.path.exists(report_path): + print("Report file not found, skipping column rename.") + return + + with open(report_path, 'r', encoding='utf-8') as f: + soup = BeautifulSoup(f, 'html.parser') + + headers = soup.select('table#results-table thead th') + for th in headers: + if th.text.strip() == 'Duration': + th.string = 'Execution Time' + break + else: + print("'Duration' column not found in report.") + + with open(report_path, 'w', encoding='utf-8') as f: + f.write(str(soup)) + +atexit.register(rename_duration_column) diff --git a/tests/e2e-test/tests/test_chat_with_your_data.py b/tests/e2e-test/tests/test_chat_with_your_data.py index def96c501..84e6905ee 100644 --- a/tests/e2e-test/tests/test_chat_with_your_data.py +++ b/tests/e2e-test/tests/test_chat_with_your_data.py @@ -1,4 +1,7 @@ import logging +import time +import pytest +import io from config.constants import * from pages.adminPage import AdminPage @@ -6,72 +9,144 @@ logger = logging.getLogger(__name__) +# === Step Functions === -def test_golden_path_web_page_demo_script(login_logout): - """Validate Golden path test case for Chat with your Data""" - page = login_logout +def validate_admin_page_loaded(page, admin_page, home_page): page.goto(ADMIN_URL) - logger.info("Step 1: Validate Admin page is loaded.") - admin_page = AdminPage(page) - assert ( - admin_page_title == page.locator(admin_page.ADMIN_PAGE_TITLE).text_content() - ), "page title not found" - logger.info("Step 2: Validate Files are uploaded or not") + 
actual_title = page.locator(admin_page.ADMIN_PAGE_TITLE).text_content() + assert actual_title == "Chat with your data Solution Accelerator", "Admin page title mismatch" + +def validate_files_are_uploaded(page, admin_page, home_page): admin_page.click_delete_data_tab() - assert ( - page.locator(admin_page.DELETE_CHECK_BOXES).count() >= 1 - ), "Files are not uploaded." - logger.info("Step 3: Validate Web page is loaded.") + checkbox_count = page.locator(admin_page.DELETE_CHECK_BOXES).count() + assert checkbox_count >= 1, "No files available to delete" + +def goto_web_page(page, admin_page, home_page): page.goto(WEB_URL) - home_page = WebUserPage(page) - logger.info("Step 5: Validate Chat history has been deleted.") + +def delete_chat_history(page, admin_page, home_page): home_page.delete_chat_history() - failed_questions = [] - logger.info("Step 6: Validate Golden Path prompts response") - - def ask_question_and_check(question, attempt): - home_page.wait_for_load(4000) - home_page.enter_a_question(question) - home_page.click_send_button() - home_page.validate_response_status(question) - - response_text = page.locator(home_page.ANSWER_TEXT) - response_count = response_text.count() - - if home_page.has_reference_link(): - logger.info("Step 6.1: Reference link found. 
Opening citation.") - home_page.click_reference_link_in_response() - logger.info("Step 6.2: Closing citation.") - home_page.close_citation() - - if response_count == 0: - return False # no response found - - response_text_content = response_text.nth(response_count - 1).text_content() - - if response_text_content == invalid_response: - print(f"[Attempt {attempt}] Invalid response({response_text_content}) for prompt: {question}") - return False - return True - - # First run through all questions - for question in questions: - if not ask_question_and_check(question, attempt=1): - failed_questions.append(question) - - # Retry failed questions once more - if failed_questions: - logger.info("Step 7: Retry failed question one more time.") - for question in failed_questions: - if not ask_question_and_check(question, attempt=2): - home_page.soft_assert( - False, - f"Failed after retry- Invalid response for prompt: {question}", - ) - - logger.info("Step 8: Validate chat history.") +# === Golden Path Step Definitions === + +golden_path_functions = [ + validate_admin_page_loaded, + validate_files_are_uploaded, + goto_web_page, + delete_chat_history, +] + +step_descriptions = [ + "Validate Admin page is loaded", + "Validate files are uploaded", + "Validate Web page is loaded", + "Delete chat history" +] + +golden_path_steps = list(zip(step_descriptions, golden_path_functions)) + +# === Golden Path Test Execution === + +@pytest.mark.parametrize("step_desc, action", golden_path_steps, ids=[desc for desc, _ in golden_path_steps]) +def test_golden_path_steps(login_logout, step_desc, action, request): + request.node._nodeid = step_desc + page = login_logout + admin_page = AdminPage(page) + home_page = WebUserPage(page) + + log_capture = io.StringIO() + handler = logging.StreamHandler(log_capture) + logger.addHandler(handler) + + logger.info(f"🟒 START: {step_desc}") + start = time.time() + + try: + result = action(page, admin_page, home_page) + if isinstance(result, tuple): + for 
func in result: + if callable(func): + func() + except AssertionError as e: + logger.error(f"❌ FAILED: {step_desc} - {str(e)}") + raise + finally: + duration = time.time() - start + logger.info(f"βœ… END: {step_desc} | Execution Time: {duration:.2f}s") + logger.removeHandler(handler) + setattr(request.node, "_captured_log", log_capture.getvalue()) + + +# === Each Question as a Separate Test Case === + +@pytest.mark.parametrize("question", questions, ids=[f"Validate response for prompt : {q}" for q in questions]) +def test_gp_question(login_logout, question, request): + page = login_logout + home_page = WebUserPage(page) + request.node._nodeid = f"Validate response for prompt : {question}" + + log_capture = io.StringIO() + handler = logging.StreamHandler(log_capture) + logger.addHandler(handler) + + success = False + start_time = time.time() + + try: + for attempt in range(1, 3): + logger.info(f"[GP] [{question}] Attempt {attempt} - START") + + try: + home_page.wait_for_load(4000) + home_page.enter_a_question(question) + home_page.click_send_button() + home_page.validate_response_status(question) + + response_text = page.locator(home_page.ANSWER_TEXT) + response_count = response_text.count() + + if response_count == 0: + logger.warning(f"[GP] [{question}] No response returned.") + continue + + if home_page.has_reference_link(): + logger.info(f"[GP] [{question}] Reference link found. 
Opening citation.") + home_page.click_reference_link_in_response() + logger.info(f"[GP] [{question}] Closing citation.") + home_page.close_citation() + + response_content = response_text.nth(response_count - 1).text_content().strip() + + if response_content == invalid_response: + logger.warning(f"[GP] [{question}] Invalid response: {response_content}") + continue + + logger.info(f"[GP] [{question}] Valid response received.") + success = True + break + + except Exception as e: + logger.error(f"[GP] [{question}] Exception: {str(e)}") + + if not success: + pytest.fail(f"[GP] [{question}] Failed after 2 attempts.") + + finally: + duration = time.time() - start_time + logger.info(f"[GP] [{question}] Execution Time: {duration:.2f}s") + logger.removeHandler(handler) + setattr(request.node, "_captured_log", log_capture.getvalue()) + + +# === Chat History Test === + +def test_validate_chat_history(login_logout, request): + request.node._nodeid = "Validate chat history shown and closed" + page = login_logout + home_page = WebUserPage(page) + + logger.info("[FINAL] Showing chat history after all questions executed.") home_page.show_chat_history() - logger.info("Step 9: Validate chat history closed.") + + logger.info("[FINAL] Closing chat history.") home_page.close_chat_history() - home_page.assert_all() diff --git a/tests/e2e-test/tests/test_poc_chat_with_your_data.py b/tests/e2e-test/tests/test_poc_chat_with_your_data.py deleted file mode 100644 index e253d39c4..000000000 --- a/tests/e2e-test/tests/test_poc_chat_with_your_data.py +++ /dev/null @@ -1,71 +0,0 @@ -import logging - -from config.constants import * -from pages.adminPage import AdminPage -from pages.webUserPage import WebUserPage - -logger = logging.getLogger(__name__) - - -def test_golden_path_web_page_demo_script(login_logout): - """Validate Golden path test case for Chat with your Data""" - page = login_logout - page.goto(ADMIN_URL) - logger.info("Step 1: Validate Admin page is loaded.") - admin_page = 
AdminPage(page) - assert ( - admin_page_title == page.locator(admin_page.ADMIN_PAGE_TITLE).text_content() - ), "page title not found" - logger.info("Step 2: Validate Files are uploaded or not") - admin_page.click_delete_data_tab() - assert ( - page.locator(admin_page.DELETE_CHECK_BOXES).count() >= 1 - ), "Files are not uploaded." - logger.info("Step 3: Validate Web page is loaded.") - page.goto(WEB_URL) - home_page = WebUserPage(page) - logger.info("Step 5: Validate Chat history has been deleted.") - home_page.delete_chat_history() - - failed_questions = [] - logger.info("Step 6: Validate Golden Path prompts response") - - def ask_question_and_check(question, attempt): - home_page.wait_for_load(4000) - home_page.enter_a_question(question) - home_page.click_send_button() - home_page.validate_response_status(question) - - response_text = page.locator(home_page.ANSWER_TEXT) - response_count = response_text.count() - - if response_count == 0: - return False # no response found - - response_text_content = response_text.nth(response_count - 1).text_content() - - if response_text_content == invalid_response: - print(f"[Attempt {attempt}] Invalid response for prompt: {question}") - return False - return True - - # First run through all questions - for question in questions: - if not ask_question_and_check(question, attempt=1): - failed_questions.append(question) - - # Retry failed questions once more - if failed_questions: - logger.info("Step 7: Retry failed question one more time.") - for question in failed_questions: - if not ask_question_and_check(question, attempt=2): - home_page.soft_assert( - False, - f"Failed after retry- Invalid response for prompt: {question}", - ) - - logger.info("Step 8: Validate chat history.") - home_page.show_chat_history() - logger.info("Step 9: Validate chat history closed.") - home_page.close_chat_history() - home_page.assert_all()