From 4f36121239ee09380126b8db3c0af144a0f08532 Mon Sep 17 00:00:00 2001 From: "Niraj Chaudhari (Persistent Systems Inc)" Date: Tue, 4 Nov 2025 12:03:22 +0530 Subject: [PATCH 01/32] Remove OpenAI API call from app.py --- src/App/app.py | 70 +++++++++++++++++++------------------------------- 1 file changed, 27 insertions(+), 43 deletions(-) diff --git a/src/App/app.py b/src/App/app.py index 9a9ea298..e157b6b1 100644 --- a/src/App/app.py +++ b/src/App/app.py @@ -9,9 +9,9 @@ from azure.identity import get_bearer_token_provider from backend.helpers.azure_credential_utils import get_azure_credential from azure.monitor.opentelemetry import configure_azure_monitor +from azure.ai.projects import AIProjectClient # from quart.sessions import SecureCookieSessionInterface -from openai import AsyncAzureOpenAI from opentelemetry import trace from opentelemetry.trace import Status, StatusCode from quart import ( @@ -129,7 +129,7 @@ async def assets(path): if DEBUG.lower() == "true": logging.basicConfig(level=logging.DEBUG) -USER_AGENT = "GitHubSampleWebApp/AsyncAzureOpenAI/1.0.0" +USER_AGENT = "GitHubSampleWebApp/AzureAIProjects/1.0.0" frontend_settings = { "auth_enabled": config.AUTH_ENABLED, @@ -163,9 +163,9 @@ def should_use_data(): SHOULD_USE_DATA = should_use_data() -# Initialize Azure OpenAI Client -def init_openai_client(use_data=SHOULD_USE_DATA): - azure_openai_client = None +# Initialize Azure AI Projects Client +def init_ai_projects_client(use_data=SHOULD_USE_DATA): + ai_projects_client = None try: # API version check if ( @@ -177,59 +177,42 @@ def init_openai_client(use_data=SHOULD_USE_DATA): ) # Endpoint - if not config.AZURE_OPENAI_ENDPOINT and not config.AZURE_OPENAI_RESOURCE: + if not config.AI_PROJECT_ENDPOINT: raise Exception( - "AZURE_OPENAI_ENDPOINT or AZURE_OPENAI_RESOURCE is required" + "AI_PROJECT_ENDPOINT is required for Azure AI Projects client" ) - endpoint = ( - config.AZURE_OPENAI_ENDPOINT - if config.AZURE_OPENAI_ENDPOINT - else f"https://{config.AZURE_OPENAI_RESOURCE}.openai.azure.com/" + # Authentication using managed identity + credential = get_azure_credential(config.MID_ID) + # Create AI Projects client + ai_projects_client = AIProjectClient( + endpoint=config.AI_PROJECT_ENDPOINT, + credential=credential, + api_version=config.AZURE_OPENAI_PREVIEW_API_VERSION, ) - # Authentication - aoai_api_key = config.AZURE_OPENAI_KEY - ad_token_provider = None - if not aoai_api_key: - logging.debug("No AZURE_OPENAI_KEY found, using Azure AD auth") - ad_token_provider = get_bearer_token_provider( - get_azure_credential(config.MID_ID), "https://cognitiveservices.azure.com/.default" - ) - - # Deployment - deployment = config.AZURE_OPENAI_MODEL - if not deployment: - raise Exception("AZURE_OPENAI_MODEL is required") - - # Default Headers - default_headers = {"x-ms-useragent": USER_AGENT} - - azure_openai_client = AsyncAzureOpenAI( - api_version=config.AZURE_OPENAI_PREVIEW_API_VERSION, - api_key=aoai_api_key, - azure_ad_token_provider=ad_token_provider, - default_headers=default_headers, - azure_endpoint=endpoint, + # Get the OpenAI client from the AI Projects client + openai_client = ai_projects_client.get_openai_client( + api_version=config.AZURE_OPENAI_PREVIEW_API_VERSION ) track_event_if_configured( - "AzureOpenAIClientInitialized", + "AzureAIProjectsClientInitialized", { "status": "success", - "endpoint": endpoint, - "use_api_key": bool(aoai_api_key), + "endpoint": config.AI_PROJECT_ENDPOINT, + "use_managed_identity": True, }, ) - - return azure_openai_client + + return openai_client 
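A minimal sketch of the client bootstrap this patch moves to: build an AIProjectClient from the Foundry project endpoint plus an Azure AD credential, then derive an OpenAI-compatible client from it. The get_openai_client(api_version=...) call simply mirrors the patched code above and depends on the installed azure-ai-projects version; the environment variable names are the ones this repo's config reads (AZURE_AI_AGENT_ENDPOINT, AZURE_OPENAI_PREVIEW_API_VERSION), so treat the snippet as illustrative rather than authoritative.

import os

from azure.ai.projects import AIProjectClient
from azure.identity import DefaultAzureCredential

# Settings the app normally reads via backend.common.config.
endpoint = os.environ["AZURE_AI_AGENT_ENDPOINT"]  # Foundry project endpoint
api_version = os.environ.get("AZURE_OPENAI_PREVIEW_API_VERSION", "2025-04-01-preview")

# Azure AD credential; the app itself goes through its get_azure_credential helper.
project_client = AIProjectClient(endpoint=endpoint, credential=DefaultAzureCredential())

# Same call the patch uses: returns a client exposing the familiar
# chat.completions / embeddings surface consumed elsewhere in app.py.
openai_client = project_client.get_openai_client(api_version=api_version)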
except Exception as e: - logging.exception("Exception in Azure OpenAI initialization", e) + logging.exception("Exception in Azure AI Projects initialization", e) span = trace.get_current_span() if span is not None: span.record_exception(e) span.set_status(Status(StatusCode.ERROR, str(e))) - azure_openai_client = None + ai_projects_client = None raise e @@ -521,7 +504,7 @@ async def send_chat_request(request_body, request_headers): model_args = prepare_model_args(request_body, request_headers) try: - azure_openai_client = init_openai_client() + azure_openai_client = init_ai_projects_client() raw_response = ( await azure_openai_client.chat.completions.with_raw_response.create( **model_args @@ -1324,7 +1307,7 @@ async def generate_title(conversation_messages): messages.append({"role": "user", "content": title_prompt}) try: - azure_openai_client = init_openai_client(use_data=False) + azure_openai_client = init_ai_projects_client(use_data=False) response = await azure_openai_client.chat.completions.create( model=config.AZURE_OPENAI_MODEL, messages=messages, @@ -1333,6 +1316,7 @@ async def generate_title(conversation_messages): ) title = json.loads(response.choices[0].message.content)["title"] + return title except Exception: return messages[-2]["content"] From 89b7912760b076565f538e359d04be5323c8e219 Mon Sep 17 00:00:00 2001 From: "Niraj Chaudhari (Persistent Systems Inc)" Date: Tue, 4 Nov 2025 14:38:52 +0530 Subject: [PATCH 02/32] resolve pylint issues in app.py --- src/App/app.py | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) diff --git a/src/App/app.py b/src/App/app.py index e157b6b1..4effa705 100644 --- a/src/App/app.py +++ b/src/App/app.py @@ -6,7 +6,6 @@ import uuid from types import SimpleNamespace -from azure.identity import get_bearer_token_provider from backend.helpers.azure_credential_utils import get_azure_credential from azure.monitor.opentelemetry import configure_azure_monitor from azure.ai.projects import AIProjectClient @@ -191,7 +190,7 @@ def init_ai_projects_client(use_data=SHOULD_USE_DATA): api_version=config.AZURE_OPENAI_PREVIEW_API_VERSION, ) - # Get the OpenAI client from the AI Projects client + # Get the OpenAI client from the AI Projects client openai_client = ai_projects_client.get_openai_client( api_version=config.AZURE_OPENAI_PREVIEW_API_VERSION ) @@ -204,7 +203,7 @@ def init_ai_projects_client(use_data=SHOULD_USE_DATA): "use_managed_identity": True, }, ) - + return openai_client except Exception as e: logging.exception("Exception in Azure AI Projects initialization", e) From f033e81c104098356693e61a520d47cd17acb5a5 Mon Sep 17 00:00:00 2001 From: "Niraj Chaudhari (Persistent Systems Inc)" Date: Tue, 4 Nov 2025 17:20:06 +0530 Subject: [PATCH 03/32] update test_app.py file for init_openai method --- src/App/tests/test_app.py | 29 +++++++++++++++-------------- 1 file changed, 15 insertions(+), 14 deletions(-) diff --git a/src/App/tests/test_app.py b/src/App/tests/test_app.py index 8828f9b6..154fe715 100644 --- a/src/App/tests/test_app.py +++ b/src/App/tests/test_app.py @@ -8,7 +8,7 @@ delete_all_conversations, generate_title, init_cosmosdb_client, - init_openai_client, + init_ai_projects_client, stream_chat_request, ) from quart import Response @@ -78,15 +78,16 @@ def test_create_app(): assert "routes" in app.blueprints -@patch("app.get_bearer_token_provider") -@patch("app.AsyncAzureOpenAI") -def test_init_openai_client(mock_async_openai, mock_token_provider): - mock_token_provider.return_value = MagicMock() - mock_async_openai.return_value = MagicMock() 
+@patch("app.AIProjectClient") +def test_init_ai_projects_client(mock_ai_projects_client): + mock_project_instance = MagicMock() + mock_openai_client = MagicMock() + mock_project_instance.inference.get_azure_openai_client.return_value = mock_openai_client + mock_ai_projects_client.return_value = mock_project_instance - client = init_openai_client() + client = init_ai_projects_client() assert client is not None - mock_async_openai.assert_called_once() + mock_ai_projects_client.assert_called_once() @patch("app.CosmosConversationClient") @@ -1179,15 +1180,15 @@ async def test_add_conversation_conversation_not_found( @pytest.mark.asyncio -@patch("app.init_openai_client") -async def test_generate_title_success(mock_init_openai_client): +@patch("app.init_ai_projects_client") +async def test_generate_title_success(mock_init_ai_projects_client): mock_openai_client = AsyncMock() mock_openai_client.chat.completions.create.return_value = MagicMock( choices=[ MagicMock(message=MagicMock(content=json.dumps({"title": "Test Title"}))) ] ) - mock_init_openai_client.return_value = mock_openai_client + mock_init_ai_projects_client.return_value = mock_openai_client conversation_messages = [{"role": "user", "content": "Hello"}] title = await generate_title(conversation_messages) @@ -1195,11 +1196,11 @@ async def test_generate_title_success(mock_init_openai_client): @pytest.mark.asyncio -@patch("app.init_openai_client") -async def test_generate_title_exception(mock_init_openai_client): +@patch("app.init_ai_projects_client") +async def test_generate_title_exception(mock_init_ai_projects_client): mock_openai_client = AsyncMock() mock_openai_client.chat.completions.create.side_effect = Exception("API error") - mock_init_openai_client.return_value = mock_openai_client + mock_init_ai_projects_client.return_value = mock_openai_client conversation_messages = [{"role": "user", "content": "Hello"}] title = await generate_title(conversation_messages) From 6887b780350025a91631de92c2724dd45626c315 Mon Sep 17 00:00:00 2001 From: "Niraj Chaudhari (Persistent Systems Inc)" Date: Tue, 4 Nov 2025 17:28:10 +0530 Subject: [PATCH 04/32] update app.py to pass test file --- src/App/app.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/src/App/app.py b/src/App/app.py index 4effa705..0f154083 100644 --- a/src/App/app.py +++ b/src/App/app.py @@ -206,7 +206,7 @@ def init_ai_projects_client(use_data=SHOULD_USE_DATA): return openai_client except Exception as e: - logging.exception("Exception in Azure AI Projects initialization", e) + logging.exception("Exception in Azure AI Projects initialization") span = trace.get_current_span() if span is not None: span.record_exception(e) @@ -247,7 +247,7 @@ def init_cosmosdb_client(): }, ) except Exception as e: - logging.exception("Exception in CosmosDB initialization", e) + logging.exception("Exception in CosmosDB initialization") span = trace.get_current_span() if span is not None: span.record_exception(e) From 618968063f0bf553515f874a67fa20d40194ce53 Mon Sep 17 00:00:00 2001 From: "Niraj Chaudhari (Persistent Systems Inc)" Date: Wed, 5 Nov 2025 08:47:15 +0530 Subject: [PATCH 05/32] Fix for test file --- src/App/tests/test_app.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/src/App/tests/test_app.py b/src/App/tests/test_app.py index 154fe715..488662d1 100644 --- a/src/App/tests/test_app.py +++ b/src/App/tests/test_app.py @@ -48,6 +48,8 @@ def set_env_vars(): "backend.common.config.config.AZURE_COSMOSDB_ENABLE_FEEDBACK", True ), patch( 
"backend.common.config.config.AZURE_OPENAI_KEY", "valid_key" + ), patch( + "backend.common.config.config.AI_PROJECT_ENDPOINT", "https://test-ai-project.com/" ), patch( "backend.common.config.config.UI_TITLE", "Woodgrove Bank" ), patch( From c1325e8e5ac1a74c088af8fdacbfb0944fe9b154 Mon Sep 17 00:00:00 2001 From: "Niraj Chaudhari (Persistent Systems Inc)" Date: Wed, 5 Nov 2025 12:42:41 +0530 Subject: [PATCH 06/32] disable soft delete --- infra/main.bicep | 1 + infra/main.json | 1 + 2 files changed, 2 insertions(+) diff --git a/infra/main.bicep b/infra/main.bicep index 6d94c82a..ba705e30 100644 --- a/infra/main.bicep +++ b/infra/main.bicep @@ -927,6 +927,7 @@ module avmStorageAccount 'br/public:avm/res/storage/storage-account:0.20.0' = { blobServices: { corsRules: [] deleteRetentionPolicyEnabled: false + containerDeleteRetentionPolicyEnabled: false containers: [ { name: 'data' diff --git a/infra/main.json b/infra/main.json index 68ed7079..99402c6d 100644 --- a/infra/main.json +++ b/infra/main.json @@ -33821,6 +33821,7 @@ "value": { "corsRules": [], "deleteRetentionPolicyEnabled": false, + "containerDeleteRetentionPolicyEnabled": false, "containers": [ { "name": "data", From 20b125bc1e59a5c2df16f98fa888a6f3cf5e4d4a Mon Sep 17 00:00:00 2001 From: "Niraj Chaudhari (Persistent Systems Inc)" Date: Wed, 5 Nov 2025 18:24:55 +0530 Subject: [PATCH 07/32] fetch upload/download data from block storage instead of data lake --- .../index_scripts/create_search_index.py | 44 +++++++-------- infra/scripts/process_sample_data.sh | 56 +++++++++---------- 2 files changed, 49 insertions(+), 51 deletions(-) diff --git a/infra/scripts/index_scripts/create_search_index.py b/infra/scripts/index_scripts/create_search_index.py index b0a56f11..87b3ef77 100644 --- a/infra/scripts/index_scripts/create_search_index.py +++ b/infra/scripts/index_scripts/create_search_index.py @@ -32,6 +32,7 @@ FileSystemClient, ) from openai import AzureOpenAI +from azure.storage.blob import BlobServiceClient # Get Azure Key Vault Client key_vault_name = "kv_to-be-replaced" #'nc6262-kv-2fpeafsylfd2e' @@ -199,16 +200,13 @@ def chunk_data(text): # paths = os.listdir(path_name) -account_url = f"https://{account_name}.dfs.core.windows.net" +account_url = f"https://{account_name}.blob.core.windows.net" +blob_service_client = BlobServiceClient(account_url, credential=credential) +container_client = blob_service_client.get_container_client(file_system_client_name) -service_client = DataLakeServiceClient( - account_url, credential=credential, api_version="2023-01-03" -) +print(f"Listing blobs under '{directory}' using BlobServiceClient...") +paths = [blob.name for blob in container_client.list_blobs(name_starts_with=directory)] -file_system_client = service_client.get_file_system_client(file_system_client_name) -directory_name = directory -paths = file_system_client.get_paths(path=directory_name) -print(paths) search_client = SearchClient(search_endpoint, index_name, credential) # index_client = SearchIndexClient(endpoint=search_endpoint, credential=credential) @@ -221,22 +219,22 @@ def chunk_data(text): # Read the CSV file into a Pandas DataFrame file_path = csv_file_name print(file_path) -file_client = file_system_client.get_file_client(file_path) -csv_file = file_client.download_file() -df_metadata = pd.read_csv(csv_file, encoding="utf-8") +blob_client = container_client.get_blob_client(file_path) +download_stream = blob_client.download_blob() +df_metadata = pd.read_csv(download_stream, encoding="utf-8") docs = [] counter = 0 -for path in paths: 
- # file_path = f'Data/{foldername}/meeting_transcripts/' + path - # with open(file_path, "r") as file: - # data = json.load(file) - file_client = file_system_client.get_file_client(path.name) - data_file = file_client.download_file() - data = json.load(data_file) - text = data["Content"] - - filename = path.name.split("/")[-1] +for blob_name in paths: + if not blob_name.endswith(".json"): + continue + + blob_client = container_client.get_blob_client(blob_name) + download_stream = blob_client.download_blob() + data = json.loads(download_stream.readall()) + text = data.get("Content", "") + + filename = blob_name.split("/")[-1] document_id = filename.replace(".json", "").replace("convo_", "") # print(document_id) df_file_metadata = df_metadata[ @@ -276,7 +274,7 @@ def chunk_data(text): "chunk_id": d["chunk_id"], "client_id": d["client_id"], "content": d["content"], - "sourceurl": path.name.split("/")[-1], + "sourceurl": blob_name.split("/")[-1], "contentVector": v_contentVector, } ) @@ -284,7 +282,7 @@ def chunk_data(text): if counter % 10 == 0: result = search_client.upload_documents(documents=docs) docs = [] - print(f" {str(counter)} uploaded") + print(f"{counter} documents uploaded...") # upload the last batch if docs != []: diff --git a/infra/scripts/process_sample_data.sh b/infra/scripts/process_sample_data.sh index a1cec54c..1ccaf9a0 100644 --- a/infra/scripts/process_sample_data.sh +++ b/infra/scripts/process_sample_data.sh @@ -332,41 +332,41 @@ get_values_from_azd_env() { } get_values_from_az_deployment() { - echo "Getting values from Azure deployment outputs..." - - deploymentName=$(az group show --name "$resourceGroupName" --query "tags.DeploymentName" -o tsv) - echo "Deployment Name (from tag): $deploymentName" - + echo "Getting values from Azure deployment outputs..." + + deploymentName=$(az group show --name "$resourceGroupName" --query "tags.DeploymentName" -o tsv) + echo "Deployment Name (from tag): $deploymentName" + echo "Fetching deployment outputs..." 
- + # Get all outputs deploymentOutputs=$(az deployment group show \ --name "$deploymentName" \ --resource-group "$resourceGroupName" \ --query "properties.outputs" -o json) - + # Extract each value - cosmosDbAccountName=$(echo "$deploymentOutputs" | grep -A 3 '"cosmosdB_ACCOUNT_NAME"' | grep '"value"' | sed 's/.*"value": *"\([^"]*\)".*/\1/') - storageAccount=$(echo "$deploymentOutputs" | grep -A 3 '"storagE_ACCOUNT_NAME"' | grep '"value"' | sed 's/.*"value": *"\([^"]*\)".*/\1/') - fileSystem=$(echo "$deploymentOutputs" | grep -A 3 '"storagE_CONTAINER_NAME"' | grep '"value"' | sed 's/.*"value": *"\([^"]*\)".*/\1/') - keyvaultName=$(echo "$deploymentOutputs" | grep -A 3 '"keY_VAULT_NAME"' | grep '"value"' | sed 's/.*"value": *"\([^"]*\)".*/\1/') - sqlServerName=$(echo "$deploymentOutputs" | grep -A 3 '"sqldB_SERVER_NAME"' | grep '"value"' | sed 's/.*"value": *"\([^"]*\)".*/\1/') - webAppManagedIdentityDisplayName=$(echo "$deploymentOutputs" | grep -A 3 '"managedidentitY_WEBAPP_NAME"' | grep '"value"' | sed 's/.*"value": *"\([^"]*\)".*/\1/') - webAppManagedIdentityClientId=$(echo "$deploymentOutputs" | grep -A 3 '"managedidentitY_WEBAPP_CLIENTID"' | grep '"value"' | sed 's/.*"value": *"\([^"]*\)".*/\1/') - SqlDatabaseName=$(echo "$deploymentOutputs" | grep -A 3 '"sqldB_DATABASE"' | grep '"value"' | sed 's/.*"value": *"\([^"]*\)".*/\1/') - sqlManagedIdentityClientId=$(echo "$deploymentOutputs" | grep -A 3 '"managedidentitY_SQL_CLIENTID"' | grep '"value"' | sed 's/.*"value": *"\([^"]*\)".*/\1/') - sqlManagedIdentityDisplayName=$(echo "$deploymentOutputs" | grep -A 3 '"managedidentitY_SQL_NAME"' | grep '"value"' | sed 's/.*"value": *"\([^"]*\)".*/\1/') - aiSearchName=$(echo "$deploymentOutputs" | grep -A 3 '"aI_SEARCH_SERVICE_NAME"' | grep '"value"' | sed 's/.*"value": *"\([^"]*\)".*/\1/') - aif_resource_id=$(echo "$deploymentOutputs" | grep -A 3 '"aI_FOUNDRY_RESOURCE_ID"' | grep '"value"' | sed 's/.*"value": *"\([^"]*\)".*/\1/') - - # Validate that we extracted all required values - if [ -z "$cosmosDbAccountName" ] || [ -z "$storageAccount" ] || [ -z "$fileSystem" ] || [ -z "$keyvaultName" ] || [ -z "$sqlServerName" ] || [ -z "$SqlDatabaseName" ] || [ -z "$sqlManagedIdentityClientId" ] || [ -z "$sqlManagedIdentityDisplayName" ] || [ -z "$aiSearchName" ] || [ -z "$aif_resource_id" ]; then - echo "Error: One or more required values could not be retrieved from deployment outputs." - return 1 - else - echo "All values retrieved successfully from deployment outputs." 
- return 0 - fi + cosmosDbAccountName=$(echo "$deploymentOutputs" | grep -A 3 '"cosmosDbAccountName"' | grep '"value"' | sed 's/.*"value": *"\([^"]*\)".*/\1/') + storageAccount=$(echo "$deploymentOutputs" | grep -A 3 '"storageAccountName"' | grep '"value"' | sed 's/.*"value": *"\([^"]*\)".*/\1/') + fileSystem=$(echo "$deploymentOutputs" | grep -A 3 '"storageContainerName"' | grep '"value"' | sed 's/.*"value": *"\([^"]*\)".*/\1/') + keyvaultName=$(echo "$deploymentOutputs" | grep -A 3 '"keyVaultName"' | grep '"value"' | sed 's/.*"value": *"\([^"]*\)".*/\1/') + sqlServerName=$(echo "$deploymentOutputs" | grep -A 3 '"sqlDbServerName"' | grep '"value"' | sed 's/.*"value": *"\([^"]*\)".*/\1/') + webAppManagedIdentityDisplayName=$(echo "$deploymentOutputs" | grep -A 3 '"managedIdentityWebAppName"' | grep '"value"' | sed 's/.*"value": *"\([^"]*\)".*/\1/') + webAppManagedIdentityClientId=$(echo "$deploymentOutputs" | grep -A 3 '"managedIdentityWebAppClientId"' | grep '"value"' | sed 's/.*"value": *"\([^"]*\)".*/\1/') + SqlDatabaseName=$(echo "$deploymentOutputs" | grep -A 3 '"sqlDbDatabase"' | grep '"value"' | sed 's/.*"value": *"\([^"]*\)".*/\1/') + sqlManagedIdentityClientId=$(echo "$deploymentOutputs" | grep -A 3 '"managedIdentitySqlClientId"' | grep '"value"' | sed 's/.*"value": *"\([^"]*\)".*/\1/') + sqlManagedIdentityDisplayName=$(echo "$deploymentOutputs" | grep -A 3 '"managedIdentitySqlName"' | grep '"value"' | sed 's/.*"value": *"\([^"]*\)".*/\1/') + aiSearchName=$(echo "$deploymentOutputs" | grep -A 3 '"aiSearchServiceName"' | grep '"value"' | sed 's/.*"value": *"\([^"]*\)".*/\1/') + aif_resource_id=$(echo "$deploymentOutputs" | grep -A 3 '"aiFoundryResourceId"' | grep '"value"' | sed 's/.*"value": *"\([^"]*\)".*/\1/') + + # Validate that we extracted all required values + if [ -z "$cosmosDbAccountName" ] || [ -z "$storageAccount" ] || [ -z "$fileSystem" ] || [ -z "$keyvaultName" ] || [ -z "$sqlServerName" ] || [ -z "$SqlDatabaseName" ] || [ -z "$sqlManagedIdentityClientId" ] || [ -z "$sqlManagedIdentityDisplayName" ] || [ -z "$aiSearchName" ] || [ -z "$aif_resource_id" ]; then + echo "Error: One or more required values could not be retrieved from deployment outputs." + return 1 + else + echo "All values retrieved successfully from deployment outputs." 
+ return 0 + fi } get_values_from_user() { From ea2c82ac83ce0d0af068d596ec8d5213e3226e3b Mon Sep 17 00:00:00 2001 From: "Niraj Chaudhari (Persistent Systems Inc)" Date: Wed, 5 Nov 2025 18:29:36 +0530 Subject: [PATCH 08/32] remove unnecessary flags --- infra/main.bicep | 1 - infra/main.json | 1 - 2 files changed, 2 deletions(-) diff --git a/infra/main.bicep b/infra/main.bicep index ba705e30..6d94c82a 100644 --- a/infra/main.bicep +++ b/infra/main.bicep @@ -927,7 +927,6 @@ module avmStorageAccount 'br/public:avm/res/storage/storage-account:0.20.0' = { blobServices: { corsRules: [] deleteRetentionPolicyEnabled: false - containerDeleteRetentionPolicyEnabled: false containers: [ { name: 'data' diff --git a/infra/main.json b/infra/main.json index 99402c6d..68ed7079 100644 --- a/infra/main.json +++ b/infra/main.json @@ -33821,7 +33821,6 @@ "value": { "corsRules": [], "deleteRetentionPolicyEnabled": false, - "containerDeleteRetentionPolicyEnabled": false, "containers": [ { "name": "data", From d2e473128f35e1a2729e4e3108fe39cefc6a8675 Mon Sep 17 00:00:00 2001 From: "Niraj Chaudhari (Persistent Systems Inc)" Date: Thu, 6 Nov 2025 15:21:22 +0530 Subject: [PATCH 09/32] Remove OpenAI API call reference from chat_with_data_plugin.py file --- src/App/backend/plugins/chat_with_data_plugin.py | 12 ------------ 1 file changed, 12 deletions(-) diff --git a/src/App/backend/plugins/chat_with_data_plugin.py b/src/App/backend/plugins/chat_with_data_plugin.py index d028f296..d3fdb432 100644 --- a/src/App/backend/plugins/chat_with_data_plugin.py +++ b/src/App/backend/plugins/chat_with_data_plugin.py @@ -1,7 +1,6 @@ import logging from typing import Annotated -import openai from azure.ai.agents.models import ( Agent, AzureAISearchQueryType, @@ -223,17 +222,6 @@ async def get_answers_from_calltranscripts( logging.error(f"Error in get_answers_from_calltranscripts: {str(e)}") return "Error retrieving data from call transcripts" - def get_openai_client(self): - token_provider = get_bearer_token_provider( - get_azure_credential(config.MID_ID), "https://cognitiveservices.azure.com/.default" - ) - openai_client = openai.AzureOpenAI( - azure_endpoint=config.AZURE_OPENAI_ENDPOINT, - azure_ad_token_provider=token_provider, - api_version=config.AZURE_OPENAI_PREVIEW_API_VERSION, - ) - return openai_client - def get_project_openai_client(self): project = AIProjectClient( endpoint=config.AI_PROJECT_ENDPOINT, credential=get_azure_credential(config.MID_ID) From e1176ce67be67871e7969804dfcaab321b0c9c9c Mon Sep 17 00:00:00 2001 From: "Niraj Chaudhari (Persistent Systems Inc)" Date: Thu, 6 Nov 2025 15:24:51 +0530 Subject: [PATCH 10/32] Remove unuse imports --- src/App/backend/plugins/chat_with_data_plugin.py | 1 - 1 file changed, 1 deletion(-) diff --git a/src/App/backend/plugins/chat_with_data_plugin.py b/src/App/backend/plugins/chat_with_data_plugin.py index d3fdb432..9ced6d9a 100644 --- a/src/App/backend/plugins/chat_with_data_plugin.py +++ b/src/App/backend/plugins/chat_with_data_plugin.py @@ -8,7 +8,6 @@ MessageRole, ) from azure.ai.projects import AIProjectClient -from azure.identity import get_bearer_token_provider from backend.helpers.azure_credential_utils import get_azure_credential from semantic_kernel.functions.kernel_function_decorator import kernel_function From 5a71a5fc500c96e331d8298698c11c76edf426cf Mon Sep 17 00:00:00 2001 From: Thanusree-Microsoft <168087422+Thanusree-Microsoft@users.noreply.github.com> Date: Thu, 6 Nov 2025 17:40:09 +0530 Subject: [PATCH 11/32] Update SampleQuestions.md with response time note 
(#736) Added a note about average response time and formatted the document. Co-authored-by: Prajwal-Microsoft --- docs/SampleQuestions.md | 2 ++ 1 file changed, 2 insertions(+) diff --git a/docs/SampleQuestions.md b/docs/SampleQuestions.md index 4a94b6e1..146b0869 100644 --- a/docs/SampleQuestions.md +++ b/docs/SampleQuestions.md @@ -2,6 +2,8 @@ To help you get started, here are some **Sample Prompts** you can ask after selecting the **Karen Berg** client: +> _Note: Average response time is 08-30 seconds._
+ 1. Click on **Karen Berg** client. 2. Ask the following questions: - What were Karen's concerns during our last meeting? From 7910fedd49b498a439c63596e6d0cd0f490dc1d3 Mon Sep 17 00:00:00 2001 From: "Niraj Chaudhari (Persistent Systems Inc)" Date: Thu, 6 Nov 2025 18:14:23 +0530 Subject: [PATCH 12/32] remove openai from requirements.txt --- src/App/requirements.txt | 1 - 1 file changed, 1 deletion(-) diff --git a/src/App/requirements.txt b/src/App/requirements.txt index ccf132f7..d7e42dad 100644 --- a/src/App/requirements.txt +++ b/src/App/requirements.txt @@ -1,7 +1,6 @@ # Core requirements azure-identity==1.25.0 # Flask[async]==2.3.2 -openai==2.0.1 azure-storage-blob==12.26.0 python-dotenv==1.1.1 azure-cosmos==4.9.0 From 32d2ce4afedaa5d64fb02881385bf54ce83cba9a Mon Sep 17 00:00:00 2001 From: "Niraj Chaudhari (Persistent Systems Inc)" Date: Thu, 6 Nov 2025 18:26:41 +0530 Subject: [PATCH 13/32] updated test file --- .../plugins/test_chat_with_data_plugin.py | 40 +++++++++---------- 1 file changed, 18 insertions(+), 22 deletions(-) diff --git a/src/App/tests/backend/plugins/test_chat_with_data_plugin.py b/src/App/tests/backend/plugins/test_chat_with_data_plugin.py index cc1e9191..554115b4 100644 --- a/src/App/tests/backend/plugins/test_chat_with_data_plugin.py +++ b/src/App/tests/backend/plugins/test_chat_with_data_plugin.py @@ -13,39 +13,35 @@ def setup_method(self): self.plugin = ChatWithDataPlugin() @patch("backend.plugins.chat_with_data_plugin.config") - @patch("backend.plugins.chat_with_data_plugin.openai.AzureOpenAI") - @patch("backend.plugins.chat_with_data_plugin.get_bearer_token_provider") + @patch("backend.plugins.chat_with_data_plugin.AIProjectClient") @patch("backend.plugins.chat_with_data_plugin.get_azure_credential") - def test_get_openai_client_success( - self, - mock_default_credential, - mock_token_provider, - mock_azure_openai, - mock_config, + def test_get_project_openai_client_success_updated( + self, mock_default_credential, mock_ai_project_client, mock_config ): - """Test successful creation of OpenAI client with AAD authentication.""" + """Test successful creation of project OpenAI client after removing direct OpenAI client method.""" # Mock config values - mock_config.AZURE_OPENAI_ENDPOINT = "https://test.openai.azure.com" + mock_config.AI_PROJECT_ENDPOINT = "https://test.ai.azure.com" mock_config.AZURE_OPENAI_PREVIEW_API_VERSION = "2025-04-01-preview" - mock_client = MagicMock() - mock_azure_openai.return_value = mock_client mock_credential = MagicMock() mock_default_credential.return_value = mock_credential - mock_token = MagicMock() - mock_token_provider.return_value = mock_token - result = self.plugin.get_openai_client() + mock_project_instance = MagicMock() + mock_openai_client = MagicMock() + mock_project_instance.inference.get_azure_openai_client.return_value = ( + mock_openai_client + ) + mock_ai_project_client.return_value = mock_project_instance + + result = self.plugin.get_project_openai_client() - assert result == mock_client + assert result == mock_openai_client mock_default_credential.assert_called_once() - mock_token_provider.assert_called_once_with( - mock_credential, "https://cognitiveservices.azure.com/.default" + mock_ai_project_client.assert_called_once_with( + endpoint="https://test.ai.azure.com", credential=mock_credential ) - mock_azure_openai.assert_called_once_with( - azure_endpoint="https://test.openai.azure.com", - azure_ad_token_provider=mock_token, - api_version="2025-04-01-preview", + 
mock_project_instance.inference.get_azure_openai_client.assert_called_once_with( + api_version="2025-04-01-preview" ) @patch("backend.plugins.chat_with_data_plugin.config") From 797ea8b30593df3b281a406fde961488f106e25c Mon Sep 17 00:00:00 2001 From: Pavan-Microsoft Date: Fri, 7 Nov 2025 11:09:29 +0530 Subject: [PATCH 14/32] fix: update API version to 2025-04-01 and adjust skuCapacity in main.bicep --- infra/main.bicep | 2 +- infra/main.json | 122 +++++++++++++++++++++++------------------------ 2 files changed, 62 insertions(+), 62 deletions(-) diff --git a/infra/main.bicep b/infra/main.bicep index 6d94c82a..4de7551f 100644 --- a/infra/main.bicep +++ b/infra/main.bicep @@ -1066,7 +1066,7 @@ module webServerFarm 'br/public:avm/res/web/serverfarm:0.5.0' = { diagnosticSettings: enableMonitoring ? [{ workspaceResourceId: logAnalyticsWorkspaceResourceId }] : null // WAF aligned configuration for Scalability skuName: enableScalability || enableRedundancy ? 'P1v3' : 'B3' - skuCapacity: enableScalability ? 3 : 1 + skuCapacity: enableScalability ? 1 : 1 // WAF aligned configuration for Redundancy zoneRedundant: enableRedundancy ? true : false } diff --git a/infra/main.json b/infra/main.json index 68ed7079..923ba229 100644 --- a/infra/main.json +++ b/infra/main.json @@ -5,8 +5,8 @@ "metadata": { "_generator": { "name": "bicep", - "version": "0.37.4.10188", - "templateHash": "17524393707520710386" + "version": "0.38.33.27573", + "templateHash": "13493219753752927332" } }, "parameters": { @@ -459,7 +459,7 @@ "logAnalyticsWorkspace": { "condition": "[and(parameters('enableMonitoring'), not(variables('useExistingLogAnalytics')))]", "type": "Microsoft.Resources/deployments", - "apiVersion": "2022-09-01", + "apiVersion": "2025-04-01", "name": "[take(format('avm.res.operational-insights.workspace.{0}', variables('logAnalyticsWorkspaceResourceName')), 64)]", "properties": { "expressionEvaluationOptions": { @@ -3565,7 +3565,7 @@ "applicationInsights": { "condition": "[parameters('enableMonitoring')]", "type": "Microsoft.Resources/deployments", - "apiVersion": "2022-09-01", + "apiVersion": "2025-04-01", "name": "[take(format('avm.res.insights.component.{0}', variables('applicationInsightsResourceName')), 64)]", "properties": { "expressionEvaluationOptions": { @@ -4295,7 +4295,7 @@ }, "userAssignedIdentity": { "type": "Microsoft.Resources/deployments", - "apiVersion": "2022-09-01", + "apiVersion": "2025-04-01", "name": "[take(format('avm.res.managed-identity.user-assigned-identity.{0}', variables('userAssignedIdentityResourceName')), 64)]", "properties": { "expressionEvaluationOptions": { @@ -4777,7 +4777,7 @@ }, "sqlUserAssignedIdentity": { "type": "Microsoft.Resources/deployments", - "apiVersion": "2022-09-01", + "apiVersion": "2025-04-01", "name": "[take(format('avm.res.managed-identity.user-assigned-identity.{0}', variables('sqlUserAssignedIdentityResourceName')), 64)]", "properties": { "expressionEvaluationOptions": { @@ -5260,7 +5260,7 @@ "virtualNetwork": { "condition": "[parameters('enablePrivateNetworking')]", "type": "Microsoft.Resources/deployments", - "apiVersion": "2022-09-01", + "apiVersion": "2025-04-01", "name": "[take(format('module.virtualNetwork.{0}', variables('solutionSuffix')), 64)]", "properties": { "expressionEvaluationOptions": { @@ -5297,8 +5297,8 @@ "metadata": { "_generator": { "name": "bicep", - "version": "0.37.4.10188", - "templateHash": "15908341678380884075" + "version": "0.38.33.27573", + "templateHash": "1734974014097019118" } }, "definitions": { @@ -5691,7 +5691,7 @@ }, 
"condition": "[not(empty(tryGet(parameters('subnets')[copyIndex()], 'networkSecurityGroup')))]", "type": "Microsoft.Resources/deployments", - "apiVersion": "2022-09-01", + "apiVersion": "2025-04-01", "name": "[take(format('avm.res.network.network-security-group.{0}.{1}', tryGet(parameters('subnets')[copyIndex()], 'networkSecurityGroup', 'name'), parameters('resourceSuffix')), 64)]", "properties": { "expressionEvaluationOptions": { @@ -6343,7 +6343,7 @@ }, "virtualNetwork": { "type": "Microsoft.Resources/deployments", - "apiVersion": "2022-09-01", + "apiVersion": "2025-04-01", "name": "[take(format('avm.res.network.virtual-network.{0}', parameters('name')), 64)]", "properties": { "expressionEvaluationOptions": { @@ -8070,7 +8070,7 @@ "bastionHost": { "condition": "[parameters('enablePrivateNetworking')]", "type": "Microsoft.Resources/deployments", - "apiVersion": "2022-09-01", + "apiVersion": "2025-04-01", "name": "[take(format('avm.res.network.bastion-host.{0}', variables('bastionHostName')), 64)]", "properties": { "expressionEvaluationOptions": { @@ -9389,7 +9389,7 @@ "jumpboxVM": { "condition": "[parameters('enablePrivateNetworking')]", "type": "Microsoft.Resources/deployments", - "apiVersion": "2022-09-01", + "apiVersion": "2025-04-01", "name": "[take(format('avm.res.compute.virtual-machine.{0}', variables('jumpboxVmName')), 64)]", "properties": { "expressionEvaluationOptions": { @@ -17739,7 +17739,7 @@ }, "condition": "[and(parameters('enablePrivateNetworking'), or(empty(parameters('existingFoundryProjectResourceId')), not(contains(variables('aiRelatedDnsZoneIndices'), copyIndex()))))]", "type": "Microsoft.Resources/deployments", - "apiVersion": "2022-09-01", + "apiVersion": "2025-04-01", "name": "[format('avm.res.network.private-dns-zone.{0}', split(variables('privateDnsZones')[copyIndex()], '.')[1])]", "properties": { "expressionEvaluationOptions": { @@ -20906,7 +20906,7 @@ }, "keyvault": { "type": "Microsoft.Resources/deployments", - "apiVersion": "2022-09-01", + "apiVersion": "2025-04-01", "name": "[take(format('avm.res.key-vault.vault.{0}', variables('keyVaultName')), 64)]", "properties": { "expressionEvaluationOptions": { @@ -24134,7 +24134,7 @@ "aiFoundryAiServices": { "condition": "[variables('aiFoundryAIservicesEnabled')]", "type": "Microsoft.Resources/deployments", - "apiVersion": "2022-09-01", + "apiVersion": "2025-04-01", "name": "[take(format('avm.res.cognitive-services.account.{0}', variables('aiFoundryAiServicesResourceName')), 64)]", "properties": { "expressionEvaluationOptions": { @@ -24249,8 +24249,8 @@ "metadata": { "_generator": { "name": "bicep", - "version": "0.37.4.10188", - "templateHash": "9573727846743928038" + "version": "0.38.33.27573", + "templateHash": "12531718623039828267" }, "name": "Cognitive Services", "description": "This module deploys a Cognitive Service." 
@@ -25430,7 +25430,7 @@ "cognitive_service_dependencies": { "condition": "[not(variables('useExistingService'))]", "type": "Microsoft.Resources/deployments", - "apiVersion": "2022-09-01", + "apiVersion": "2025-04-01", "name": "[format('cognitive_service_dependencies-{0}', uniqueString('cognitive_service_dependencies', deployment().name))]", "properties": { "expressionEvaluationOptions": { @@ -25482,8 +25482,8 @@ "metadata": { "_generator": { "name": "bicep", - "version": "0.37.4.10188", - "templateHash": "16444475951283055894" + "version": "0.38.33.27573", + "templateHash": "1396927448823284485" } }, "definitions": { @@ -26522,7 +26522,7 @@ "count": "[length(coalesce(parameters('privateEndpoints'), createArray()))]" }, "type": "Microsoft.Resources/deployments", - "apiVersion": "2022-09-01", + "apiVersion": "2025-04-01", "name": "[format('{0}-cognitiveService-PrivateEndpoint-{1}', uniqueString(deployment().name, parameters('location')), copyIndex())]", "subscriptionId": "[split(coalesce(tryGet(coalesce(parameters('privateEndpoints'), createArray())[copyIndex()], 'resourceGroupResourceId'), resourceGroup().id), '/')[2]]", "resourceGroup": "[split(coalesce(tryGet(coalesce(parameters('privateEndpoints'), createArray())[copyIndex()], 'resourceGroupResourceId'), resourceGroup().id), '/')[4]]", @@ -27273,7 +27273,7 @@ "secretsExport": { "condition": "[not(equals(parameters('secretsExportConfiguration'), null()))]", "type": "Microsoft.Resources/deployments", - "apiVersion": "2022-09-01", + "apiVersion": "2025-04-01", "name": "[format('{0}-secrets-kv', uniqueString(deployment().name, parameters('location')))]", "subscriptionId": "[split(tryGet(parameters('secretsExportConfiguration'), 'keyVaultResourceId'), '/')[2]]", "resourceGroup": "[split(tryGet(parameters('secretsExportConfiguration'), 'keyVaultResourceId'), '/')[4]]", @@ -27297,8 +27297,8 @@ "metadata": { "_generator": { "name": "bicep", - "version": "0.37.4.10188", - "templateHash": "2491273843075489892" + "version": "0.38.33.27573", + "templateHash": "7420599935384266971" } }, "definitions": { @@ -27417,7 +27417,7 @@ "aiProject": { "condition": "[or(not(empty(parameters('projectName'))), not(empty(parameters('existingFoundryProjectResourceId'))))]", "type": "Microsoft.Resources/deployments", - "apiVersion": "2022-09-01", + "apiVersion": "2025-04-01", "name": "[take(format('{0}-ai-project-{1}-deployment', parameters('name'), parameters('projectName')), 64)]", "properties": { "expressionEvaluationOptions": { @@ -27451,8 +27451,8 @@ "metadata": { "_generator": { "name": "bicep", - "version": "0.37.4.10188", - "templateHash": "346451728741152022" + "version": "0.38.33.27573", + "templateHash": "5676565623284126112" } }, "definitions": { @@ -27631,7 +27631,7 @@ "existing_cognitive_service_dependencies": { "condition": "[variables('useExistingService')]", "type": "Microsoft.Resources/deployments", - "apiVersion": "2022-09-01", + "apiVersion": "2025-04-01", "name": "[format('existing_cognitive_service_dependencies-{0}', uniqueString('existing_cognitive_service_dependencies', deployment().name))]", "subscriptionId": "[variables('existingCognitiveServiceDetails')[2]]", "resourceGroup": "[variables('existingCognitiveServiceDetails')[4]]", @@ -27688,8 +27688,8 @@ "metadata": { "_generator": { "name": "bicep", - "version": "0.37.4.10188", - "templateHash": "16444475951283055894" + "version": "0.38.33.27573", + "templateHash": "1396927448823284485" } }, "definitions": { @@ -28728,7 +28728,7 @@ "count": "[length(coalesce(parameters('privateEndpoints'), 
createArray()))]" }, "type": "Microsoft.Resources/deployments", - "apiVersion": "2022-09-01", + "apiVersion": "2025-04-01", "name": "[format('{0}-cognitiveService-PrivateEndpoint-{1}', uniqueString(deployment().name, parameters('location')), copyIndex())]", "subscriptionId": "[split(coalesce(tryGet(coalesce(parameters('privateEndpoints'), createArray())[copyIndex()], 'resourceGroupResourceId'), resourceGroup().id), '/')[2]]", "resourceGroup": "[split(coalesce(tryGet(coalesce(parameters('privateEndpoints'), createArray())[copyIndex()], 'resourceGroupResourceId'), resourceGroup().id), '/')[4]]", @@ -29479,7 +29479,7 @@ "secretsExport": { "condition": "[not(equals(parameters('secretsExportConfiguration'), null()))]", "type": "Microsoft.Resources/deployments", - "apiVersion": "2022-09-01", + "apiVersion": "2025-04-01", "name": "[format('{0}-secrets-kv', uniqueString(deployment().name, parameters('location')))]", "subscriptionId": "[split(tryGet(parameters('secretsExportConfiguration'), 'keyVaultResourceId'), '/')[2]]", "resourceGroup": "[split(tryGet(parameters('secretsExportConfiguration'), 'keyVaultResourceId'), '/')[4]]", @@ -29503,8 +29503,8 @@ "metadata": { "_generator": { "name": "bicep", - "version": "0.37.4.10188", - "templateHash": "2491273843075489892" + "version": "0.38.33.27573", + "templateHash": "7420599935384266971" } }, "definitions": { @@ -29623,7 +29623,7 @@ "aiProject": { "condition": "[or(not(empty(parameters('projectName'))), not(empty(parameters('existingFoundryProjectResourceId'))))]", "type": "Microsoft.Resources/deployments", - "apiVersion": "2022-09-01", + "apiVersion": "2025-04-01", "name": "[take(format('{0}-ai-project-{1}-deployment', parameters('name'), parameters('projectName')), 64)]", "properties": { "expressionEvaluationOptions": { @@ -29657,8 +29657,8 @@ "metadata": { "_generator": { "name": "bicep", - "version": "0.37.4.10188", - "templateHash": "346451728741152022" + "version": "0.38.33.27573", + "templateHash": "5676565623284126112" } }, "definitions": { @@ -29915,9 +29915,9 @@ } }, "dependsOn": [ - "[format('avmPrivateDnsZones[{0}]', variables('dnsZoneIndex').aiServices)]", "[format('avmPrivateDnsZones[{0}]', variables('dnsZoneIndex').openAI)]", "[format('avmPrivateDnsZones[{0}]', variables('dnsZoneIndex').cognitiveServices)]", + "[format('avmPrivateDnsZones[{0}]', variables('dnsZoneIndex').aiServices)]", "logAnalyticsWorkspace", "userAssignedIdentity", "virtualNetwork" @@ -29925,7 +29925,7 @@ }, "cosmosDb": { "type": "Microsoft.Resources/deployments", - "apiVersion": "2022-09-01", + "apiVersion": "2025-04-01", "name": "[take(format('avm.res.document-db.database-account.{0}', variables('cosmosDbResourceName')), 64)]", "properties": { "expressionEvaluationOptions": { @@ -33765,7 +33765,7 @@ }, "avmStorageAccount": { "type": "Microsoft.Resources/deployments", - "apiVersion": "2022-09-01", + "apiVersion": "2025-04-01", "name": "[take(format('avm.res.storage.storage-account.{0}', variables('storageAccountName')), 64)]", "properties": { "expressionEvaluationOptions": { @@ -39524,8 +39524,8 @@ } }, "dependsOn": [ - "[format('avmPrivateDnsZones[{0}]', variables('dnsZoneIndex').storageQueue)]", "[format('avmPrivateDnsZones[{0}]', variables('dnsZoneIndex').storageBlob)]", + "[format('avmPrivateDnsZones[{0}]', variables('dnsZoneIndex').storageQueue)]", "keyvault", "userAssignedIdentity", "virtualNetwork" @@ -39533,7 +39533,7 @@ }, "saveStorageAccountSecretsInKeyVault": { "type": "Microsoft.Resources/deployments", - "apiVersion": "2022-09-01", + "apiVersion": 
"2025-04-01", "name": "[take(format('saveStorageAccountSecretsInKeyVault.{0}', variables('keyVaultName')), 64)]", "properties": { "expressionEvaluationOptions": { @@ -39577,7 +39577,7 @@ }, { "name": "ADLS-ACCOUNT-KEY", - "value": "[listOutputsWithSecureValues('avmStorageAccount', '2022-09-01').primaryAccessKey]" + "value": "[listOutputsWithSecureValues('avmStorageAccount', '2025-04-01').primaryAccessKey]" } ] } @@ -42705,7 +42705,7 @@ }, "sqlDBModule": { "type": "Microsoft.Resources/deployments", - "apiVersion": "2022-09-01", + "apiVersion": "2025-04-01", "name": "[take(format('avm.res.sql.server.{0}', variables('sqlDbName')), 64)]", "properties": { "expressionEvaluationOptions": { @@ -49347,7 +49347,7 @@ }, "webServerFarm": { "type": "Microsoft.Resources/deployments", - "apiVersion": "2022-09-01", + "apiVersion": "2025-04-01", "name": "[take(format('avm.res.web.serverfarm.{0}', variables('webServerFarmResourceName')), 64)]", "properties": { "expressionEvaluationOptions": { @@ -49375,7 +49375,7 @@ }, "diagnosticSettings": "[if(parameters('enableMonitoring'), createObject('value', createArray(createObject('workspaceResourceId', if(variables('useExistingLogAnalytics'), parameters('existingLogAnalyticsWorkspaceId'), reference('logAnalyticsWorkspace').outputs.resourceId.value)))), createObject('value', null()))]", "skuName": "[if(or(parameters('enableScalability'), parameters('enableRedundancy')), createObject('value', 'P1v3'), createObject('value', 'B3'))]", - "skuCapacity": "[if(parameters('enableScalability'), createObject('value', 3), createObject('value', 1))]", + "skuCapacity": "[if(parameters('enableScalability'), createObject('value', 1), createObject('value', 1))]", "zoneRedundant": "[if(parameters('enableRedundancy'), createObject('value', true()), createObject('value', false()))]" }, "template": { @@ -49918,7 +49918,7 @@ }, "webSite": { "type": "Microsoft.Resources/deployments", - "apiVersion": "2022-09-01", + "apiVersion": "2025-04-01", "name": "[take(format('module.web-sites.{0}', variables('webSiteResourceName')), 64)]", "properties": { "expressionEvaluationOptions": { @@ -50027,8 +50027,8 @@ "metadata": { "_generator": { "name": "bicep", - "version": "0.37.4.10188", - "templateHash": "4298119334635398540" + "version": "0.38.33.27573", + "templateHash": "14170137035624875111" } }, "definitions": { @@ -51005,7 +51005,7 @@ "count": "[length(coalesce(parameters('configs'), createArray()))]" }, "type": "Microsoft.Resources/deployments", - "apiVersion": "2022-09-01", + "apiVersion": "2025-04-01", "name": "[format('{0}-Site-Config-{1}', uniqueString(deployment().name, parameters('location')), copyIndex())]", "properties": { "expressionEvaluationOptions": { @@ -51040,8 +51040,8 @@ "metadata": { "_generator": { "name": "bicep", - "version": "0.37.4.10188", - "templateHash": "4653685834544796273" + "version": "0.38.33.27573", + "templateHash": "16983009113856606195" }, "name": "Site App Settings", "description": "This module deploys a Site App Setting." 
@@ -51186,7 +51186,7 @@ "count": "[length(coalesce(parameters('privateEndpoints'), createArray()))]" }, "type": "Microsoft.Resources/deployments", - "apiVersion": "2022-09-01", + "apiVersion": "2025-04-01", "name": "[format('{0}-app-PrivateEndpoint-{1}', uniqueString(deployment().name, parameters('location')), copyIndex())]", "subscriptionId": "[split(coalesce(tryGet(coalesce(parameters('privateEndpoints'), createArray())[copyIndex()], 'resourceGroupResourceId'), resourceGroup().id), '/')[2]]", "resourceGroup": "[split(coalesce(tryGet(coalesce(parameters('privateEndpoints'), createArray())[copyIndex()], 'resourceGroupResourceId'), resourceGroup().id), '/')[4]]", @@ -52012,7 +52012,7 @@ }, "searchService": { "type": "Microsoft.Resources/deployments", - "apiVersion": "2022-09-01", + "apiVersion": "2025-04-01", "name": "[take(format('avm.res.search.search-service.{0}', variables('aiSearchName')), 64)]", "properties": { "expressionEvaluationOptions": { @@ -54392,7 +54392,7 @@ "existing_AIProject_SearchConnectionModule": { "condition": "[variables('useExistingAiFoundryAiProject')]", "type": "Microsoft.Resources/deployments", - "apiVersion": "2022-09-01", + "apiVersion": "2025-04-01", "name": "aiProjectSearchConnectionDeployment", "subscriptionId": "[variables('aiFoundryAiServicesSubscriptionId')]", "resourceGroup": "[variables('aiFoundryAiServicesResourceGroupName')]", @@ -54427,8 +54427,8 @@ "metadata": { "_generator": { "name": "bicep", - "version": "0.37.4.10188", - "templateHash": "6038840175458269917" + "version": "0.38.33.27573", + "templateHash": "11311597701635556530" } }, "parameters": { @@ -54496,7 +54496,7 @@ "searchServiceToExistingAiServicesRoleAssignment": { "condition": "[variables('useExistingAiFoundryAiProject')]", "type": "Microsoft.Resources/deployments", - "apiVersion": "2022-09-01", + "apiVersion": "2025-04-01", "name": "searchToExistingAiServices-roleAssignment", "subscriptionId": "[variables('aiFoundryAiServicesSubscriptionId')]", "resourceGroup": "[variables('aiFoundryAiServicesResourceGroupName')]", @@ -54522,8 +54522,8 @@ "metadata": { "_generator": { "name": "bicep", - "version": "0.37.4.10188", - "templateHash": "3644919950024112374" + "version": "0.38.33.27573", + "templateHash": "9717690292313179013" } }, "parameters": { From 52e6cd68eeb18516b75729432dbcda54eb078056 Mon Sep 17 00:00:00 2001 From: Pavan-Microsoft Date: Fri, 7 Nov 2025 11:15:10 +0530 Subject: [PATCH 15/32] fix: rename deploymentType parameter to gptModelDeploymentType for clarity --- infra/main.waf.parameters.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/infra/main.waf.parameters.json b/infra/main.waf.parameters.json index fcdb7319..ec12206b 100644 --- a/infra/main.waf.parameters.json +++ b/infra/main.waf.parameters.json @@ -8,7 +8,7 @@ "cosmosLocation": { "value": "${AZURE_ENV_COSMOS_LOCATION}" }, - "deploymentType": { + "gptModelDeploymentType": { "value": "${AZURE_ENV_MODEL_DEPLOYMENT_TYPE}" }, "gptModelName": { From 3cddee54494003af70bc593ccfad0ad9d52a5cf3 Mon Sep 17 00:00:00 2001 From: UtkarshMishra-Microsoft Date: Fri, 7 Nov 2025 14:59:00 +0530 Subject: [PATCH 16/32] fix: Re-query data after updating sample data to refresh rows (#739) * DataRefreshLogic * pylint * pylintfix * fix --- src/App/backend/services/sqldb_service.py | 3 +++ 1 file changed, 3 insertions(+) diff --git a/src/App/backend/services/sqldb_service.py b/src/App/backend/services/sqldb_service.py index 1e003616..dd643003 100644 --- a/src/App/backend/services/sqldb_service.py +++ 
b/src/App/backend/services/sqldb_service.py @@ -157,6 +157,9 @@ def get_client_data(): if len(rows) <= 6: update_sample_data(conn) + cursor = conn.cursor() + cursor.execute(sql_stmt) + rows = dict_cursor(cursor) formatted_users = [] for row in rows: From e38bdffa16174a887e557f9090e87104ce0718a0 Mon Sep 17 00:00:00 2001 From: Pavan-Microsoft Date: Fri, 7 Nov 2025 15:01:21 +0530 Subject: [PATCH 17/32] fix: clarify skuCapacity comment in main.bicep for WAF deployment --- infra/main.bicep | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/infra/main.bicep b/infra/main.bicep index 4de7551f..413db113 100644 --- a/infra/main.bicep +++ b/infra/main.bicep @@ -1066,7 +1066,7 @@ module webServerFarm 'br/public:avm/res/web/serverfarm:0.5.0' = { diagnosticSettings: enableMonitoring ? [{ workspaceResourceId: logAnalyticsWorkspaceResourceId }] : null // WAF aligned configuration for Scalability skuName: enableScalability || enableRedundancy ? 'P1v3' : 'B3' - skuCapacity: enableScalability ? 1 : 1 + skuCapacity: enableScalability ? 1 : 1 // skuCapacity set to 1 (not 3) due to multiple agents created per type during WAF deployment // WAF aligned configuration for Redundancy zoneRedundant: enableRedundancy ? true : false } From f09167fcc1ffc1c9d0660a34f0916411f2da3c31 Mon Sep 17 00:00:00 2001 From: Pavan-Microsoft Date: Fri, 7 Nov 2025 15:25:17 +0530 Subject: [PATCH 18/32] fix: update skuCapacity configuration for WAF deployment to ensure correct agent handling --- infra/main.bicep | 3 ++- infra/main.json | 10 ++++++---- 2 files changed, 8 insertions(+), 5 deletions(-) diff --git a/infra/main.bicep b/infra/main.bicep index 413db113..9ac85754 100644 --- a/infra/main.bicep +++ b/infra/main.bicep @@ -1066,7 +1066,8 @@ module webServerFarm 'br/public:avm/res/web/serverfarm:0.5.0' = { diagnosticSettings: enableMonitoring ? [{ workspaceResourceId: logAnalyticsWorkspaceResourceId }] : null // WAF aligned configuration for Scalability skuName: enableScalability || enableRedundancy ? 'P1v3' : 'B3' - skuCapacity: enableScalability ? 1 : 1 // skuCapacity set to 1 (not 3) due to multiple agents created per type during WAF deployment + // skuCapacity: enableScalability ? 3 : 1 + skuCapacity: 1 // skuCapacity set to 1 (not 3) due to multiple agents created per type during WAF deployment // WAF aligned configuration for Redundancy zoneRedundant: enableRedundancy ? 
true : false } diff --git a/infra/main.json b/infra/main.json index 923ba229..edab9db4 100644 --- a/infra/main.json +++ b/infra/main.json @@ -6,7 +6,7 @@ "_generator": { "name": "bicep", "version": "0.38.33.27573", - "templateHash": "13493219753752927332" + "templateHash": "2150473040924531792" } }, "parameters": { @@ -29915,9 +29915,9 @@ } }, "dependsOn": [ - "[format('avmPrivateDnsZones[{0}]', variables('dnsZoneIndex').openAI)]", "[format('avmPrivateDnsZones[{0}]', variables('dnsZoneIndex').cognitiveServices)]", "[format('avmPrivateDnsZones[{0}]', variables('dnsZoneIndex').aiServices)]", + "[format('avmPrivateDnsZones[{0}]', variables('dnsZoneIndex').openAI)]", "logAnalyticsWorkspace", "userAssignedIdentity", "virtualNetwork" @@ -39524,8 +39524,8 @@ } }, "dependsOn": [ - "[format('avmPrivateDnsZones[{0}]', variables('dnsZoneIndex').storageBlob)]", "[format('avmPrivateDnsZones[{0}]', variables('dnsZoneIndex').storageQueue)]", + "[format('avmPrivateDnsZones[{0}]', variables('dnsZoneIndex').storageBlob)]", "keyvault", "userAssignedIdentity", "virtualNetwork" @@ -49375,7 +49375,9 @@ }, "diagnosticSettings": "[if(parameters('enableMonitoring'), createObject('value', createArray(createObject('workspaceResourceId', if(variables('useExistingLogAnalytics'), parameters('existingLogAnalyticsWorkspaceId'), reference('logAnalyticsWorkspace').outputs.resourceId.value)))), createObject('value', null()))]", "skuName": "[if(or(parameters('enableScalability'), parameters('enableRedundancy')), createObject('value', 'P1v3'), createObject('value', 'B3'))]", - "skuCapacity": "[if(parameters('enableScalability'), createObject('value', 1), createObject('value', 1))]", + "skuCapacity": { + "value": 1 + }, "zoneRedundant": "[if(parameters('enableRedundancy'), createObject('value', true()), createObject('value', false()))]" }, "template": { From 4e14e12c2741e7037b7a5a3c3bdafac8c2e55d03 Mon Sep 17 00:00:00 2001 From: "Niraj Chaudhari (Persistent Systems Inc)" Date: Tue, 11 Nov 2025 12:03:30 +0530 Subject: [PATCH 19/32] Infra changes for OpenAI API call replacement --- infra/main.bicep | 55 +++++++++---------- infra/main.parameters.json | 2 +- infra/main.waf.parameters.json | 2 +- infra/modules/dependencies.bicep | 8 --- .../index_scripts/create_search_index.py | 24 +++++--- src/App/backend/common/config.py | 14 ++--- 6 files changed, 51 insertions(+), 54 deletions(-) diff --git a/infra/main.bicep b/infra/main.bicep index 6d94c82a..a33503c6 100644 --- a/infra/main.bicep +++ b/infra/main.bicep @@ -33,8 +33,8 @@ param gptModelVersion string = '2024-07-18' @description('Optional. Version of the GPT model to deploy.') param embeddingModelVersion string = '2' -@description('Optional. API version for the Azure OpenAI service.') -param azureOpenaiAPIVersion string = '2025-04-01-preview' +@description('Optional. API version for the Azure AI Services.') +param azureAIServicesAPIVersion string = '2025-04-01-preview' @minValue(10) @description('Optional. 
Capacity of the GPT deployment:') @@ -494,7 +494,6 @@ module jumpboxVM 'br/public:avm/res/compute/virtual-machine:0.15.0' = if (enable // ========== Private DNS Zones ========== // var privateDnsZones = [ 'privatelink.cognitiveservices.azure.com' - 'privatelink.openai.azure.com' 'privatelink.services.ai.azure.com' 'privatelink.azurewebsites.net' 'privatelink.blob.${environment().suffixes.storage}' @@ -509,22 +508,20 @@ var privateDnsZones = [ // DNS Zone Index Constants var dnsZoneIndex = { cognitiveServices: 0 - openAI: 1 - aiServices: 2 - appService: 3 - storageBlob: 4 - storageQueue: 5 - storageFile: 6 - cosmosDB: 7 - keyVault: 8 - sqlServer: 9 - searchService: 10 + aiServices: 1 + appService: 2 + storageBlob: 3 + storageQueue: 4 + storageFile: 5 + cosmosDB: 6 + keyVault: 7 + sqlServer: 8 + searchService: 9 } // List of DNS zone indices that correspond to AI-related services. var aiRelatedDnsZoneIndices = [ dnsZoneIndex.cognitiveServices - dnsZoneIndex.openAI dnsZoneIndex.aiServices ] @@ -613,11 +610,11 @@ module keyvault 'br/public:avm/res/key-vault/vault:0.12.1' = { } { name: 'AZURE-OPENAI-PREVIEW-API-VERSION' - value: azureOpenaiAPIVersion + value: azureAIServicesAPIVersion } { name: 'AZURE-OPENAI-ENDPOINT' - value: aiFoundryAiServices.outputs.endpoints['OpenAI Language Model Instance API'] + value: aiFoundryAiServices.outputs.endpoint } { name: 'AZURE-OPENAI-EMBEDDING-MODEL' @@ -631,6 +628,10 @@ module keyvault 'br/public:avm/res/key-vault/vault:0.12.1' = { name: 'AZURE-SEARCH-ENDPOINT' value: 'https://${aiSearchName}.search.windows.net' } + { + name: 'AZURE-AI-AGENT-ENDPOINT' + value: aiFoundryAiServices.outputs.aiProjectInfo.apiEndpoint + } ] enableTelemetry: enableTelemetry } @@ -733,10 +734,6 @@ module aiFoundryAiServices 'modules/ai-services.bicep' = if (aiFoundryAIservices name: 'ai-services-dns-zone-cognitiveservices' privateDnsZoneResourceId: avmPrivateDnsZones[dnsZoneIndex.cognitiveServices]!.outputs.resourceId } - { - name: 'ai-services-dns-zone-openai' - privateDnsZoneResourceId: avmPrivateDnsZones[dnsZoneIndex.openAI]!.outputs.resourceId - } { name: 'ai-services-dns-zone-aiservices' privateDnsZoneResourceId: avmPrivateDnsZones[dnsZoneIndex.aiServices]!.outputs.resourceId @@ -1110,20 +1107,20 @@ module webSite 'modules/web-sites.bicep' = { AZURE_SEARCH_URL_COLUMN: azureSearchUrlColumn AZURE_OPENAI_RESOURCE: aiFoundryAiServices.outputs.name AZURE_OPENAI_MODEL: gptModelName - AZURE_OPENAI_ENDPOINT: aiFoundryAiServices.outputs.endpoints['OpenAI Language Model Instance API'] + AZURE_OPENAI_ENDPOINT: aiFoundryAiServices.outputs.endpoint AZURE_OPENAI_TEMPERATURE: azureOpenAITemperature AZURE_OPENAI_TOP_P: azureOpenAITopP AZURE_OPENAI_MAX_TOKENS: azureOpenAIMaxTokens AZURE_OPENAI_STOP_SEQUENCE: azureOpenAIStopSequence AZURE_OPENAI_SYSTEM_MESSAGE: azureOpenAISystemMessage - AZURE_OPENAI_PREVIEW_API_VERSION: azureOpenaiAPIVersion + AZURE_OPENAI_PREVIEW_API_VERSION: azureAIServicesAPIVersion AZURE_OPENAI_STREAM: azureOpenAIStream AZURE_SEARCH_QUERY_TYPE: azureSearchQueryType AZURE_SEARCH_VECTOR_COLUMNS: azureSearchVectorFields AZURE_SEARCH_PERMITTED_GROUPS_COLUMN: azureSearchPermittedGroupsField AZURE_SEARCH_STRICTNESS: azureSearchStrictness AZURE_OPENAI_EMBEDDING_NAME: embeddingModel - AZURE_OPENAI_EMBEDDING_ENDPOINT : aiFoundryAiServices.outputs.endpoints['OpenAI Language Model Instance API'] + AZURE_OPENAI_EMBEDDING_ENDPOINT : aiFoundryAiServices.outputs.endpoint SQLDB_SERVER: sqlServerFqdn SQLDB_DATABASE: sqlDbName USE_INTERNAL_STREAM: useInternalStream @@ -1139,7 +1136,7 @@ 
module webSite 'modules/web-sites.bicep' = { USE_AI_PROJECT_CLIENT: useAIProjectClientFlag AZURE_AI_AGENT_ENDPOINT: aiFoundryAiServices.outputs.aiProjectInfo.apiEndpoint AZURE_AI_AGENT_MODEL_DEPLOYMENT_NAME: gptModelName - AZURE_AI_AGENT_API_VERSION: azureOpenaiAPIVersion + AZURE_AI_AGENT_API_VERSION: azureAIServicesAPIVersion AZURE_SEARCH_CONNECTION_NAME: aiSearchName AZURE_CLIENT_ID: userAssignedIdentity.outputs.clientId } @@ -1344,8 +1341,8 @@ output APPLICATIONINSIGHTS_CONNECTION_STRING string = enableMonitoring ? applicationInsights!.outputs.connectionString : '' -@description('The API version used for the Azure AI Agent service.') -output AZURE_AI_AGENT_API_VERSION string = azureOpenaiAPIVersion + @description('The API version used for the Azure AI Agent service.') +output AZURE_AI_AGENT_API_VERSION string = azureAIServicesAPIVersion @description('The endpoint URL of the Azure AI Agent project.') output AZURE_AI_AGENT_ENDPOINT string = aiFoundryAiServices.outputs.aiProjectInfo.apiEndpoint @@ -1372,13 +1369,13 @@ output AZURE_COSMOSDB_DATABASE string = cosmosDbDatabaseName output AZURE_COSMOSDB_ENABLE_FEEDBACK string = azureCosmosDbEnableFeedback @description('The endpoint URL for the Azure OpenAI Embedding model.') -output AZURE_OPENAI_EMBEDDING_ENDPOINT string = aiFoundryAiServices.outputs.endpoints['OpenAI Language Model Instance API'] +output AZURE_OPENAI_EMBEDDING_ENDPOINT string = aiFoundryAiServices.outputs.endpoint @description('The name of the Azure OpenAI Embedding model.') output AZURE_OPENAI_EMBEDDING_NAME string = embeddingModel @description('The endpoint URL for the Azure OpenAI service.') -output AZURE_OPENAI_ENDPOINT string = aiFoundryAiServices.outputs.endpoints['OpenAI Language Model Instance API'] +output AZURE_OPENAI_ENDPOINT string = aiFoundryAiServices.outputs.endpoint @description('The maximum number of tokens for Azure OpenAI responses.') output AZURE_OPENAI_MAX_TOKENS string = azureOpenAIMaxTokens @@ -1387,7 +1384,7 @@ output AZURE_OPENAI_MAX_TOKENS string = azureOpenAIMaxTokens output AZURE_OPENAI_MODEL string = gptModelName @description('The preview API version for Azure OpenAI.') -output AZURE_OPENAI_PREVIEW_API_VERSION string = azureOpenaiAPIVersion +output AZURE_OPENAI_PREVIEW_API_VERSION string = azureAIServicesAPIVersion @description('The Azure OpenAI resource name.') output AZURE_OPENAI_RESOURCE string = aiFoundryAiServices.outputs.name diff --git a/infra/main.parameters.json b/infra/main.parameters.json index 092e5a31..de31ba4b 100644 --- a/infra/main.parameters.json +++ b/infra/main.parameters.json @@ -14,7 +14,7 @@ "gptModelName": { "value": "${AZURE_ENV_MODEL_NAME}" }, - "azureOpenaiAPIVersion": { + "azureAIServicesAPIVersion": { "value": "${AZURE_ENV_MODEL_VERSION}" }, "gptDeploymentCapacity": { diff --git a/infra/main.waf.parameters.json b/infra/main.waf.parameters.json index fcdb7319..78221d74 100644 --- a/infra/main.waf.parameters.json +++ b/infra/main.waf.parameters.json @@ -14,7 +14,7 @@ "gptModelName": { "value": "${AZURE_ENV_MODEL_NAME}" }, - "azureOpenaiAPIVersion": { + "azureAIServicesAPIVersion": { "value": "${AZURE_ENV_MODEL_VERSION}" }, "gptDeploymentCapacity": { diff --git a/infra/modules/dependencies.bicep b/infra/modules/dependencies.bicep index 9c9efb27..587f4512 100644 --- a/infra/modules/dependencies.bicep +++ b/infra/modules/dependencies.bicep @@ -129,14 +129,6 @@ var builtInRoleNames = { 'Microsoft.Authorization/roleDefinitions', '3b20f47b-3825-43cb-8114-4bd2201156a8' ) - 'Cognitive Services OpenAI Contributor': 
subscriptionResourceId( - 'Microsoft.Authorization/roleDefinitions', - 'a001fd3d-188f-4b5d-821b-7da978bf7442' - ) - 'Cognitive Services OpenAI User': subscriptionResourceId( - 'Microsoft.Authorization/roleDefinitions', - '5e0bd9bd-7b93-4f28-af87-19fc36ad61bd' - ) 'Cognitive Services QnA Maker Editor': subscriptionResourceId( 'Microsoft.Authorization/roleDefinitions', 'f4cc2bf9-21be-47a1-bdf1-5c5804381025' diff --git a/infra/scripts/index_scripts/create_search_index.py b/infra/scripts/index_scripts/create_search_index.py index b0a56f11..bd5ed961 100644 --- a/infra/scripts/index_scripts/create_search_index.py +++ b/infra/scripts/index_scripts/create_search_index.py @@ -31,7 +31,7 @@ DataLakeServiceClient, FileSystemClient, ) -from openai import AzureOpenAI +from azure.ai.projects import AIProjectClient # Get Azure Key Vault Client key_vault_name = "kv_to-be-replaced" #'nc6262-kv-2fpeafsylfd2e' @@ -61,6 +61,7 @@ openai_api_version = secret_client.get_secret("AZURE-OPENAI-PREVIEW-API-VERSION").value openai_embedding_model = secret_client.get_secret("AZURE-OPENAI-EMBEDDING-MODEL").value account_name = secret_client.get_secret("ADLS-ACCOUNT-NAME").value +ai_project_endpoint = secret_client.get_secret("AZURE-AI-AGENT-ENDPOINT").value # Create a search index index_client = SearchIndexClient(endpoint=search_endpoint, credential=credential) @@ -132,15 +133,22 @@ # Function: Get Embeddings -def get_embeddings(text: str, openai_api_base, openai_api_version, azure_token_provider): +def get_embeddings(text: str, ai_project_endpoint, openai_api_version, credential): model_id = openai_embedding_model or "text-embedding-ada-002" - client = AzureOpenAI( + + # Create AI Projects client + project_client = AIProjectClient( + endpoint=ai_project_endpoint, + credential=credential, api_version=openai_api_version, - azure_endpoint=openai_api_base, - azure_ad_token_provider=azure_token_provider, + ) + + # Get the OpenAI client from the AI Projects client + openai_client = project_client.get_openai_client( + api_version=openai_api_version ) - embedding = client.embeddings.create(input=text, model=model_id).data[0].embedding + embedding = openai_client.embeddings.create(input=text, model=model_id).data[0].embedding return embedding @@ -260,12 +268,12 @@ def chunk_data(text): try: v_contentVector = get_embeddings( - d["content"], openai_api_base, openai_api_version, token_provider + d["content"], ai_project_endpoint, openai_api_version, credential ) except: time.sleep(30) v_contentVector = get_embeddings( - d["content"], openai_api_base, openai_api_version, token_provider + d["content"], ai_project_endpoint, openai_api_version, credential ) docs.append( diff --git a/src/App/backend/common/config.py b/src/App/backend/common/config.py index f841fb43..49775830 100644 --- a/src/App/backend/common/config.py +++ b/src/App/backend/common/config.py @@ -76,7 +76,13 @@ def __init__(self): "AZURE_SEARCH_CONNECTION_NAME", "foundry-search-connection" ) - # AOAI Integration Settings + # AI Project Client configuration + self.USE_AI_PROJECT_CLIENT = ( + os.getenv("USE_AI_PROJECT_CLIENT", "True").lower() == "true" + ) + self.AI_PROJECT_ENDPOINT = os.getenv("AZURE_AI_AGENT_ENDPOINT") + + # AOAI Integration Settings (used via AI Project Client) self.AZURE_OPENAI_RESOURCE = os.environ.get("AZURE_OPENAI_RESOURCE") self.AZURE_OPENAI_MODEL = os.environ.get("AZURE_OPENAI_MODEL") self.AZURE_OPENAI_ENDPOINT = os.environ.get("AZURE_OPENAI_ENDPOINT") @@ -130,12 +136,6 @@ def __init__(self): os.environ.get("SANITIZE_ANSWER", "false").lower() == 
"true" ) - # AI Project Client configuration - self.USE_AI_PROJECT_CLIENT = ( - os.getenv("USE_AI_PROJECT_CLIENT", "False").lower() == "true" - ) - self.AI_PROJECT_ENDPOINT = os.getenv("AZURE_AI_AGENT_ENDPOINT") - # SQL Database configuration self.SQL_DATABASE = os.getenv("SQLDB_DATABASE") self.SQL_SERVER = os.getenv("SQLDB_SERVER") From 5667d769a685f73750729a592e60ed8fd1cb5406 Mon Sep 17 00:00:00 2001 From: "Niraj Chaudhari (Persistent Systems Inc)" Date: Tue, 11 Nov 2025 14:11:44 +0530 Subject: [PATCH 20/32] revert changes --- .../index_scripts/create_search_index.py | 68 +++++++++++-------- 1 file changed, 39 insertions(+), 29 deletions(-) diff --git a/infra/scripts/index_scripts/create_search_index.py b/infra/scripts/index_scripts/create_search_index.py index 87b3ef77..bd5ed961 100644 --- a/infra/scripts/index_scripts/create_search_index.py +++ b/infra/scripts/index_scripts/create_search_index.py @@ -31,8 +31,7 @@ DataLakeServiceClient, FileSystemClient, ) -from openai import AzureOpenAI -from azure.storage.blob import BlobServiceClient +from azure.ai.projects import AIProjectClient # Get Azure Key Vault Client key_vault_name = "kv_to-be-replaced" #'nc6262-kv-2fpeafsylfd2e' @@ -62,6 +61,7 @@ openai_api_version = secret_client.get_secret("AZURE-OPENAI-PREVIEW-API-VERSION").value openai_embedding_model = secret_client.get_secret("AZURE-OPENAI-EMBEDDING-MODEL").value account_name = secret_client.get_secret("ADLS-ACCOUNT-NAME").value +ai_project_endpoint = secret_client.get_secret("AZURE-AI-AGENT-ENDPOINT").value # Create a search index index_client = SearchIndexClient(endpoint=search_endpoint, credential=credential) @@ -133,15 +133,22 @@ # Function: Get Embeddings -def get_embeddings(text: str, openai_api_base, openai_api_version, azure_token_provider): +def get_embeddings(text: str, ai_project_endpoint, openai_api_version, credential): model_id = openai_embedding_model or "text-embedding-ada-002" - client = AzureOpenAI( + + # Create AI Projects client + project_client = AIProjectClient( + endpoint=ai_project_endpoint, + credential=credential, api_version=openai_api_version, - azure_endpoint=openai_api_base, - azure_ad_token_provider=azure_token_provider, + ) + + # Get the OpenAI client from the AI Projects client + openai_client = project_client.get_openai_client( + api_version=openai_api_version ) - embedding = client.embeddings.create(input=text, model=model_id).data[0].embedding + embedding = openai_client.embeddings.create(input=text, model=model_id).data[0].embedding return embedding @@ -200,13 +207,16 @@ def chunk_data(text): # paths = os.listdir(path_name) -account_url = f"https://{account_name}.blob.core.windows.net" -blob_service_client = BlobServiceClient(account_url, credential=credential) -container_client = blob_service_client.get_container_client(file_system_client_name) +account_url = f"https://{account_name}.dfs.core.windows.net" -print(f"Listing blobs under '{directory}' using BlobServiceClient...") -paths = [blob.name for blob in container_client.list_blobs(name_starts_with=directory)] +service_client = DataLakeServiceClient( + account_url, credential=credential, api_version="2023-01-03" +) +file_system_client = service_client.get_file_system_client(file_system_client_name) +directory_name = directory +paths = file_system_client.get_paths(path=directory_name) +print(paths) search_client = SearchClient(search_endpoint, index_name, credential) # index_client = SearchIndexClient(endpoint=search_endpoint, credential=credential) @@ -219,22 +229,22 @@ def chunk_data(text): # 
Read the CSV file into a Pandas DataFrame file_path = csv_file_name print(file_path) -blob_client = container_client.get_blob_client(file_path) -download_stream = blob_client.download_blob() -df_metadata = pd.read_csv(download_stream, encoding="utf-8") +file_client = file_system_client.get_file_client(file_path) +csv_file = file_client.download_file() +df_metadata = pd.read_csv(csv_file, encoding="utf-8") docs = [] counter = 0 -for blob_name in paths: - if not blob_name.endswith(".json"): - continue - - blob_client = container_client.get_blob_client(blob_name) - download_stream = blob_client.download_blob() - data = json.loads(download_stream.readall()) - text = data.get("Content", "") - - filename = blob_name.split("/")[-1] +for path in paths: + # file_path = f'Data/{foldername}/meeting_transcripts/' + path + # with open(file_path, "r") as file: + # data = json.load(file) + file_client = file_system_client.get_file_client(path.name) + data_file = file_client.download_file() + data = json.load(data_file) + text = data["Content"] + + filename = path.name.split("/")[-1] document_id = filename.replace(".json", "").replace("convo_", "") # print(document_id) df_file_metadata = df_metadata[ @@ -258,12 +268,12 @@ def chunk_data(text): try: v_contentVector = get_embeddings( - d["content"], openai_api_base, openai_api_version, token_provider + d["content"], ai_project_endpoint, openai_api_version, credential ) except: time.sleep(30) v_contentVector = get_embeddings( - d["content"], openai_api_base, openai_api_version, token_provider + d["content"], ai_project_endpoint, openai_api_version, credential ) docs.append( @@ -274,7 +284,7 @@ def chunk_data(text): "chunk_id": d["chunk_id"], "client_id": d["client_id"], "content": d["content"], - "sourceurl": blob_name.split("/")[-1], + "sourceurl": path.name.split("/")[-1], "contentVector": v_contentVector, } ) @@ -282,7 +292,7 @@ def chunk_data(text): if counter % 10 == 0: result = search_client.upload_documents(documents=docs) docs = [] - print(f"{counter} documents uploaded...") + print(f" {str(counter)} uploaded") # upload the last batch if docs != []: From bfe8a21922c549c901dbe45bd578afb2d9816ff8 Mon Sep 17 00:00:00 2001 From: "Niraj Chaudhari (Persistent Systems Inc)" Date: Tue, 11 Nov 2025 14:14:26 +0530 Subject: [PATCH 21/32] revert changes - 1 --- .../index_scripts/create_search_index.py | 24 +++++++------------ 1 file changed, 8 insertions(+), 16 deletions(-) diff --git a/infra/scripts/index_scripts/create_search_index.py b/infra/scripts/index_scripts/create_search_index.py index bd5ed961..b0a56f11 100644 --- a/infra/scripts/index_scripts/create_search_index.py +++ b/infra/scripts/index_scripts/create_search_index.py @@ -31,7 +31,7 @@ DataLakeServiceClient, FileSystemClient, ) -from azure.ai.projects import AIProjectClient +from openai import AzureOpenAI # Get Azure Key Vault Client key_vault_name = "kv_to-be-replaced" #'nc6262-kv-2fpeafsylfd2e' @@ -61,7 +61,6 @@ openai_api_version = secret_client.get_secret("AZURE-OPENAI-PREVIEW-API-VERSION").value openai_embedding_model = secret_client.get_secret("AZURE-OPENAI-EMBEDDING-MODEL").value account_name = secret_client.get_secret("ADLS-ACCOUNT-NAME").value -ai_project_endpoint = secret_client.get_secret("AZURE-AI-AGENT-ENDPOINT").value # Create a search index index_client = SearchIndexClient(endpoint=search_endpoint, credential=credential) @@ -133,22 +132,15 @@ # Function: Get Embeddings -def get_embeddings(text: str, ai_project_endpoint, openai_api_version, credential): +def get_embeddings(text: str, 
openai_api_base, openai_api_version, azure_token_provider): model_id = openai_embedding_model or "text-embedding-ada-002" - - # Create AI Projects client - project_client = AIProjectClient( - endpoint=ai_project_endpoint, - credential=credential, + client = AzureOpenAI( api_version=openai_api_version, - ) - - # Get the OpenAI client from the AI Projects client - openai_client = project_client.get_openai_client( - api_version=openai_api_version + azure_endpoint=openai_api_base, + azure_ad_token_provider=azure_token_provider, ) - embedding = openai_client.embeddings.create(input=text, model=model_id).data[0].embedding + embedding = client.embeddings.create(input=text, model=model_id).data[0].embedding return embedding @@ -268,12 +260,12 @@ def chunk_data(text): try: v_contentVector = get_embeddings( - d["content"], ai_project_endpoint, openai_api_version, credential + d["content"], openai_api_base, openai_api_version, token_provider ) except: time.sleep(30) v_contentVector = get_embeddings( - d["content"], ai_project_endpoint, openai_api_version, credential + d["content"], openai_api_base, openai_api_version, token_provider ) docs.append( From fc4515658ef8347ca4c3a54d8a966a3da56daac8 Mon Sep 17 00:00:00 2001 From: "Niraj Chaudhari (Persistent Systems Inc)" Date: Wed, 12 Nov 2025 11:36:54 +0530 Subject: [PATCH 22/32] add logic for both avm and bicep deployment --- infra/scripts/process_sample_data.sh | 60 ++++++++++++++++++++++------ 1 file changed, 48 insertions(+), 12 deletions(-) diff --git a/infra/scripts/process_sample_data.sh b/infra/scripts/process_sample_data.sh index 1ccaf9a0..3cd038f0 100644 --- a/infra/scripts/process_sample_data.sh +++ b/infra/scripts/process_sample_data.sh @@ -346,18 +346,54 @@ get_values_from_az_deployment() { --query "properties.outputs" -o json) # Extract each value - cosmosDbAccountName=$(echo "$deploymentOutputs" | grep -A 3 '"cosmosDbAccountName"' | grep '"value"' | sed 's/.*"value": *"\([^"]*\)".*/\1/') - storageAccount=$(echo "$deploymentOutputs" | grep -A 3 '"storageAccountName"' | grep '"value"' | sed 's/.*"value": *"\([^"]*\)".*/\1/') - fileSystem=$(echo "$deploymentOutputs" | grep -A 3 '"storageContainerName"' | grep '"value"' | sed 's/.*"value": *"\([^"]*\)".*/\1/') - keyvaultName=$(echo "$deploymentOutputs" | grep -A 3 '"keyVaultName"' | grep '"value"' | sed 's/.*"value": *"\([^"]*\)".*/\1/') - sqlServerName=$(echo "$deploymentOutputs" | grep -A 3 '"sqlDbServerName"' | grep '"value"' | sed 's/.*"value": *"\([^"]*\)".*/\1/') - webAppManagedIdentityDisplayName=$(echo "$deploymentOutputs" | grep -A 3 '"managedIdentityWebAppName"' | grep '"value"' | sed 's/.*"value": *"\([^"]*\)".*/\1/') - webAppManagedIdentityClientId=$(echo "$deploymentOutputs" | grep -A 3 '"managedIdentityWebAppClientId"' | grep '"value"' | sed 's/.*"value": *"\([^"]*\)".*/\1/') - SqlDatabaseName=$(echo "$deploymentOutputs" | grep -A 3 '"sqlDbDatabase"' | grep '"value"' | sed 's/.*"value": *"\([^"]*\)".*/\1/') - sqlManagedIdentityClientId=$(echo "$deploymentOutputs" | grep -A 3 '"managedIdentitySqlClientId"' | grep '"value"' | sed 's/.*"value": *"\([^"]*\)".*/\1/') - sqlManagedIdentityDisplayName=$(echo "$deploymentOutputs" | grep -A 3 '"managedIdentitySqlName"' | grep '"value"' | sed 's/.*"value": *"\([^"]*\)".*/\1/') - aiSearchName=$(echo "$deploymentOutputs" | grep -A 3 '"aiSearchServiceName"' | grep '"value"' | sed 's/.*"value": *"\([^"]*\)".*/\1/') - aif_resource_id=$(echo "$deploymentOutputs" | grep -A 3 '"aiFoundryResourceId"' | grep '"value"' | sed 's/.*"value": 
*"\([^"]*\)".*/\1/') + cosmosDbAccountName=$(echo "$deploymentOutputs" | grep -A 3 '"cosmosdB_ACCOUNT_NAME"' | grep '"value"' | sed 's/.*"value": *"\([^"]*\)".*/\1/') + if [ -z "$cosmosDbAccountName" ]; then + cosmosDbAccountName=$(echo "$deploymentOutputs" | grep -A 3 '"cosmosDbAccountName"' | grep '"value"' | sed 's/.*"value": *"\([^"]*\)".*/\1/') + fi + storageAccount=$(echo "$deploymentOutputs" | grep -A 3 '"storagE_ACCOUNT_NAME"' | grep '"value"' | sed 's/.*"value": *"\([^"]*\)".*/\1/') + if [ -z "$storageAccount" ]; then + storageAccount=$(echo "$deploymentOutputs" | grep -A 3 '"storageAccountName"' | grep '"value"' | sed 's/.*"value": *"\([^"]*\)".*/\1/') + fi + fileSystem=$(echo "$deploymentOutputs" | grep -A 3 '"storagE_CONTAINER_NAME"' | grep '"value"' | sed 's/.*"value": *"\([^"]*\)".*/\1/') + if [ -z "$fileSystem" ]; then + fileSystem=$(echo "$deploymentOutputs" | grep -A 3 '"storageContainerName"' | grep '"value"' | sed 's/.*"value": *"\([^"]*\)".*/\1/') + fi + keyvaultName=$(echo "$deploymentOutputs" | grep -A 3 '"keY_VAULT_NAME"' | grep '"value"' | sed 's/.*"value": *"\([^"]*\)".*/\1/') + if [ -z "$keyvaultName" ]; then + keyvaultName=$(echo "$deploymentOutputs" | grep -A 3 '"keyVaultName"' | grep '"value"' | sed 's/.*"value": *"\([^"]*\)".*/\1/') + fi + sqlServerName=$(echo "$deploymentOutputs" | grep -A 3 '"sqlDb_SERVER_NAME"' | grep '"value"' | sed 's/.*"value": *"\([^"]*\)".*/\1/') + if [ -z "$sqlServerName" ]; then + sqlServerName=$(echo "$deploymentOutputs" | grep -A 3 '"sqlDbServerName"' | grep '"value"' | sed 's/.*"value": *"\([^"]*\)".*/\1/') + fi + webAppManagedIdentityDisplayName=$(echo "$deploymentOutputs" | grep -A 3 '"managedidentitY_WEBAPP_NAME"' | grep '"value"' | sed 's/.*"value": *"\([^"]*\)".*/\1/') + if [ -z "$webAppManagedIdentityDisplayName" ]; then + webAppManagedIdentityDisplayName=$(echo "$deploymentOutputs" | grep -A 3 '"managedIdentityWebAppName"' | grep '"value"' | sed 's/.*"value": *"\([^"]*\)".*/\1/') + fi + webAppManagedIdentityClientId=$(echo "$deploymentOutputs" | grep -A 3 '"managedidentitY_WEBAPP_CLIENTID"' | grep '"value"' | sed 's/.*"value": *"\([^"]*\)".*/\1/') + if [ -z "$webAppManagedIdentityClientId" ]; then + webAppManagedIdentityClientId=$(echo "$deploymentOutputs" | grep -A 3 '"managedIdentityWebAppClientId"' | grep '"value"' | sed 's/.*"value": *"\([^"]*\)".*/\1/') + fi + SqlDatabaseName=$(echo "$deploymentOutputs" | grep -A 3 '"sqlDb_DATABASE"' | grep '"value"' | sed 's/.*"value": *"\([^"]*\)".*/\1/') + if [ -z "$SqlDatabaseName" ]; then + SqlDatabaseName=$(echo "$deploymentOutputs" | grep -A 3 '"sqlDbDatabase"' | grep '"value"' | sed 's/.*"value": *"\([^"]*\)".*/\1/') + fi + sqlManagedIdentityClientId=$(echo "$deploymentOutputs" | grep -A 3 '"managedidentitY_SQL_CLIENTID"' | grep '"value"' | sed 's/.*"value": *"\([^"]*\)".*/\1/') + if [ -z "$sqlManagedIdentityClientId" ]; then + sqlManagedIdentityClientId=$(echo "$deploymentOutputs" | grep -A 3 '"managedIdentitySqlClientId"' | grep '"value"' | sed 's/.*"value": *"\([^"]*\)".*/\1/') + fi + sqlManagedIdentityDisplayName=$(echo "$deploymentOutputs" | grep -A 3 '"managedidentitY_SQL_NAME"' | grep '"value"' | sed 's/.*"value": *"\([^"]*\)".*/\1/') + if [ -z "$sqlManagedIdentityDisplayName" ]; then + sqlManagedIdentityDisplayName=$(echo "$deploymentOutputs" | grep -A 3 '"managedIdentitySqlName"' | grep '"value"' | sed 's/.*"value": *"\([^"]*\)".*/\1/') + fi + aiSearchName=$(echo "$deploymentOutputs" | grep -A 3 '"aI_SEARCH_SERVICE_NAME"' | grep '"value"' | sed 's/.*"value": 
*"\([^"]*\)".*/\1/') + if [ -z "$aiSearchName" ]; then + aiSearchName=$(echo "$deploymentOutputs" | grep -A 3 '"aiSearchServiceName"' | grep '"value"' | sed 's/.*"value": *"\([^"]*\)".*/\1/') + fi + aif_resource_id=$(echo "$deploymentOutputs" | grep -A 3 '"aI_FOUNDRY_RESOURCE_ID"' | grep '"value"' | sed 's/.*"value": *"\([^"]*\)".*/\1/') + if [ -z "$aif_resource_id" ]; then + aif_resource_id=$(echo "$deploymentOutputs" | grep -A 3 '"aiFoundryResourceId"' | grep '"value"' | sed 's/.*"value": *"\([^"]*\)".*/\1/') + fi # Validate that we extracted all required values if [ -z "$cosmosDbAccountName" ] || [ -z "$storageAccount" ] || [ -z "$fileSystem" ] || [ -z "$keyvaultName" ] || [ -z "$sqlServerName" ] || [ -z "$SqlDatabaseName" ] || [ -z "$sqlManagedIdentityClientId" ] || [ -z "$sqlManagedIdentityDisplayName" ] || [ -z "$aiSearchName" ] || [ -z "$aif_resource_id" ]; then From 63d756d59642643f1221aa4e7ae5b498f3f607c2 Mon Sep 17 00:00:00 2001 From: Kanchan-Microsoft Date: Wed, 12 Nov 2025 14:37:03 +0530 Subject: [PATCH 23/32] feat: Enhance Developer Experience Support in Client Advisor (#735) * custom templates for developer experience * updated azure_custom.yaml * updated readme * updated template name * updated keyvault prefix to lower case * changes suggested by copilot * main.json --- azure_custom.yaml | 327 ++++++++++ docs/DeploymentGuide.md | 7 + infra/main.bicep | 4 +- infra/main.json | 178 +++--- infra/main_custom.bicep | 1245 +++++++++++++++++++++++++++++++++++++++ 5 files changed, 1669 insertions(+), 92 deletions(-) create mode 100644 azure_custom.yaml create mode 100644 infra/main_custom.bicep diff --git a/azure_custom.yaml b/azure_custom.yaml new file mode 100644 index 00000000..09b79a1e --- /dev/null +++ b/azure_custom.yaml @@ -0,0 +1,327 @@ +# yaml-language-server: $schema=https://raw.githubusercontent.com/Azure/azure-dev/main/schemas/v1.0/azure.yaml.json + +name: build-your-own-copilot-solution-accelerator + +requiredVersions: + azd: ">= 1.18.0" + +metadata: + template: build-your-own-copilot-solution-accelerator@1.0 + name: build-your-own-copilot-solution-accelerator@1.0 + +hooks: + # Pre-package hook to set container registry variables + prepackage: + windows: + run: | + Write-Host "Setting up container registry variables..." -ForegroundColor Green + + # Get the ACR name from the deployed infrastructure + $acrName = azd env get-values --output json | ConvertFrom-Json | Select-Object -ExpandProperty "AZURE_CONTAINER_REGISTRY_NAME" -ErrorAction SilentlyContinue + + if ($acrName) { + Write-Host "Using deployed ACR: $acrName" -ForegroundColor Cyan + azd env set AZURE_CONTAINER_REGISTRY_ENDPOINT "$acrName.azurecr.io" + azd env set AZURE_CONTAINER_REGISTRY_NAME $acrName + } else { + Write-Host "Warning: ACR not found in environment. Make sure infrastructure is deployed first." -ForegroundColor Yellow + } + shell: pwsh + continueOnError: true + + posix: + run: | + echo "Setting up container registry variables..." + + # Get the ACR name from the deployed infrastructure + ACR_NAME=$(azd env get-values --output json | jq -r '.AZURE_CONTAINER_REGISTRY_NAME // empty') + + if [ ! -z "$ACR_NAME" ]; then + echo "Using deployed ACR: $ACR_NAME" + azd env set AZURE_CONTAINER_REGISTRY_ENDPOINT "$ACR_NAME.azurecr.io" + azd env set AZURE_CONTAINER_REGISTRY_NAME "$ACR_NAME" + else + echo "Warning: ACR not found in environment. Make sure infrastructure is deployed first." 
+ fi + shell: sh + continueOnError: true + + # Pre-deploy hook to build and push containers + predeploy: + windows: + run: | + Write-Host "🚀 Starting container deployment process..." -ForegroundColor Green + + # Get environment variables from azd + $acrName = azd env get-value AZURE_CONTAINER_REGISTRY_NAME + $resourceGroup = azd env get-value AZURE_RESOURCE_GROUP + $webAppName = azd env get-value WEB_APP_NAME + $imageName = "byc-wa-app" + $imageTag = "latest" + + if (-not $acrName) { + Write-Host "❌ Error: AZURE_CONTAINER_REGISTRY_NAME not set. Run 'azd provision' first." -ForegroundColor Red + exit 1 + } + + if (-not $resourceGroup) { + Write-Host "❌ Error: AZURE_RESOURCE_GROUP not set. Run 'azd provision' first." -ForegroundColor Red + exit 1 + } + + if (-not $webAppName) { + Write-Host "❌ Error: WEB_APP_NAME not set. Run 'azd provision' first." -ForegroundColor Red + exit 1 + } + + Write-Host "📋 Configuration:" -ForegroundColor Cyan + Write-Host " ACR Name: $acrName" -ForegroundColor White + Write-Host " Resource Group: $resourceGroup" -ForegroundColor White + Write-Host " Web App: $webAppName" -ForegroundColor White + Write-Host " Image: $imageName`:$imageTag" -ForegroundColor White + + # Login to ACR + Write-Host "🔐 Logging into ACR..." -ForegroundColor Yellow + az acr login --name $acrName + + if ($LASTEXITCODE -ne 0) { + Write-Host "❌ Failed to login to ACR" -ForegroundColor Red + exit 1 + } + + # Build and push the container image + Write-Host "🏗️ Building container image..." -ForegroundColor Yellow + $fullImageName = "$acrName.azurecr.io/$imageName`:$imageTag" + docker build -f "./src/App/WebApp.Dockerfile" -t $fullImageName "./src" + + if ($LASTEXITCODE -ne 0) { + Write-Host "❌ Failed to build container image" -ForegroundColor Red + exit 1 + } + + Write-Host "📤 Pushing container image to ACR..." -ForegroundColor Yellow + docker push $fullImageName + + if ($LASTEXITCODE -ne 0) { + Write-Host "❌ Failed to push container image" -ForegroundColor Red + exit 1 + } + + # Update environment variables + Write-Host "🔧 Updating azd environment variables..." -ForegroundColor Yellow + azd env set CONTAINER_REGISTRY_HOSTNAME "$acrName.azurecr.io" + azd env set CONTAINER_IMAGE_NAME $imageName + azd env set IMAGE_TAG $imageTag + + # Configure web app ACR authentication using managed identity + Write-Host "🔑 Configuring ACR authentication for web app..." -ForegroundColor Yellow + $webappIdentity = az webapp identity show --name $webAppName --resource-group $resourceGroup --query principalId --output tsv + + if (-not $webappIdentity -or $webappIdentity -eq "null") { + Write-Host "🔄 Enabling managed identity for web app..." -ForegroundColor Yellow + $webappIdentity = az webapp identity assign --name $webAppName --resource-group $resourceGroup --query principalId --output tsv + } + + Write-Host " Web app identity: $webappIdentity" -ForegroundColor White + + # Assign AcrPull role to web app managed identity + Write-Host "🔐 Assigning AcrPull role to web app..." -ForegroundColor Yellow + $acrResourceId = az acr show --name $acrName --resource-group $resourceGroup --query id --output tsv + az role assignment create --assignee $webappIdentity --role AcrPull --scope $acrResourceId + if ($LASTEXITCODE -ne 0) { + Write-Host "⚠️ Role assignment may already exist" -ForegroundColor Yellow + } + + # Configure web app to use ACR with managed identity + Write-Host "🔧 Configuring web app container settings..." 
-ForegroundColor Yellow + az webapp config appsettings set --name $webAppName --resource-group $resourceGroup --settings ` + DOCKER_REGISTRY_SERVER_URL="https://$acrName.azurecr.io" ` + DOCKER_ENABLE_CI=true + + # Configure web app to use managed identity for ACR authentication + Write-Host "🔐 Enabling ACR managed identity authentication..." -ForegroundColor Yellow + az webapp config set --name $webAppName --resource-group $resourceGroup --acr-use-identity true + + # Update web app to use the new container image + Write-Host "🚀 Updating web app container image..." -ForegroundColor Yellow + $dockerImage = "DOCKER|$fullImageName" + + # Use cmd to avoid PowerShell pipe interpretation issues + $cmd = "az webapp config set --name `"$webAppName`" --resource-group `"$resourceGroup`" --linux-fx-version `"$dockerImage`"" + cmd /c $cmd + + if ($LASTEXITCODE -ne 0) { + Write-Host "❌ Failed to update web app configuration" -ForegroundColor Red + exit 1 + } + + # Restart the web app to ensure it picks up the new configuration + Write-Host "🔄 Restarting web app..." -ForegroundColor Yellow + az webapp restart --name $webAppName --resource-group $resourceGroup + + Write-Host "✅ Container deployment completed successfully!" -ForegroundColor Green + Write-Host "🌐 Web app URL: https://$webAppName.azurewebsites.net" -ForegroundColor Cyan + Write-Host "📦 Container image: $fullImageName" -ForegroundColor Cyan + + Write-Host "" + Write-Host "⏳ The web app may take a few minutes to start up with the new container..." -ForegroundColor Yellow + Write-Host " You can monitor the logs with:" -ForegroundColor White + Write-Host " az webapp log tail --name $webAppName --resource-group $resourceGroup" -ForegroundColor Cyan + shell: pwsh + continueOnError: false + + posix: + run: | + echo "🚀 Starting container deployment process..." + + # Get environment variables from azd + ACR_NAME=$(azd env get-value AZURE_CONTAINER_REGISTRY_NAME) + RESOURCE_GROUP=$(azd env get-value AZURE_RESOURCE_GROUP) + WEB_APP_NAME=$(azd env get-value WEB_APP_NAME) + IMAGE_TAG="latest" + IMAGE_NAME="byc-wa-app" + + if [ -z "$ACR_NAME" ]; then + echo "❌ Error: AZURE_CONTAINER_REGISTRY_NAME not set. Run 'azd provision' first." + exit 1 + fi + + if [ -z "$RESOURCE_GROUP" ]; then + echo "❌ Error: AZURE_RESOURCE_GROUP not set. Run 'azd provision' first." + exit 1 + fi + + if [ -z "$WEB_APP_NAME" ]; then + echo "❌ Error: WEB_APP_NAME not set. Run 'azd provision' first." + exit 1 + fi + + echo "📋 Configuration:" + echo " ACR Name: $ACR_NAME" + echo " Resource Group: $RESOURCE_GROUP" + echo " Web App: $WEB_APP_NAME" + echo " Image: $IMAGE_NAME:$IMAGE_TAG" + + # Login to ACR + echo "🔐 Logging into ACR..." + az acr login --name $ACR_NAME + + # Build and push the container image + echo "🏗️ Building container image..." + FULL_IMAGE_NAME="$ACR_NAME.azurecr.io/$IMAGE_NAME:$IMAGE_TAG" + docker build -f "./src/App/WebApp.Dockerfile" -t $FULL_IMAGE_NAME "./src" + + echo "📤 Pushing container image to ACR..." + docker push $FULL_IMAGE_NAME + + # Update environment variables + echo "🔧 Updating azd environment variables..." + azd env set CONTAINER_REGISTRY_HOSTNAME "$ACR_NAME.azurecr.io" + azd env set CONTAINER_IMAGE_NAME $IMAGE_NAME + azd env set IMAGE_TAG $IMAGE_TAG + + # Configure web app ACR authentication using managed identity + echo "🔑 Configuring ACR authentication for web app..." 
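+        # Resolve the web app's managed identity principal (assigned on demand below)
+        # so it can be granted AcrPull and pull images without registry admin credentials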
+ WEBAPP_IDENTITY=$(az webapp identity show --name $WEB_APP_NAME --resource-group $RESOURCE_GROUP --query principalId --output tsv) + + if [ -z "$WEBAPP_IDENTITY" ] || [ "$WEBAPP_IDENTITY" = "null" ]; then + echo "🔄 Enabling managed identity for web app..." + WEBAPP_IDENTITY=$(az webapp identity assign --name $WEB_APP_NAME --resource-group $RESOURCE_GROUP --query principalId --output tsv) + fi + + echo " Web app identity: $WEBAPP_IDENTITY" + + # Assign AcrPull role to web app managed identity + echo "🔐 Assigning AcrPull role to web app..." + ACR_RESOURCE_ID=$(az acr show --name $ACR_NAME --resource-group $RESOURCE_GROUP --query id --output tsv) + az role assignment create --assignee $WEBAPP_IDENTITY --role AcrPull --scope $ACR_RESOURCE_ID || echo "⚠️ Role assignment may already exist" + + # Configure web app to use ACR with managed identity + echo "🔧 Configuring web app container settings..." + az webapp config appsettings set --name $WEB_APP_NAME --resource-group $RESOURCE_GROUP --settings \ + DOCKER_REGISTRY_SERVER_URL="https://$ACR_NAME.azurecr.io" \ + DOCKER_ENABLE_CI=true + + # Configure web app to use managed identity for ACR authentication + echo "🔐 Enabling ACR managed identity authentication..." + az webapp config set --name $WEB_APP_NAME --resource-group $RESOURCE_GROUP --acr-use-identity true + + # Update web app to use the new container image + echo "🚀 Updating web app container image..." + DOCKER_IMAGE="DOCKER|$FULL_IMAGE_NAME" + az webapp config set --name $WEB_APP_NAME --resource-group $RESOURCE_GROUP --linux-fx-version "$DOCKER_IMAGE" + + # Restart the web app to ensure it picks up the new configuration + echo "🔄 Restarting web app..." + az webapp restart --name $WEB_APP_NAME --resource-group $RESOURCE_GROUP + + echo "✅ Container deployment completed successfully!" + echo "🌐 Web app URL: https://$WEB_APP_NAME.azurewebsites.net" + echo "📦 Container image: $FULL_IMAGE_NAME" + + echo "" + echo "⏳ The web app may take a few minutes to start up with the new container..." + echo " You can monitor the logs with:" + echo " az webapp log tail --name $WEB_APP_NAME --resource-group $RESOURCE_GROUP" + shell: sh + continueOnError: false + + postprovision: + windows: + run: | + Write-Host "Deployment completed successfully!" -ForegroundColor Green + Write-Host "Web app URL: " -NoNewline + Write-Host "$env:WEB_APP_URL" -ForegroundColor Cyan + Write-Host "" + Write-Host "Container Registry: " -NoNewline + Write-Host "$env:AZURE_CONTAINER_REGISTRY_NAME.azurecr.io" -ForegroundColor Cyan + Write-Host "" + Write-Host "Next step:" -ForegroundColor Yellow + Write-Host " Run the following command to grant permissions and load sample data:" -ForegroundColor White + Write-Host " bash ./infra/scripts/process_sample_data.sh $env:AZURE_RESOURCE_GROUP" -ForegroundColor Cyan + Write-Host "" + shell: pwsh + continueOnError: false + interactive: true + + posix: + run: | + echo "Deployment completed successfully!" + echo "Web app URL: $WEB_APP_URL" + echo "" + echo "Container Registry: $AZURE_CONTAINER_REGISTRY_NAME.azurecr.io" + echo "" + echo "Next step:" + echo " Run the following command to grant permissions and load sample data:" + echo " bash ./infra/scripts/process_sample_data.sh $AZURE_RESOURCE_GROUP" + echo "" + shell: sh + continueOnError: false + interactive: true + + postdeploy: + windows: + run: | + Write-Host "✅ Deployment completed! Container deployment was handled by predeploy hook." 
-ForegroundColor Green + $webAppUrl = azd env get-value WEB_APP_URL + Write-Host "🌐 Web app URL: $webAppUrl" -ForegroundColor Cyan + shell: pwsh + continueOnError: true + + posix: + run: | + echo "✅ Deployment completed! Container deployment was handled by predeploy hook." + WEB_APP_URL=$(azd env get-value WEB_APP_URL) + echo "🌐 Web app URL: $WEB_APP_URL" + shell: sh + continueOnError: true + +# Infrastructure configuration +infra: + provider: bicep + path: infra + module: main + parameters: + containerRegistryHostname: ${CONTAINER_REGISTRY_HOSTNAME=""} + containerImageName: ${CONTAINER_IMAGE_NAME="byc-wa-app"} + imageTag: ${IMAGE_TAG="latest"} \ No newline at end of file diff --git a/docs/DeploymentGuide.md b/docs/DeploymentGuide.md index d3498bfc..fecbf3dc 100644 --- a/docs/DeploymentGuide.md +++ b/docs/DeploymentGuide.md @@ -236,6 +236,13 @@ This will rebuild the source code, package it into a container, and push it to t ### 🛠️ Troubleshooting If you encounter any issues during the deployment process, please refer [troubleshooting](../docs/TroubleShootingSteps.md) document for detailed steps and solutions +## Deploy Your local changes +To deploy your local changes rename the below files. + 1. Rename `azure.yaml` to `azure_custom2.yaml` and `azure_custom.yaml` to `azure.yaml`. + 2. Go to `infra` directory + - Rename `main.bicep` to `main_custom2.bicep` and `main_custom.bicep` to `main.bicep`. +Continue with the [deploying steps](#deploying-with-azd). + ## Post Deployment Steps ### 1. Import Sample Data diff --git a/infra/main.bicep b/infra/main.bicep index 3076c175..eeddf416 100644 --- a/infra/main.bicep +++ b/infra/main.bicep @@ -550,7 +550,7 @@ module avmPrivateDnsZones 'br/public:avm/res/network/private-dns-zone:0.7.1' = [ ] // ==========Key Vault Module ========== // -var keyVaultName = 'KV-${solutionSuffix}' +var keyVaultName = 'kv-${solutionSuffix}' module keyvault 'br/public:avm/res/key-vault/vault:0.12.1' = { name: take('avm.res.key-vault.vault.${keyVaultName}', 64) params: { @@ -888,7 +888,7 @@ module avmStorageAccount 'br/public:avm/res/storage/storage-account:0.20.0' = { bypass: 'AzureServices' defaultAction: enablePrivateNetworking ? 'Deny' : 'Allow' } - allowBlobPublicAccess: enablePrivateNetworking ? true : false + allowBlobPublicAccess: false publicNetworkAccess: enablePrivateNetworking ? 'Disabled' : 'Enabled' // Private endpoints for blob and queue privateEndpoints: enablePrivateNetworking diff --git a/infra/main.json b/infra/main.json index edab9db4..db7909f8 100644 --- a/infra/main.json +++ b/infra/main.json @@ -5,8 +5,8 @@ "metadata": { "_generator": { "name": "bicep", - "version": "0.38.33.27573", - "templateHash": "2150473040924531792" + "version": "0.37.4.10188", + "templateHash": "16197733130303817508" } }, "parameters": { @@ -70,11 +70,11 @@ "description": "Optional. Version of the GPT model to deploy." } }, - "azureOpenaiAPIVersion": { + "azureAIServicesAPIVersion": { "type": "string", "defaultValue": "2025-04-01-preview", "metadata": { - "description": "Optional. API version for the Azure OpenAI service." + "description": "Optional. API version for the Azure AI Services." 
} }, "gptModelCapacity": { @@ -330,7 +330,6 @@ "jumpboxVmName": "[take(format('vm-jumpbox-{0}', variables('solutionSuffix')), 15)]", "privateDnsZones": [ "privatelink.cognitiveservices.azure.com", - "privatelink.openai.azure.com", "privatelink.services.ai.azure.com", "privatelink.azurewebsites.net", "[format('privatelink.blob.{0}', environment().suffixes.storage)]", @@ -343,23 +342,21 @@ ], "dnsZoneIndex": { "cognitiveServices": 0, - "openAI": 1, - "aiServices": 2, - "appService": 3, - "storageBlob": 4, - "storageQueue": 5, - "storageFile": 6, - "cosmosDB": 7, - "keyVault": 8, - "sqlServer": 9, - "searchService": 10 + "aiServices": 1, + "appService": 2, + "storageBlob": 3, + "storageQueue": 4, + "storageFile": 5, + "cosmosDB": 6, + "keyVault": 7, + "sqlServer": 8, + "searchService": 9 }, "aiRelatedDnsZoneIndices": [ "[variables('dnsZoneIndex').cognitiveServices]", - "[variables('dnsZoneIndex').openAI]", "[variables('dnsZoneIndex').aiServices]" ], - "keyVaultName": "[format('KV-{0}', variables('solutionSuffix'))]", + "keyVaultName": "[format('kv-{0}', variables('solutionSuffix'))]", "useExistingAiFoundryAiProject": "[not(empty(parameters('existingFoundryProjectResourceId')))]", "aiFoundryAiServicesSubscriptionId": "[if(variables('useExistingAiFoundryAiProject'), split(parameters('existingFoundryProjectResourceId'), '/')[2], subscription().id)]", "aiFoundryAiServicesResourceGroupName": "[if(variables('useExistingAiFoundryAiProject'), split(parameters('existingFoundryProjectResourceId'), '/')[4], format('rg-{0}', variables('solutionSuffix')))]", @@ -459,7 +456,7 @@ "logAnalyticsWorkspace": { "condition": "[and(parameters('enableMonitoring'), not(variables('useExistingLogAnalytics')))]", "type": "Microsoft.Resources/deployments", - "apiVersion": "2025-04-01", + "apiVersion": "2022-09-01", "name": "[take(format('avm.res.operational-insights.workspace.{0}', variables('logAnalyticsWorkspaceResourceName')), 64)]", "properties": { "expressionEvaluationOptions": { @@ -3565,7 +3562,7 @@ "applicationInsights": { "condition": "[parameters('enableMonitoring')]", "type": "Microsoft.Resources/deployments", - "apiVersion": "2025-04-01", + "apiVersion": "2022-09-01", "name": "[take(format('avm.res.insights.component.{0}', variables('applicationInsightsResourceName')), 64)]", "properties": { "expressionEvaluationOptions": { @@ -4295,7 +4292,7 @@ }, "userAssignedIdentity": { "type": "Microsoft.Resources/deployments", - "apiVersion": "2025-04-01", + "apiVersion": "2022-09-01", "name": "[take(format('avm.res.managed-identity.user-assigned-identity.{0}', variables('userAssignedIdentityResourceName')), 64)]", "properties": { "expressionEvaluationOptions": { @@ -4777,7 +4774,7 @@ }, "sqlUserAssignedIdentity": { "type": "Microsoft.Resources/deployments", - "apiVersion": "2025-04-01", + "apiVersion": "2022-09-01", "name": "[take(format('avm.res.managed-identity.user-assigned-identity.{0}', variables('sqlUserAssignedIdentityResourceName')), 64)]", "properties": { "expressionEvaluationOptions": { @@ -5260,7 +5257,7 @@ "virtualNetwork": { "condition": "[parameters('enablePrivateNetworking')]", "type": "Microsoft.Resources/deployments", - "apiVersion": "2025-04-01", + "apiVersion": "2022-09-01", "name": "[take(format('module.virtualNetwork.{0}', variables('solutionSuffix')), 64)]", "properties": { "expressionEvaluationOptions": { @@ -5297,8 +5294,8 @@ "metadata": { "_generator": { "name": "bicep", - "version": "0.38.33.27573", - "templateHash": "1734974014097019118" + "version": "0.37.4.10188", + "templateHash": 
"15908341678380884075" } }, "definitions": { @@ -5691,7 +5688,7 @@ }, "condition": "[not(empty(tryGet(parameters('subnets')[copyIndex()], 'networkSecurityGroup')))]", "type": "Microsoft.Resources/deployments", - "apiVersion": "2025-04-01", + "apiVersion": "2022-09-01", "name": "[take(format('avm.res.network.network-security-group.{0}.{1}', tryGet(parameters('subnets')[copyIndex()], 'networkSecurityGroup', 'name'), parameters('resourceSuffix')), 64)]", "properties": { "expressionEvaluationOptions": { @@ -6343,7 +6340,7 @@ }, "virtualNetwork": { "type": "Microsoft.Resources/deployments", - "apiVersion": "2025-04-01", + "apiVersion": "2022-09-01", "name": "[take(format('avm.res.network.virtual-network.{0}', parameters('name')), 64)]", "properties": { "expressionEvaluationOptions": { @@ -8070,7 +8067,7 @@ "bastionHost": { "condition": "[parameters('enablePrivateNetworking')]", "type": "Microsoft.Resources/deployments", - "apiVersion": "2025-04-01", + "apiVersion": "2022-09-01", "name": "[take(format('avm.res.network.bastion-host.{0}', variables('bastionHostName')), 64)]", "properties": { "expressionEvaluationOptions": { @@ -9389,7 +9386,7 @@ "jumpboxVM": { "condition": "[parameters('enablePrivateNetworking')]", "type": "Microsoft.Resources/deployments", - "apiVersion": "2025-04-01", + "apiVersion": "2022-09-01", "name": "[take(format('avm.res.compute.virtual-machine.{0}', variables('jumpboxVmName')), 64)]", "properties": { "expressionEvaluationOptions": { @@ -17739,7 +17736,7 @@ }, "condition": "[and(parameters('enablePrivateNetworking'), or(empty(parameters('existingFoundryProjectResourceId')), not(contains(variables('aiRelatedDnsZoneIndices'), copyIndex()))))]", "type": "Microsoft.Resources/deployments", - "apiVersion": "2025-04-01", + "apiVersion": "2022-09-01", "name": "[format('avm.res.network.private-dns-zone.{0}', split(variables('privateDnsZones')[copyIndex()], '.')[1])]", "properties": { "expressionEvaluationOptions": { @@ -20906,7 +20903,7 @@ }, "keyvault": { "type": "Microsoft.Resources/deployments", - "apiVersion": "2025-04-01", + "apiVersion": "2022-09-01", "name": "[take(format('avm.res.key-vault.vault.{0}', variables('keyVaultName')), 64)]", "properties": { "expressionEvaluationOptions": { @@ -20981,11 +20978,11 @@ }, { "name": "AZURE-OPENAI-PREVIEW-API-VERSION", - "value": "[parameters('azureOpenaiAPIVersion')]" + "value": "[parameters('azureAIServicesAPIVersion')]" }, { "name": "AZURE-OPENAI-ENDPOINT", - "value": "[reference('aiFoundryAiServices').outputs.endpoints.value['OpenAI Language Model Instance API']]" + "value": "[reference('aiFoundryAiServices').outputs.endpoint.value]" }, { "name": "AZURE-OPENAI-EMBEDDING-MODEL", @@ -20998,6 +20995,10 @@ { "name": "AZURE-SEARCH-ENDPOINT", "value": "[format('https://{0}.search.windows.net', variables('aiSearchName'))]" + }, + { + "name": "AZURE-AI-AGENT-ENDPOINT", + "value": "[reference('aiFoundryAiServices').outputs.aiProjectInfo.value.apiEndpoint]" } ] }, @@ -24134,7 +24135,7 @@ "aiFoundryAiServices": { "condition": "[variables('aiFoundryAIservicesEnabled')]", "type": "Microsoft.Resources/deployments", - "apiVersion": "2025-04-01", + "apiVersion": "2022-09-01", "name": "[take(format('avm.res.cognitive-services.account.{0}', variables('aiFoundryAiServicesResourceName')), 64)]", "properties": { "expressionEvaluationOptions": { @@ -24210,7 +24211,7 @@ }, "diagnosticSettings": "[if(parameters('enableMonitoring'), createObject('value', createArray(createObject('workspaceResourceId', if(variables('useExistingLogAnalytics'), 
parameters('existingLogAnalyticsWorkspaceId'), reference('logAnalyticsWorkspace').outputs.resourceId.value)))), createObject('value', null()))]", "publicNetworkAccess": "[if(parameters('enablePrivateNetworking'), createObject('value', 'Disabled'), createObject('value', 'Enabled'))]", - "privateEndpoints": "[if(and(parameters('enablePrivateNetworking'), empty(parameters('existingFoundryProjectResourceId'))), createObject('value', createArray(createObject('name', format('pep-{0}', variables('aiFoundryAiServicesResourceName')), 'customNetworkInterfaceName', format('nic-{0}', variables('aiFoundryAiServicesResourceName')), 'subnetResourceId', reference('virtualNetwork').outputs.pepsSubnetResourceId.value, 'privateDnsZoneGroup', createObject('privateDnsZoneGroupConfigs', createArray(createObject('name', 'ai-services-dns-zone-cognitiveservices', 'privateDnsZoneResourceId', reference(format('avmPrivateDnsZones[{0}]', variables('dnsZoneIndex').cognitiveServices)).outputs.resourceId.value), createObject('name', 'ai-services-dns-zone-openai', 'privateDnsZoneResourceId', reference(format('avmPrivateDnsZones[{0}]', variables('dnsZoneIndex').openAI)).outputs.resourceId.value), createObject('name', 'ai-services-dns-zone-aiservices', 'privateDnsZoneResourceId', reference(format('avmPrivateDnsZones[{0}]', variables('dnsZoneIndex').aiServices)).outputs.resourceId.value)))))), createObject('value', createArray()))]", + "privateEndpoints": "[if(and(parameters('enablePrivateNetworking'), empty(parameters('existingFoundryProjectResourceId'))), createObject('value', createArray(createObject('name', format('pep-{0}', variables('aiFoundryAiServicesResourceName')), 'customNetworkInterfaceName', format('nic-{0}', variables('aiFoundryAiServicesResourceName')), 'subnetResourceId', reference('virtualNetwork').outputs.pepsSubnetResourceId.value, 'privateDnsZoneGroup', createObject('privateDnsZoneGroupConfigs', createArray(createObject('name', 'ai-services-dns-zone-cognitiveservices', 'privateDnsZoneResourceId', reference(format('avmPrivateDnsZones[{0}]', variables('dnsZoneIndex').cognitiveServices)).outputs.resourceId.value), createObject('name', 'ai-services-dns-zone-aiservices', 'privateDnsZoneResourceId', reference(format('avmPrivateDnsZones[{0}]', variables('dnsZoneIndex').aiServices)).outputs.resourceId.value)))))), createObject('value', createArray()))]", "deployments": { "value": [ { @@ -24249,8 +24250,8 @@ "metadata": { "_generator": { "name": "bicep", - "version": "0.38.33.27573", - "templateHash": "12531718623039828267" + "version": "0.37.4.10188", + "templateHash": "11586648700335054863" }, "name": "Cognitive Services", "description": "This module deploys a Cognitive Service." 
@@ -25430,7 +25431,7 @@ "cognitive_service_dependencies": { "condition": "[not(variables('useExistingService'))]", "type": "Microsoft.Resources/deployments", - "apiVersion": "2025-04-01", + "apiVersion": "2022-09-01", "name": "[format('cognitive_service_dependencies-{0}', uniqueString('cognitive_service_dependencies', deployment().name))]", "properties": { "expressionEvaluationOptions": { @@ -25482,8 +25483,8 @@ "metadata": { "_generator": { "name": "bicep", - "version": "0.38.33.27573", - "templateHash": "1396927448823284485" + "version": "0.37.4.10188", + "templateHash": "8352858209491089788" } }, "definitions": { @@ -26408,8 +26409,6 @@ "Cognitive Services LUIS Writer": "[subscriptionResourceId('Microsoft.Authorization/roleDefinitions', '6322a993-d5c9-4bed-b113-e49bbea25b27')]", "Cognitive Services Metrics Advisor Administrator": "[subscriptionResourceId('Microsoft.Authorization/roleDefinitions', 'cb43c632-a144-4ec5-977c-e80c4affc34a')]", "Cognitive Services Metrics Advisor User": "[subscriptionResourceId('Microsoft.Authorization/roleDefinitions', '3b20f47b-3825-43cb-8114-4bd2201156a8')]", - "Cognitive Services OpenAI Contributor": "[subscriptionResourceId('Microsoft.Authorization/roleDefinitions', 'a001fd3d-188f-4b5d-821b-7da978bf7442')]", - "Cognitive Services OpenAI User": "[subscriptionResourceId('Microsoft.Authorization/roleDefinitions', '5e0bd9bd-7b93-4f28-af87-19fc36ad61bd')]", "Cognitive Services QnA Maker Editor": "[subscriptionResourceId('Microsoft.Authorization/roleDefinitions', 'f4cc2bf9-21be-47a1-bdf1-5c5804381025')]", "Cognitive Services QnA Maker Reader": "[subscriptionResourceId('Microsoft.Authorization/roleDefinitions', '466ccd10-b268-4a11-b098-b4849f024126')]", "Cognitive Services Speech Contributor": "[subscriptionResourceId('Microsoft.Authorization/roleDefinitions', '0e75ca1e-0464-4b4d-8b93-68208a576181')]", @@ -26522,7 +26521,7 @@ "count": "[length(coalesce(parameters('privateEndpoints'), createArray()))]" }, "type": "Microsoft.Resources/deployments", - "apiVersion": "2025-04-01", + "apiVersion": "2022-09-01", "name": "[format('{0}-cognitiveService-PrivateEndpoint-{1}', uniqueString(deployment().name, parameters('location')), copyIndex())]", "subscriptionId": "[split(coalesce(tryGet(coalesce(parameters('privateEndpoints'), createArray())[copyIndex()], 'resourceGroupResourceId'), resourceGroup().id), '/')[2]]", "resourceGroup": "[split(coalesce(tryGet(coalesce(parameters('privateEndpoints'), createArray())[copyIndex()], 'resourceGroupResourceId'), resourceGroup().id), '/')[4]]", @@ -27273,7 +27272,7 @@ "secretsExport": { "condition": "[not(equals(parameters('secretsExportConfiguration'), null()))]", "type": "Microsoft.Resources/deployments", - "apiVersion": "2025-04-01", + "apiVersion": "2022-09-01", "name": "[format('{0}-secrets-kv', uniqueString(deployment().name, parameters('location')))]", "subscriptionId": "[split(tryGet(parameters('secretsExportConfiguration'), 'keyVaultResourceId'), '/')[2]]", "resourceGroup": "[split(tryGet(parameters('secretsExportConfiguration'), 'keyVaultResourceId'), '/')[4]]", @@ -27297,8 +27296,8 @@ "metadata": { "_generator": { "name": "bicep", - "version": "0.38.33.27573", - "templateHash": "7420599935384266971" + "version": "0.37.4.10188", + "templateHash": "2491273843075489892" } }, "definitions": { @@ -27417,7 +27416,7 @@ "aiProject": { "condition": "[or(not(empty(parameters('projectName'))), not(empty(parameters('existingFoundryProjectResourceId'))))]", "type": "Microsoft.Resources/deployments", - "apiVersion": "2025-04-01", + 
"apiVersion": "2022-09-01", "name": "[take(format('{0}-ai-project-{1}-deployment', parameters('name'), parameters('projectName')), 64)]", "properties": { "expressionEvaluationOptions": { @@ -27451,8 +27450,8 @@ "metadata": { "_generator": { "name": "bicep", - "version": "0.38.33.27573", - "templateHash": "5676565623284126112" + "version": "0.37.4.10188", + "templateHash": "346451728741152022" } }, "definitions": { @@ -27631,7 +27630,7 @@ "existing_cognitive_service_dependencies": { "condition": "[variables('useExistingService')]", "type": "Microsoft.Resources/deployments", - "apiVersion": "2025-04-01", + "apiVersion": "2022-09-01", "name": "[format('existing_cognitive_service_dependencies-{0}', uniqueString('existing_cognitive_service_dependencies', deployment().name))]", "subscriptionId": "[variables('existingCognitiveServiceDetails')[2]]", "resourceGroup": "[variables('existingCognitiveServiceDetails')[4]]", @@ -27688,8 +27687,8 @@ "metadata": { "_generator": { "name": "bicep", - "version": "0.38.33.27573", - "templateHash": "1396927448823284485" + "version": "0.37.4.10188", + "templateHash": "8352858209491089788" } }, "definitions": { @@ -28614,8 +28613,6 @@ "Cognitive Services LUIS Writer": "[subscriptionResourceId('Microsoft.Authorization/roleDefinitions', '6322a993-d5c9-4bed-b113-e49bbea25b27')]", "Cognitive Services Metrics Advisor Administrator": "[subscriptionResourceId('Microsoft.Authorization/roleDefinitions', 'cb43c632-a144-4ec5-977c-e80c4affc34a')]", "Cognitive Services Metrics Advisor User": "[subscriptionResourceId('Microsoft.Authorization/roleDefinitions', '3b20f47b-3825-43cb-8114-4bd2201156a8')]", - "Cognitive Services OpenAI Contributor": "[subscriptionResourceId('Microsoft.Authorization/roleDefinitions', 'a001fd3d-188f-4b5d-821b-7da978bf7442')]", - "Cognitive Services OpenAI User": "[subscriptionResourceId('Microsoft.Authorization/roleDefinitions', '5e0bd9bd-7b93-4f28-af87-19fc36ad61bd')]", "Cognitive Services QnA Maker Editor": "[subscriptionResourceId('Microsoft.Authorization/roleDefinitions', 'f4cc2bf9-21be-47a1-bdf1-5c5804381025')]", "Cognitive Services QnA Maker Reader": "[subscriptionResourceId('Microsoft.Authorization/roleDefinitions', '466ccd10-b268-4a11-b098-b4849f024126')]", "Cognitive Services Speech Contributor": "[subscriptionResourceId('Microsoft.Authorization/roleDefinitions', '0e75ca1e-0464-4b4d-8b93-68208a576181')]", @@ -28728,7 +28725,7 @@ "count": "[length(coalesce(parameters('privateEndpoints'), createArray()))]" }, "type": "Microsoft.Resources/deployments", - "apiVersion": "2025-04-01", + "apiVersion": "2022-09-01", "name": "[format('{0}-cognitiveService-PrivateEndpoint-{1}', uniqueString(deployment().name, parameters('location')), copyIndex())]", "subscriptionId": "[split(coalesce(tryGet(coalesce(parameters('privateEndpoints'), createArray())[copyIndex()], 'resourceGroupResourceId'), resourceGroup().id), '/')[2]]", "resourceGroup": "[split(coalesce(tryGet(coalesce(parameters('privateEndpoints'), createArray())[copyIndex()], 'resourceGroupResourceId'), resourceGroup().id), '/')[4]]", @@ -29479,7 +29476,7 @@ "secretsExport": { "condition": "[not(equals(parameters('secretsExportConfiguration'), null()))]", "type": "Microsoft.Resources/deployments", - "apiVersion": "2025-04-01", + "apiVersion": "2022-09-01", "name": "[format('{0}-secrets-kv', uniqueString(deployment().name, parameters('location')))]", "subscriptionId": "[split(tryGet(parameters('secretsExportConfiguration'), 'keyVaultResourceId'), '/')[2]]", "resourceGroup": 
"[split(tryGet(parameters('secretsExportConfiguration'), 'keyVaultResourceId'), '/')[4]]", @@ -29503,8 +29500,8 @@ "metadata": { "_generator": { "name": "bicep", - "version": "0.38.33.27573", - "templateHash": "7420599935384266971" + "version": "0.37.4.10188", + "templateHash": "2491273843075489892" } }, "definitions": { @@ -29623,7 +29620,7 @@ "aiProject": { "condition": "[or(not(empty(parameters('projectName'))), not(empty(parameters('existingFoundryProjectResourceId'))))]", "type": "Microsoft.Resources/deployments", - "apiVersion": "2025-04-01", + "apiVersion": "2022-09-01", "name": "[take(format('{0}-ai-project-{1}-deployment', parameters('name'), parameters('projectName')), 64)]", "properties": { "expressionEvaluationOptions": { @@ -29657,8 +29654,8 @@ "metadata": { "_generator": { "name": "bicep", - "version": "0.38.33.27573", - "templateHash": "5676565623284126112" + "version": "0.37.4.10188", + "templateHash": "346451728741152022" } }, "definitions": { @@ -29917,7 +29914,6 @@ "dependsOn": [ "[format('avmPrivateDnsZones[{0}]', variables('dnsZoneIndex').cognitiveServices)]", "[format('avmPrivateDnsZones[{0}]', variables('dnsZoneIndex').aiServices)]", - "[format('avmPrivateDnsZones[{0}]', variables('dnsZoneIndex').openAI)]", "logAnalyticsWorkspace", "userAssignedIdentity", "virtualNetwork" @@ -29925,7 +29921,7 @@ }, "cosmosDb": { "type": "Microsoft.Resources/deployments", - "apiVersion": "2025-04-01", + "apiVersion": "2022-09-01", "name": "[take(format('avm.res.document-db.database-account.{0}', variables('cosmosDbResourceName')), 64)]", "properties": { "expressionEvaluationOptions": { @@ -33765,7 +33761,7 @@ }, "avmStorageAccount": { "type": "Microsoft.Resources/deployments", - "apiVersion": "2025-04-01", + "apiVersion": "2022-09-01", "name": "[take(format('avm.res.storage.storage-account.{0}', variables('storageAccountName')), 64)]", "properties": { "expressionEvaluationOptions": { @@ -33814,7 +33810,9 @@ "defaultAction": "[if(parameters('enablePrivateNetworking'), 'Deny', 'Allow')]" } }, - "allowBlobPublicAccess": "[if(parameters('enablePrivateNetworking'), createObject('value', true()), createObject('value', false()))]", + "allowBlobPublicAccess": { + "value": false + }, "publicNetworkAccess": "[if(parameters('enablePrivateNetworking'), createObject('value', 'Disabled'), createObject('value', 'Enabled'))]", "privateEndpoints": "[if(parameters('enablePrivateNetworking'), createObject('value', createArray(createObject('name', format('pep-blob-{0}', variables('solutionSuffix')), 'privateDnsZoneGroup', createObject('privateDnsZoneGroupConfigs', createArray(createObject('name', 'storage-dns-zone-group-blob', 'privateDnsZoneResourceId', reference(format('avmPrivateDnsZones[{0}]', variables('dnsZoneIndex').storageBlob)).outputs.resourceId.value))), 'subnetResourceId', reference('virtualNetwork').outputs.pepsSubnetResourceId.value, 'service', 'blob'), createObject('name', format('pep-queue-{0}', variables('solutionSuffix')), 'privateDnsZoneGroup', createObject('privateDnsZoneGroupConfigs', createArray(createObject('name', 'storage-dns-zone-group-queue', 'privateDnsZoneResourceId', reference(format('avmPrivateDnsZones[{0}]', variables('dnsZoneIndex').storageQueue)).outputs.resourceId.value))), 'subnetResourceId', reference('virtualNetwork').outputs.pepsSubnetResourceId.value, 'service', 'queue'))), createObject('value', createArray()))]", "blobServices": { @@ -39533,7 +39531,7 @@ }, "saveStorageAccountSecretsInKeyVault": { "type": "Microsoft.Resources/deployments", - "apiVersion": 
"2025-04-01", + "apiVersion": "2022-09-01", "name": "[take(format('saveStorageAccountSecretsInKeyVault.{0}', variables('keyVaultName')), 64)]", "properties": { "expressionEvaluationOptions": { @@ -39577,7 +39575,7 @@ }, { "name": "ADLS-ACCOUNT-KEY", - "value": "[listOutputsWithSecureValues('avmStorageAccount', '2025-04-01').primaryAccessKey]" + "value": "[listOutputsWithSecureValues('avmStorageAccount', '2022-09-01').primaryAccessKey]" } ] } @@ -42705,7 +42703,7 @@ }, "sqlDBModule": { "type": "Microsoft.Resources/deployments", - "apiVersion": "2025-04-01", + "apiVersion": "2022-09-01", "name": "[take(format('avm.res.sql.server.{0}', variables('sqlDbName')), 64)]", "properties": { "expressionEvaluationOptions": { @@ -49347,7 +49345,7 @@ }, "webServerFarm": { "type": "Microsoft.Resources/deployments", - "apiVersion": "2025-04-01", + "apiVersion": "2022-09-01", "name": "[take(format('avm.res.web.serverfarm.{0}', variables('webServerFarmResourceName')), 64)]", "properties": { "expressionEvaluationOptions": { @@ -49920,7 +49918,7 @@ }, "webSite": { "type": "Microsoft.Resources/deployments", - "apiVersion": "2025-04-01", + "apiVersion": "2022-09-01", "name": "[take(format('module.web-sites.{0}', variables('webSiteResourceName')), 64)]", "properties": { "expressionEvaluationOptions": { @@ -49977,20 +49975,20 @@ "AZURE_SEARCH_URL_COLUMN": "[variables('azureSearchUrlColumn')]", "AZURE_OPENAI_RESOURCE": "[reference('aiFoundryAiServices').outputs.name.value]", "AZURE_OPENAI_MODEL": "[parameters('gptModelName')]", - "AZURE_OPENAI_ENDPOINT": "[reference('aiFoundryAiServices').outputs.endpoints.value['OpenAI Language Model Instance API']]", + "AZURE_OPENAI_ENDPOINT": "[reference('aiFoundryAiServices').outputs.endpoint.value]", "AZURE_OPENAI_TEMPERATURE": "[variables('azureOpenAITemperature')]", "AZURE_OPENAI_TOP_P": "[variables('azureOpenAITopP')]", "AZURE_OPENAI_MAX_TOKENS": "[variables('azureOpenAIMaxTokens')]", "AZURE_OPENAI_STOP_SEQUENCE": "[variables('azureOpenAIStopSequence')]", "AZURE_OPENAI_SYSTEM_MESSAGE": "[variables('azureOpenAISystemMessage')]", - "AZURE_OPENAI_PREVIEW_API_VERSION": "[parameters('azureOpenaiAPIVersion')]", + "AZURE_OPENAI_PREVIEW_API_VERSION": "[parameters('azureAIServicesAPIVersion')]", "AZURE_OPENAI_STREAM": "[variables('azureOpenAIStream')]", "AZURE_SEARCH_QUERY_TYPE": "[variables('azureSearchQueryType')]", "AZURE_SEARCH_VECTOR_COLUMNS": "[variables('azureSearchVectorFields')]", "AZURE_SEARCH_PERMITTED_GROUPS_COLUMN": "[variables('azureSearchPermittedGroupsField')]", "AZURE_SEARCH_STRICTNESS": "[variables('azureSearchStrictness')]", "AZURE_OPENAI_EMBEDDING_NAME": "[parameters('embeddingModel')]", - "AZURE_OPENAI_EMBEDDING_ENDPOINT": "[reference('aiFoundryAiServices').outputs.endpoints.value['OpenAI Language Model Instance API']]", + "AZURE_OPENAI_EMBEDDING_ENDPOINT": "[reference('aiFoundryAiServices').outputs.endpoint.value]", "SQLDB_SERVER": "[variables('sqlServerFqdn')]", "SQLDB_DATABASE": "[variables('sqlDbName')]", "USE_INTERNAL_STREAM": "[variables('useInternalStream')]", @@ -50006,7 +50004,7 @@ "USE_AI_PROJECT_CLIENT": "[variables('useAIProjectClientFlag')]", "AZURE_AI_AGENT_ENDPOINT": "[reference('aiFoundryAiServices').outputs.aiProjectInfo.value.apiEndpoint]", "AZURE_AI_AGENT_MODEL_DEPLOYMENT_NAME": "[parameters('gptModelName')]", - "AZURE_AI_AGENT_API_VERSION": "[parameters('azureOpenaiAPIVersion')]", + "AZURE_AI_AGENT_API_VERSION": "[parameters('azureAIServicesAPIVersion')]", "AZURE_SEARCH_CONNECTION_NAME": "[variables('aiSearchName')]", "AZURE_CLIENT_ID": 
"[reference('userAssignedIdentity').outputs.clientId.value]" }, @@ -50029,8 +50027,8 @@ "metadata": { "_generator": { "name": "bicep", - "version": "0.38.33.27573", - "templateHash": "14170137035624875111" + "version": "0.37.4.10188", + "templateHash": "4298119334635398540" } }, "definitions": { @@ -51007,7 +51005,7 @@ "count": "[length(coalesce(parameters('configs'), createArray()))]" }, "type": "Microsoft.Resources/deployments", - "apiVersion": "2025-04-01", + "apiVersion": "2022-09-01", "name": "[format('{0}-Site-Config-{1}', uniqueString(deployment().name, parameters('location')), copyIndex())]", "properties": { "expressionEvaluationOptions": { @@ -51042,8 +51040,8 @@ "metadata": { "_generator": { "name": "bicep", - "version": "0.38.33.27573", - "templateHash": "16983009113856606195" + "version": "0.37.4.10188", + "templateHash": "4653685834544796273" }, "name": "Site App Settings", "description": "This module deploys a Site App Setting." @@ -51188,7 +51186,7 @@ "count": "[length(coalesce(parameters('privateEndpoints'), createArray()))]" }, "type": "Microsoft.Resources/deployments", - "apiVersion": "2025-04-01", + "apiVersion": "2022-09-01", "name": "[format('{0}-app-PrivateEndpoint-{1}', uniqueString(deployment().name, parameters('location')), copyIndex())]", "subscriptionId": "[split(coalesce(tryGet(coalesce(parameters('privateEndpoints'), createArray())[copyIndex()], 'resourceGroupResourceId'), resourceGroup().id), '/')[2]]", "resourceGroup": "[split(coalesce(tryGet(coalesce(parameters('privateEndpoints'), createArray())[copyIndex()], 'resourceGroupResourceId'), resourceGroup().id), '/')[4]]", @@ -52014,7 +52012,7 @@ }, "searchService": { "type": "Microsoft.Resources/deployments", - "apiVersion": "2025-04-01", + "apiVersion": "2022-09-01", "name": "[take(format('avm.res.search.search-service.{0}', variables('aiSearchName')), 64)]", "properties": { "expressionEvaluationOptions": { @@ -54394,7 +54392,7 @@ "existing_AIProject_SearchConnectionModule": { "condition": "[variables('useExistingAiFoundryAiProject')]", "type": "Microsoft.Resources/deployments", - "apiVersion": "2025-04-01", + "apiVersion": "2022-09-01", "name": "aiProjectSearchConnectionDeployment", "subscriptionId": "[variables('aiFoundryAiServicesSubscriptionId')]", "resourceGroup": "[variables('aiFoundryAiServicesResourceGroupName')]", @@ -54429,8 +54427,8 @@ "metadata": { "_generator": { "name": "bicep", - "version": "0.38.33.27573", - "templateHash": "11311597701635556530" + "version": "0.37.4.10188", + "templateHash": "6038840175458269917" } }, "parameters": { @@ -54498,7 +54496,7 @@ "searchServiceToExistingAiServicesRoleAssignment": { "condition": "[variables('useExistingAiFoundryAiProject')]", "type": "Microsoft.Resources/deployments", - "apiVersion": "2025-04-01", + "apiVersion": "2022-09-01", "name": "searchToExistingAiServices-roleAssignment", "subscriptionId": "[variables('aiFoundryAiServicesSubscriptionId')]", "resourceGroup": "[variables('aiFoundryAiServicesResourceGroupName')]", @@ -54524,8 +54522,8 @@ "metadata": { "_generator": { "name": "bicep", - "version": "0.38.33.27573", - "templateHash": "9717690292313179013" + "version": "0.37.4.10188", + "templateHash": "3644919950024112374" } }, "parameters": { @@ -54706,7 +54704,7 @@ "metadata": { "description": "The API version used for the Azure AI Agent service." 
}, - "value": "[parameters('azureOpenaiAPIVersion')]" + "value": "[parameters('azureAIServicesAPIVersion')]" }, "AZURE_AI_AGENT_ENDPOINT": { "type": "string", @@ -54769,7 +54767,7 @@ "metadata": { "description": "The endpoint URL for the Azure OpenAI Embedding model." }, - "value": "[reference('aiFoundryAiServices').outputs.endpoints.value['OpenAI Language Model Instance API']]" + "value": "[reference('aiFoundryAiServices').outputs.endpoint.value]" }, "AZURE_OPENAI_EMBEDDING_NAME": { "type": "string", @@ -54783,7 +54781,7 @@ "metadata": { "description": "The endpoint URL for the Azure OpenAI service." }, - "value": "[reference('aiFoundryAiServices').outputs.endpoints.value['OpenAI Language Model Instance API']]" + "value": "[reference('aiFoundryAiServices').outputs.endpoint.value]" }, "AZURE_OPENAI_MAX_TOKENS": { "type": "string", @@ -54804,7 +54802,7 @@ "metadata": { "description": "The preview API version for Azure OpenAI." }, - "value": "[parameters('azureOpenaiAPIVersion')]" + "value": "[parameters('azureAIServicesAPIVersion')]" }, "AZURE_OPENAI_RESOURCE": { "type": "string", diff --git a/infra/main_custom.bicep b/infra/main_custom.bicep new file mode 100644 index 00000000..f089dbf2 --- /dev/null +++ b/infra/main_custom.bicep @@ -0,0 +1,1245 @@ +// ========== main_custom.bicep ========== // +// Developer-friendly version with customer ACR and user permissions for local debugging +targetScope = 'resourceGroup' + +@minLength(3) +@maxLength(20) +@description('Required. A unique prefix for all resources in this deployment. This should be 3-20 characters long:') +param solutionName string = 'clientadvisor' + +@description('Optional. Existing Log Analytics Workspace Resource ID') +param existingLogAnalyticsWorkspaceId string = '' + +@description('Optional. CosmosDB Location') +param cosmosLocation string = 'eastus2' + +@minLength(1) +@description('Optional. GPT model deployment type:') +@allowed([ + 'Standard' + 'GlobalStandard' +]) +param gptModelDeploymentType string = 'GlobalStandard' + +@minLength(1) +@description('Optional. Name of the GPT model to deploy:') +@allowed([ + 'gpt-4o-mini' +]) +param gptModelName string = 'gpt-4o-mini' + +@description('Optional. Version of the GPT model to deploy.') +param gptModelVersion string = '2024-07-18' + +@description('Optional. Version of the GPT model to deploy.') +param embeddingModelVersion string = '2' + +@description('Optional. API version for the Azure OpenAI service.') +param azureOpenaiAPIVersion string = '2025-04-01-preview' + +@minValue(10) +@description('Optional. Capacity of the GPT deployment:') +param gptModelCapacity int = 200 + +@minLength(1) +@description('Optional. Name of the Text Embedding model to deploy:') +@allowed([ + 'text-embedding-ada-002' +]) +param embeddingModel string = 'text-embedding-ada-002' + +@minValue(10) +@description('Optional. Capacity of the Embedding Model deployment') +param embeddingDeploymentCapacity int = 80 + +@allowed([ + 'australiaeast' + 'eastus' + 'eastus2' + 'francecentral' + 'japaneast' + 'swedencentral' + 'uksouth' + 'westus' + 'westus3' +]) +@metadata({ + azd: { + type: 'location' + usageName: [ + 'OpenAI.GlobalStandard.gpt-4o-mini,200' + 'OpenAI.GlobalStandard.text-embedding-ada-002,80' + ] + } +}) +@description('Required. Location for AI Foundry deployment. 
This is the location where the AI Foundry resources will be deployed.') +param azureAiServiceLocation string + +@allowed([ + 'australiaeast' + 'centralus' + 'eastasia' + 'eastus2' + 'japaneast' + 'northeurope' + 'southeastasia' + 'uksouth' +]) +@metadata({ azd: { type: 'location' } }) +@description('Required. Azure region for all services. Regions are restricted to guarantee compatibility with paired regions and replica locations for data redundancy and failover scenarios.') +param location string +var solutionLocation = empty(location) ? resourceGroup().location : location + +@maxLength(5) +@description('Optional. A unique token for the solution. This is used to ensure resource names are unique for global resources.') +param solutionUniqueToken string = substring(uniqueString(subscription().id, resourceGroup().name, solutionName), 0, 5) + +var solutionSuffix = toLower(trim(replace( + replace( + replace(replace(replace(replace('${solutionName}${solutionUniqueToken}', '-', ''), '_', ''), '.', ''), '/', ''), + ' ', + '' + ), + '*', + '' +))) + +@description('Optional. Enable private networking for applicable resources, aligned with the Well Architected Framework recommendations. Defaults to false.') +param enablePrivateNetworking bool = false + +@description('Optional. Enable monitoring applicable resources, aligned with the Well Architected Framework recommendations.') +param enableMonitoring bool = false + +@description('Optional. Enable scalability for applicable resources, aligned with the Well Architected Framework recommendations.') +param enableScalability bool = false + +@description('Optional. Enable/Disable usage telemetry for module.') +param enableTelemetry bool = true + +@description('Optional. Enable redundancy for applicable resources, aligned with the Well Architected Framework recommendations.') +param enableRedundancy bool = false + +// ========== DEVELOPER EXPERIENCE: Customer ACR Configuration ========== // +var acrName = 'acr${solutionSuffix}' +var containerRegistryHostnameActual = empty(containerRegistryHostname) ? '${acrName}.azurecr.io' : containerRegistryHostname +@description('Customer Container Registry hostname where the docker images are located.') +param containerRegistryHostname string = '' + +@description('Optional. The Container Image Name to deploy on the webapp.') +param containerImageName string = 'byc-wa-app' + +@description('Optional. The Container Image Tag to deploy on the webapp.') +param imageTag string = 'latest' + +@description('Optional. Resource ID of an existing Foundry project') +param existingFoundryProjectResourceId string = '' + +@description('Optional. 
Enable purge protection for the Key Vault') +param enablePurgeProtection bool = false + +// Application configuration variables +var appEnvironment = 'Prod' +var azureSearchIndex = 'transcripts_index' +var azureSearchUseSemanticSearch = 'True' +var azureSearchSemanticSearchConfig = 'my-semantic-config' +var azureSearchTopK = '5' +var azureSearchContentColumns = 'content' +var azureSearchFilenameColumn = 'chunk_id' +var azureSearchTitleColumn = 'client_id' +var azureSearchUrlColumn = 'sourceurl' +var azureOpenAITemperature = '0' +var azureOpenAITopP = '1' +var azureOpenAIMaxTokens = '1000' +var azureOpenAIStopSequence = '\n' +var azureOpenAISystemMessage = '''You are a helpful Wealth Advisor assistant''' +var azureOpenAIStream = 'True' +var azureSearchQueryType = 'simple' +var azureSearchVectorFields = 'contentVector' +var azureSearchPermittedGroupsField = '' +var azureSearchStrictness = '3' +var azureSearchEnableInDomain = 'False' +var azureCosmosDbEnableFeedback = 'True' +var useInternalStream = 'True' +var useAIProjectClientFlag = 'False' +var sqlServerFqdn = 'sql-${solutionSuffix}${environment().suffixes.sqlServerHostname}' + +@description('Optional. Size of the Jumpbox Virtual Machine when created.') +param vmSize string? + +@description('Optional. Admin username for the Jumpbox Virtual Machine.') +@secure() +param vmAdminUsername string? + +@description('Optional. Admin password for the Jumpbox Virtual Machine.') +@secure() +param vmAdminPassword string? + +// System prompts +var functionAppSqlPrompt = '''Generate a valid T-SQL query to find {query} for tables and columns provided below: + 1. Table: Clients + Columns: ClientId, Client, Email, Occupation, MaritalStatus, Dependents + 2. Table: InvestmentGoals + Columns: ClientId, InvestmentGoal + 3. Table: Assets + Columns: ClientId, AssetDate, Investment, ROI, Revenue, AssetType + 4. Table: ClientSummaries + Columns: ClientId, ClientSummary + 5. Table: InvestmentGoalsDetails + Columns: ClientId, InvestmentGoal, TargetAmount, Contribution + 6. Table: Retirement + Columns: ClientId, StatusDate, RetirementGoalProgress, EducationGoalProgress + 7. Table: ClientMeetings + Columns: ClientId, ConversationId, Title, StartTime, EndTime, Advisor, ClientEmail + Always use the Investment column from the Assets table as the value. + Assets table has snapshots of values by date. Do not add numbers across different dates for total values. + Do not use client name in filters. + Do not include assets values unless asked for. + ALWAYS use ClientId = {clientid} in the query filter. + ALWAYS select Client Name (Column: Client) in the query. + Query filters are IMPORTANT. Add filters like AssetType, AssetDate, etc. if needed. + When answering scheduling or time-based meeting questions, always use the StartTime column from ClientMeetings table. + Only return the generated SQL query. Do not return anything else.''' + +var functionAppCallTranscriptSystemPrompt = '''You are an assistant who supports wealth advisors in preparing for client meetings. + You have access to the client's past meeting call transcripts. + When answering questions, especially summary requests, provide a detailed and structured response that includes key topics, concerns, decisions, and trends. + If no data is available, state 'No relevant data found for previous meetings.''' + +var functionAppStreamTextSystemPrompt = '''The currently selected client's name is '{SelectedClientName}'. Treat any case-insensitive or partial mention as referring to this client. 
+ If the user mentions no name, assume they are asking about '{SelectedClientName}'. + If the user references a name that clearly differs from '{SelectedClientName}' or comparing with other clients, respond only with: 'Please only ask questions about the selected client or select another client.' Otherwise, provide thorough answers for every question using only data from SQL or call transcripts.' + If no data is found, respond with 'No data found for that client.' Remove any client identifiers from the final response. + Always send clientId as '{client_id}'.''' + +// Region configuration +var replicaRegionPairs = { + australiaeast: 'australiasoutheast' + centralus: 'westus' + eastasia: 'japaneast' + eastus: 'centralus' + eastus2: 'centralus' + japaneast: 'eastasia' + northeurope: 'westeurope' + southeastasia: 'eastasia' + uksouth: 'westeurope' + westeurope: 'northeurope' +} +var replicaLocation = replicaRegionPairs[resourceGroup().location] + +@description('Optional. The tags to apply to all deployed Azure resources.') +param tags resourceInput<'Microsoft.Resources/resourceGroups@2025-04-01'>.tags = {} + +var cosmosDbZoneRedundantHaRegionPairs = { + australiaeast: 'uksouth' + centralus: 'eastus2' + eastasia: 'southeastasia' + eastus: 'centralus' + eastus2: 'centralus' + japaneast: 'australiaeast' + northeurope: 'westeurope' + southeastasia: 'eastasia' + uksouth: 'westeurope' + westeurope: 'northeurope' +} + +var allTags = union( + { + 'azd-env-name': solutionName + }, + tags +) + +var cosmosDbHaLocation = cosmosDbZoneRedundantHaRegionPairs[resourceGroup().location] +var useExistingLogAnalytics = !empty(existingLogAnalyticsWorkspaceId) +var logAnalyticsWorkspaceResourceId = useExistingLogAnalytics ? existingLogAnalyticsWorkspaceId : logAnalyticsWorkspace!.outputs.resourceId + +@description('Tag, Created by user name') +param createdBy string = contains(deployer(), 'userPrincipalName')? split(deployer().userPrincipalName, '@')[0]: deployer().objectId + +// ========== Resource Group Tag ========== // +resource resourceGroupTags 'Microsoft.Resources/tags@2021-04-01' = { + name: 'default' + properties: { + tags: { + ...resourceGroup().tags + ...tags + TemplateName: 'Client Advisor - Developer Experience' + Type: enablePrivateNetworking ? 'WAF' : 'Non-WAF' + CreatedBy: createdBy + DeploymentName: deployment().name + } + } +} + +// ========== Log Analytics Workspace ========== // +var logAnalyticsWorkspaceResourceName = 'log-${solutionSuffix}' +module logAnalyticsWorkspace 'br/public:avm/res/operational-insights/workspace:0.12.0' = if (enableMonitoring && !useExistingLogAnalytics) { + name: take('avm.res.operational-insights.workspace.${logAnalyticsWorkspaceResourceName}', 64) + params: { + name: logAnalyticsWorkspaceResourceName + tags: tags + location: solutionLocation + enableTelemetry: enableTelemetry + skuName: 'PerGB2018' + dataRetention: 365 + features: { enableLogAccessUsingOnlyResourcePermissions: true } + diagnosticSettings: [{ useThisWorkspace: true }] + dailyQuotaGb: enableRedundancy ? 10 : null + replication: enableRedundancy + ? { + enabled: true + location: replicaLocation + } + : null + publicNetworkAccessForIngestion: enablePrivateNetworking ? 'Disabled' : 'Enabled' + publicNetworkAccessForQuery: enablePrivateNetworking ? 
'Disabled' : 'Enabled' + } +} + +// ========== Application Insights ========== // +var applicationInsightsResourceName = 'appi-${solutionSuffix}' +module applicationInsights 'br/public:avm/res/insights/component:0.6.0' = if (enableMonitoring) { + name: take('avm.res.insights.component.${applicationInsightsResourceName}', 64) + params: { + name: applicationInsightsResourceName + tags: tags + location: solutionLocation + enableTelemetry: enableTelemetry + retentionInDays: 365 + kind: 'web' + disableIpMasking: false + flowType: 'Bluefield' + workspaceResourceId: enableMonitoring ? logAnalyticsWorkspaceResourceId : '' + diagnosticSettings: enableMonitoring ? [{ workspaceResourceId: logAnalyticsWorkspaceResourceId }] : null + } +} + +// ========== User Assigned Identity ========== // +var userAssignedIdentityResourceName = 'id-${solutionSuffix}' +module userAssignedIdentity 'br/public:avm/res/managed-identity/user-assigned-identity:0.4.1' = { + name: take('avm.res.managed-identity.user-assigned-identity.${userAssignedIdentityResourceName}', 64) + params: { + name: userAssignedIdentityResourceName + location: solutionLocation + tags: tags + enableTelemetry: enableTelemetry + } +} + +// ========== SQL Operations User Assigned Identity ========== // +var sqlUserAssignedIdentityResourceName = 'id-sql-${solutionSuffix}' +module sqlUserAssignedIdentity 'br/public:avm/res/managed-identity/user-assigned-identity:0.4.1' = { + name: take('avm.res.managed-identity.user-assigned-identity.${sqlUserAssignedIdentityResourceName}', 64) + params: { + name: sqlUserAssignedIdentityResourceName + location: solutionLocation + tags: tags + enableTelemetry: enableTelemetry + } +} + +// ========== DEVELOPER EXPERIENCE: Azure Container Registry ========== // +module containerRegistry 'br/public:avm/res/container-registry/registry:0.5.0' = { + name: take('avm.res.container-registry.registry.${acrName}', 64) + params: { + name: acrName + location: solutionLocation + tags: tags + enableTelemetry: enableTelemetry + acrSku: 'Basic' + acrAdminUserEnabled: false + publicNetworkAccess: enablePrivateNetworking ? 'Disabled' : 'Enabled' + roleAssignments: [ + { + principalId: userAssignedIdentity.outputs.principalId + roleDefinitionIdOrName: 'AcrPush' + principalType: 'ServicePrincipal' + } + { + principalId: userAssignedIdentity.outputs.principalId + roleDefinitionIdOrName: 'AcrPull' + principalType: 'ServicePrincipal' + } + { + principalId: deployer().objectId + roleDefinitionIdOrName: 'AcrPush' + principalType: 'User' + } + { + principalId: deployer().objectId + roleDefinitionIdOrName: 'AcrPull' + principalType: 'User' + } + ] + diagnosticSettings: enableMonitoring ? 
[{ workspaceResourceId: logAnalyticsWorkspaceResourceId }] : null + } +} + +// ========== Virtual Network and Networking Components ========== // +module virtualNetwork 'modules/virtualNetwork.bicep' = if (enablePrivateNetworking) { + name: take('module.virtualNetwork.${solutionSuffix}', 64) + params: { + name: 'vnet-${solutionSuffix}' + addressPrefixes: ['10.0.0.0/20'] + location: solutionLocation + tags: allTags + logAnalyticsWorkspaceId: logAnalyticsWorkspaceResourceId + resourceSuffix: solutionSuffix + enableTelemetry: enableTelemetry + } +} + +// ========== Private DNS Zones ========== // +var privateDnsZones = [ + 'privatelink.cognitiveservices.azure.com' + 'privatelink.openai.azure.com' + 'privatelink.services.ai.azure.com' + 'privatelink.azurewebsites.net' + 'privatelink.blob.${environment().suffixes.storage}' + 'privatelink.queue.${environment().suffixes.storage}' + 'privatelink.file.${environment().suffixes.storage}' + 'privatelink.documents.azure.com' + 'privatelink.vaultcore.azure.net' + 'privatelink${environment().suffixes.sqlServerHostname}' + 'privatelink.search.windows.net' + 'privatelink.azurecr.io' +] + +var dnsZoneIndex = { + cognitiveServices: 0 + openAI: 1 + aiServices: 2 + appService: 3 + storageBlob: 4 + storageQueue: 5 + storageFile: 6 + cosmosDB: 7 + keyVault: 8 + sqlServer: 9 + searchService: 10 + containerRegistry: 11 +} + +var aiRelatedDnsZoneIndices = [ + dnsZoneIndex.cognitiveServices + dnsZoneIndex.openAI + dnsZoneIndex.aiServices +] + +@batchSize(5) +module avmPrivateDnsZones 'br/public:avm/res/network/private-dns-zone:0.7.1' = [ + for (zone, i) in privateDnsZones: if (enablePrivateNetworking && (empty(existingFoundryProjectResourceId) || !contains(aiRelatedDnsZoneIndices, i))) { + name: 'avm.res.network.private-dns-zone.${split(zone, '.')[1]}' + params: { + name: zone + tags: tags + enableTelemetry: enableTelemetry + virtualNetworkLinks: [ + { + name: take('vnetlink-${virtualNetwork!.outputs.name}-${split(zone, '.')[1]}', 80) + virtualNetworkResourceId: virtualNetwork!.outputs.resourceId + } + ] + } + } +] + +// ========== Key Vault Module ========== // +var keyVaultName = 'kv-${solutionSuffix}' +module keyvault 'br/public:avm/res/key-vault/vault:0.12.1' = { + name: take('avm.res.key-vault.vault.${keyVaultName}', 64) + params: { + name: keyVaultName + location: solutionLocation + tags: tags + sku: 'standard' + publicNetworkAccess: enablePrivateNetworking ? 'Disabled' : 'Enabled' + networkAcls: { + defaultAction: 'Allow' + } + enableVaultForDeployment: true + enableVaultForDiskEncryption: true + enableVaultForTemplateDeployment: true + enableRbacAuthorization: true + enableSoftDelete: true + enablePurgeProtection: enablePurgeProtection + softDeleteRetentionInDays: 7 + diagnosticSettings: enableMonitoring ? [{ workspaceResourceId: logAnalyticsWorkspaceResourceId }] : [] + privateEndpoints: enablePrivateNetworking + ? 
[ + { + name: 'pep-${keyVaultName}' + customNetworkInterfaceName: 'nic-${keyVaultName}' + privateDnsZoneGroup: { + privateDnsZoneGroupConfigs: [ + { privateDnsZoneResourceId: avmPrivateDnsZones[dnsZoneIndex.keyVault]!.outputs.resourceId } + ] + } + service: 'vault' + subnetResourceId: virtualNetwork!.outputs.pepsSubnetResourceId + } + ] + : [] + roleAssignments: [ + { + principalId: userAssignedIdentity.outputs.principalId + principalType: 'ServicePrincipal' + roleDefinitionIdOrName: 'Key Vault Administrator' + } + { + principalId: sqlUserAssignedIdentity.outputs.principalId + principalType: 'ServicePrincipal' + roleDefinitionIdOrName: 'Key Vault Secrets User' + } + // DEVELOPER EXPERIENCE: Add deployer permissions + { + principalId: deployer().objectId + principalType: 'User' + roleDefinitionIdOrName: 'Key Vault Administrator' + } + ] + secrets: [ + { + name: 'SQLDB-SERVER' + value: sqlServerFqdn + } + { + name: 'SQLDB-DATABASE' + value: sqlDbName + } + { + name: 'AZURE-OPENAI-PREVIEW-API-VERSION' + value: azureOpenaiAPIVersion + } + ] + enableTelemetry: enableTelemetry + } +} + +// ========== AI Foundry: AI Services ========== // +var useExistingAiFoundryAiProject = !empty(existingFoundryProjectResourceId) + +var aiFoundryAiServicesSubscriptionId = useExistingAiFoundryAiProject + ? split(existingFoundryProjectResourceId, '/')[2] + : subscription().id +var aiFoundryAiServicesResourceGroupName = useExistingAiFoundryAiProject + ? split(existingFoundryProjectResourceId, '/')[4] + : 'rg-${solutionSuffix}' +var aiFoundryAiServicesResourceName = useExistingAiFoundryAiProject + ? split(existingFoundryProjectResourceId, '/')[8] + : 'aif-${solutionSuffix}' +var aiFoundryAiProjectResourceName = useExistingAiFoundryAiProject + ? split(existingFoundryProjectResourceId, '/')[10] + : 'proj-${solutionSuffix}' + +var aiFoundryAiServicesAiProjectResourceName = 'proj-${solutionSuffix}' +var aiFoundryAIservicesEnabled = true +var aiFoundryAiServicesModelDeployment = { + format: 'OpenAI' + name: gptModelName + version: gptModelVersion + sku: { + name: gptModelDeploymentType + capacity: gptModelCapacity + } + raiPolicyName: 'Microsoft.Default' +} + +var aiFoundryAiServicesEmbeddingModel = { + name: embeddingModel + version: embeddingModelVersion + sku: { + name: 'GlobalStandard' + capacity: embeddingDeploymentCapacity + } + raiPolicyName: 'Microsoft.Default' +} + +module aiFoundryAiServices 'modules/ai-services.bicep' = if (aiFoundryAIservicesEnabled) { + name: take('avm.res.cognitive-services.account.${aiFoundryAiServicesResourceName}', 64) + params: { + name: aiFoundryAiServicesResourceName + location: azureAiServiceLocation + tags: tags + existingFoundryProjectResourceId: existingFoundryProjectResourceId + projectName: aiFoundryAiServicesAiProjectResourceName + projectDescription: 'AI Foundry Project' + sku: 'S0' + kind: 'AIServices' + disableLocalAuth: true + customSubDomainName: aiFoundryAiServicesResourceName + apiProperties: {} + networkAcls: { + defaultAction: 'Allow' + virtualNetworkRules: [] + ipRules: [] + } + managedIdentities: { userAssignedResourceIds: [userAssignedIdentity!.outputs.resourceId] } + roleAssignments: [ + { + roleDefinitionIdOrName: '53ca6127-db72-4b80-b1b0-d745d6d5456d' // Azure AI User + principalId: userAssignedIdentity.outputs.principalId + principalType: 'ServicePrincipal' + } + { + roleDefinitionIdOrName: '64702f94-c441-49e6-a78b-ef80e0188fee' // Azure AI Developer + principalId: userAssignedIdentity.outputs.principalId + principalType: 'ServicePrincipal' + } + { + 
roleDefinitionIdOrName: '5e0bd9bd-7b93-4f28-af87-19fc36ad61bd' // Cognitive Services OpenAI User + principalId: userAssignedIdentity.outputs.principalId + principalType: 'ServicePrincipal' + } + // DEVELOPER EXPERIENCE: Add deployer permissions for local debugging + { + roleDefinitionIdOrName: '5e0bd9bd-7b93-4f28-af87-19fc36ad61bd' // Cognitive Services OpenAI User + principalId: deployer().objectId + principalType: 'User' + } + { + roleDefinitionIdOrName: '64702f94-c441-49e6-a78b-ef80e0188fee' // Azure AI Developer + principalId: deployer().objectId + principalType: 'User' + } + { + roleDefinitionIdOrName: '53ca6127-db72-4b80-b1b0-d745d6d5456d' // Azure AI User + principalId: deployer().objectId + principalType: 'User' + } + ] + diagnosticSettings: enableMonitoring ? [{ workspaceResourceId: logAnalyticsWorkspaceResourceId }] : null + publicNetworkAccess: enablePrivateNetworking ? 'Disabled' : 'Enabled' + privateEndpoints: (enablePrivateNetworking && empty(existingFoundryProjectResourceId)) + ? ([ + { + name: 'pep-${aiFoundryAiServicesResourceName}' + customNetworkInterfaceName: 'nic-${aiFoundryAiServicesResourceName}' + subnetResourceId: virtualNetwork!.outputs.pepsSubnetResourceId + privateDnsZoneGroup: { + privateDnsZoneGroupConfigs: [ + { + name: 'ai-services-dns-zone-cognitiveservices' + privateDnsZoneResourceId: avmPrivateDnsZones[dnsZoneIndex.cognitiveServices]!.outputs.resourceId + } + { + name: 'ai-services-dns-zone-openai' + privateDnsZoneResourceId: avmPrivateDnsZones[dnsZoneIndex.openAI]!.outputs.resourceId + } + { + name: 'ai-services-dns-zone-aiservices' + privateDnsZoneResourceId: avmPrivateDnsZones[dnsZoneIndex.aiServices]!.outputs.resourceId + } + ] + } + } + ]) + : [] + deployments: [ + { + name: aiFoundryAiServicesModelDeployment.name + model: { + format: aiFoundryAiServicesModelDeployment.format + name: aiFoundryAiServicesModelDeployment.name + version: aiFoundryAiServicesModelDeployment.version + } + raiPolicyName: aiFoundryAiServicesModelDeployment.raiPolicyName + sku: { + name: aiFoundryAiServicesModelDeployment.sku.name + capacity: aiFoundryAiServicesModelDeployment.sku.capacity + } + } + { + name: aiFoundryAiServicesEmbeddingModel.name + model: { + format: 'OpenAI' + name: aiFoundryAiServicesEmbeddingModel.name + version: aiFoundryAiServicesEmbeddingModel.version + } + raiPolicyName: aiFoundryAiServicesEmbeddingModel.raiPolicyName + sku: { + name: aiFoundryAiServicesEmbeddingModel.sku.name + capacity: aiFoundryAiServicesEmbeddingModel.sku.capacity + } + } + ] + } +} + +// ========== DEVELOPER EXPERIENCE: Cosmos DB with User Permissions ========== // +var cosmosDbResourceName = 'cosmos-${solutionSuffix}' +var cosmosDbDatabaseName = 'db_conversation_history' +var collectionName = 'conversations' +module cosmosDb 'br/public:avm/res/document-db/database-account:0.15.0' = { + name: take('avm.res.document-db.database-account.${cosmosDbResourceName}', 64) + params: { + name: cosmosDbResourceName + location: cosmosLocation + tags: tags + enableTelemetry: enableTelemetry + sqlDatabases: [ + { + name: cosmosDbDatabaseName + containers: [ + { + name: collectionName + paths: [ + '/userId' + ] + } + ] + } + ] + dataPlaneRoleDefinitions: [ + { + roleName: 'Cosmos DB SQL Data Contributor' + dataActions: [ + 'Microsoft.DocumentDB/databaseAccounts/readMetadata' + 'Microsoft.DocumentDB/databaseAccounts/sqlDatabases/containers/*' + 'Microsoft.DocumentDB/databaseAccounts/sqlDatabases/containers/items/*' + ] + assignments: [ + { principalId: userAssignedIdentity.outputs.principalId } + 
// DEVELOPER EXPERIENCE: Add deployer permissions for local debugging + { principalId: deployer().objectId } + ] + } + ] + diagnosticSettings: enableMonitoring ? [{ workspaceResourceId: logAnalyticsWorkspaceResourceId }] : null + networkRestrictions: { + networkAclBypass: 'None' + publicNetworkAccess: enablePrivateNetworking ? 'Disabled' : 'Enabled' + } + privateEndpoints: enablePrivateNetworking + ? [ + { + name: 'pep-${cosmosDbResourceName}' + customNetworkInterfaceName: 'nic-${cosmosDbResourceName}' + privateDnsZoneGroup: { + privateDnsZoneGroupConfigs: [ + { privateDnsZoneResourceId: avmPrivateDnsZones[dnsZoneIndex.cosmosDB]!.outputs.resourceId } + ] + } + service: 'Sql' + subnetResourceId: virtualNetwork!.outputs.pepsSubnetResourceId + } + ] + : [] + zoneRedundant: enableRedundancy ? true : false + capabilitiesToAdd: enableRedundancy ? null : ['EnableServerless'] + automaticFailover: enableRedundancy ? true : false + failoverLocations: enableRedundancy + ? [ + { + failoverPriority: 0 + isZoneRedundant: true + locationName: solutionLocation + } + { + failoverPriority: 1 + isZoneRedundant: true + locationName: cosmosDbHaLocation + } + ] + : [ + { + locationName: solutionLocation + failoverPriority: 0 + isZoneRedundant: enableRedundancy + } + ] + } + dependsOn: [keyvault, avmStorageAccount] +} + +// ========== Storage account module ========== // +var storageAccountName = 'st${solutionSuffix}' +module avmStorageAccount 'br/public:avm/res/storage/storage-account:0.20.0' = { + name: take('avm.res.storage.storage-account.${storageAccountName}', 64) + params: { + name: storageAccountName + location: solutionLocation + managedIdentities: { systemAssigned: true } + minimumTlsVersion: 'TLS1_2' + enableTelemetry: enableTelemetry + tags: tags + accessTier: 'Hot' + supportsHttpsTrafficOnly: true + roleAssignments: [ + { + principalId: userAssignedIdentity.outputs.principalId + roleDefinitionIdOrName: 'Storage Blob Data Contributor' + principalType: 'ServicePrincipal' + } + // DEVELOPER EXPERIENCE: Add deployer permissions + { + principalId: deployer().objectId + roleDefinitionIdOrName: 'Storage Blob Data Contributor' + principalType: 'User' + } + ] + networkAcls: { + bypass: 'AzureServices' + defaultAction: enablePrivateNetworking ? 'Deny' : 'Allow' + } + allowBlobPublicAccess: false + publicNetworkAccess: enablePrivateNetworking ? 'Disabled' : 'Enabled' + privateEndpoints: enablePrivateNetworking + ? 
[ + { + name: 'pep-blob-${solutionSuffix}' + privateDnsZoneGroup: { + privateDnsZoneGroupConfigs: [ + { + name: 'storage-dns-zone-group-blob' + privateDnsZoneResourceId: avmPrivateDnsZones[dnsZoneIndex.storageBlob]!.outputs.resourceId + } + ] + } + subnetResourceId: virtualNetwork!.outputs.pepsSubnetResourceId + service: 'blob' + } + { + name: 'pep-queue-${solutionSuffix}' + privateDnsZoneGroup: { + privateDnsZoneGroupConfigs: [ + { + name: 'storage-dns-zone-group-queue' + privateDnsZoneResourceId: avmPrivateDnsZones[dnsZoneIndex.storageQueue]!.outputs.resourceId + } + ] + } + subnetResourceId: virtualNetwork!.outputs.pepsSubnetResourceId + service: 'queue' + } + ] + : [] + blobServices: { + corsRules: [] + deleteRetentionPolicyEnabled: false + containers: [ + { + name: 'data' + publicAccess: 'None' + denyEncryptionScopeOverride: false + defaultEncryptionScope: '$account-encryption-key' + } + ] + } + } + dependsOn: [keyvault] +} + +// ========== SQL module ========== // +var sqlDbName = 'sqldb-${solutionSuffix}' +module sqlDBModule 'br/public:avm/res/sql/server:0.20.1' = { + name: take('avm.res.sql.server.${sqlDbName}', 64) + params: { + name: 'sql-${solutionSuffix}' + administrators: { + azureADOnlyAuthentication: true + login: userAssignedIdentity.outputs.name + principalType: 'Application' + sid: userAssignedIdentity.outputs.principalId + tenantId: subscription().tenantId + } + connectionPolicy: 'Redirect' + databases: [ + { + zoneRedundant: enableRedundancy + availabilityZone: -1 + collation: 'SQL_Latin1_General_CP1_CI_AS' + diagnosticSettings: enableMonitoring + ? [{ workspaceResourceId: logAnalyticsWorkspaceResourceId }] + : null + licenseType: 'LicenseIncluded' + maxSizeBytes: 34359738368 + name: 'sqldb-${solutionSuffix}' + minCapacity: '1' + sku: { + name: 'GP_S_Gen5' + tier: 'GeneralPurpose' + family: 'Gen5' + capacity: 2 + } + } + ] + location: solutionLocation + managedIdentities: { + systemAssigned: true + userAssignedResourceIds: [ + userAssignedIdentity.outputs.resourceId + ] + } + primaryUserAssignedIdentityResourceId: userAssignedIdentity.outputs.resourceId + privateEndpoints: enablePrivateNetworking + ? [ + { + privateDnsZoneGroup: { + privateDnsZoneGroupConfigs: [ + { + privateDnsZoneResourceId: avmPrivateDnsZones[dnsZoneIndex.sqlServer]!.outputs.resourceId + } + ] + } + service: 'sqlServer' + subnetResourceId: virtualNetwork!.outputs.pepsSubnetResourceId + tags: tags + } + ] + : [] + firewallRules: (!enablePrivateNetworking) ? [ + { + endIpAddress: '255.255.255.255' + name: 'AllowSpecificRange' + startIpAddress: '0.0.0.0' + } + { + endIpAddress: '0.0.0.0' + name: 'AllowAllWindowsAzureIps' + startIpAddress: '0.0.0.0' + } + ] : [] + tags: tags + } +} + +// ========== Frontend server farm ========== // +var webServerFarmResourceName = 'asp-${solutionSuffix}' +module webServerFarm 'br/public:avm/res/web/serverfarm:0.5.0' = { + name: take('avm.res.web.serverfarm.${webServerFarmResourceName}', 64) + params: { + name: webServerFarmResourceName + tags: tags + enableTelemetry: enableTelemetry + location: solutionLocation + reserved: true + kind: 'linux' + diagnosticSettings: enableMonitoring ? [{ workspaceResourceId: logAnalyticsWorkspaceResourceId }] : null + skuName: enableScalability || enableRedundancy ? 'P1v3' : 'B3' + skuCapacity: enableScalability ? 3 : 1 + zoneRedundant: enableRedundancy ? 
true : false + } +} + +// ========== Frontend web site ========== // +var webSiteResourceName = 'app-${solutionSuffix}' +module webSite 'modules/web-sites.bicep' = { + name: take('module.web-sites.${webSiteResourceName}', 64) + params: { + name: webSiteResourceName + tags: tags + location: solutionLocation + managedIdentities: { userAssignedResourceIds: [userAssignedIdentity!.outputs.resourceId, sqlUserAssignedIdentity!.outputs.resourceId] } + kind: 'app,linux,container' + serverFarmResourceId: webServerFarm.?outputs.resourceId + siteConfig: { + linuxFxVersion: 'DOCKER|${containerRegistryHostnameActual}/${containerImageName}:${imageTag}' + minTlsVersion: '1.2' + } + configs: [ + { + name: 'appsettings' + properties: { + APP_ENV: appEnvironment + APPINSIGHTS_INSTRUMENTATIONKEY: enableMonitoring ? applicationInsights!.outputs.instrumentationKey : '' + APPLICATIONINSIGHTS_CONNECTION_STRING: enableMonitoring ? applicationInsights!.outputs.connectionString : '' + AZURE_SEARCH_SERVICE: aiSearchName + AZURE_SEARCH_INDEX: azureSearchIndex + AZURE_SEARCH_USE_SEMANTIC_SEARCH: azureSearchUseSemanticSearch + AZURE_SEARCH_SEMANTIC_SEARCH_CONFIG: azureSearchSemanticSearchConfig + AZURE_SEARCH_TOP_K: azureSearchTopK + AZURE_SEARCH_ENABLE_IN_DOMAIN: azureSearchEnableInDomain + AZURE_SEARCH_CONTENT_COLUMNS: azureSearchContentColumns + AZURE_SEARCH_FILENAME_COLUMN: azureSearchFilenameColumn + AZURE_SEARCH_TITLE_COLUMN: azureSearchTitleColumn + AZURE_SEARCH_URL_COLUMN: azureSearchUrlColumn + AZURE_OPENAI_RESOURCE: aiFoundryAiServices.outputs.name + AZURE_OPENAI_MODEL: gptModelName + AZURE_OPENAI_ENDPOINT: aiFoundryAiServices.outputs.endpoints['OpenAI Language Model Instance API'] + AZURE_OPENAI_TEMPERATURE: azureOpenAITemperature + AZURE_OPENAI_TOP_P: azureOpenAITopP + AZURE_OPENAI_MAX_TOKENS: azureOpenAIMaxTokens + AZURE_OPENAI_STOP_SEQUENCE: azureOpenAIStopSequence + AZURE_OPENAI_SYSTEM_MESSAGE: azureOpenAISystemMessage + AZURE_OPENAI_PREVIEW_API_VERSION: azureOpenaiAPIVersion + AZURE_OPENAI_STREAM: azureOpenAIStream + AZURE_SEARCH_QUERY_TYPE: azureSearchQueryType + AZURE_SEARCH_VECTOR_COLUMNS: azureSearchVectorFields + AZURE_SEARCH_PERMITTED_GROUPS_COLUMN: azureSearchPermittedGroupsField + AZURE_SEARCH_STRICTNESS: azureSearchStrictness + AZURE_OPENAI_EMBEDDING_NAME: embeddingModel + AZURE_OPENAI_EMBEDDING_ENDPOINT : aiFoundryAiServices.outputs.endpoints['OpenAI Language Model Instance API'] + SQLDB_SERVER: sqlServerFqdn + SQLDB_DATABASE: sqlDbName + USE_INTERNAL_STREAM: useInternalStream + AZURE_COSMOSDB_ACCOUNT: cosmosDb.outputs.name + AZURE_COSMOSDB_CONVERSATIONS_CONTAINER: collectionName + AZURE_COSMOSDB_DATABASE: cosmosDbDatabaseName + AZURE_COSMOSDB_ENABLE_FEEDBACK: azureCosmosDbEnableFeedback + SQLDB_USER_MID: sqlUserAssignedIdentity.outputs.clientId + AZURE_AI_SEARCH_ENDPOINT: 'https://${aiSearchName}.search.windows.net' + AZURE_SQL_SYSTEM_PROMPT: functionAppSqlPrompt + AZURE_CALL_TRANSCRIPT_SYSTEM_PROMPT: functionAppCallTranscriptSystemPrompt + AZURE_OPENAI_STREAM_TEXT_SYSTEM_PROMPT: functionAppStreamTextSystemPrompt + USE_AI_PROJECT_CLIENT: useAIProjectClientFlag + AZURE_AI_AGENT_ENDPOINT: useExistingAiFoundryAiProject ? 
existingAiFoundryAiServicesProject!.properties.endpoints.inference : aiFoundryAiServices.outputs.aiProjectInfo.apiEndpoint + AZURE_AI_AGENT_MODEL_DEPLOYMENT_NAME: gptModelName + AZURE_AI_AGENT_API_VERSION: azureOpenaiAPIVersion + AZURE_SEARCH_CONNECTION_NAME: aiSearchName + AZURE_CLIENT_ID: userAssignedIdentity.outputs.clientId + } + applicationInsightResourceId: enableMonitoring ? applicationInsights!.outputs.resourceId : null + } + ] + diagnosticSettings: enableMonitoring ? [{ workspaceResourceId: logAnalyticsWorkspaceResourceId }] : null + vnetRouteAllEnabled: enablePrivateNetworking ? true : false + vnetImagePullEnabled: enablePrivateNetworking ? true : false + virtualNetworkSubnetId: enablePrivateNetworking ? virtualNetwork!.outputs.webSubnetResourceId : null + publicNetworkAccess: 'Enabled' + } +} + +// ========== AI Search Service ========== // +var aiSearchName = 'srch-${solutionSuffix}' +module searchService 'br/public:avm/res/search/search-service:0.11.1' = { + name: take('avm.res.search.search-service.${aiSearchName}', 64) + params: { + name: aiSearchName + authOptions: { + aadOrApiKey: { + aadAuthFailureMode: 'http401WithBearerChallenge' + } + } + diagnosticSettings: enableMonitoring ? [ + { + workspaceResourceId: logAnalyticsWorkspaceResourceId + } + ] : null + disableLocalAuth: false + hostingMode: 'default' + managedIdentities: { + systemAssigned: true + } + networkRuleSet: { + bypass: 'AzureServices' + ipRules: [] + } + roleAssignments: [ + { + roleDefinitionIdOrName: '1407120a-92aa-4202-b7e9-c0e197c71c8f' // Search Index Data Reader + principalId: userAssignedIdentity.outputs.principalId + principalType: 'ServicePrincipal' + } + { + roleDefinitionIdOrName: '7ca78c08-252a-4471-8644-bb5ff32d4ba0' // Search Service Contributor + principalId: userAssignedIdentity.outputs.principalId + principalType: 'ServicePrincipal' + } + { + roleDefinitionIdOrName: '1407120a-92aa-4202-b7e9-c0e197c71c8f' // Search Index Data Reader + principalId: !useExistingAiFoundryAiProject ? aiFoundryAiServices.outputs.aiProjectInfo.aiprojectSystemAssignedMIPrincipalId : existingAiFoundryAiServicesProject!.identity.principalId + principalType: 'ServicePrincipal' + } + { + roleDefinitionIdOrName: '7ca78c08-252a-4471-8644-bb5ff32d4ba0' // Search Service Contributor + principalId: !useExistingAiFoundryAiProject ? aiFoundryAiServices.outputs.aiProjectInfo.aiprojectSystemAssignedMIPrincipalId : existingAiFoundryAiServicesProject!.identity.principalId + principalType: 'ServicePrincipal' + } + // DEVELOPER EXPERIENCE: Add deployer permissions + { + roleDefinitionIdOrName: '1407120a-92aa-4202-b7e9-c0e197c71c8f' // Search Index Data Reader + principalId: deployer().objectId + principalType: 'User' + } + { + roleDefinitionIdOrName: '7ca78c08-252a-4471-8644-bb5ff32d4ba0' // Search Service Contributor + principalId: deployer().objectId + principalType: 'User' + } + ] + partitionCount: 1 + replicaCount: 1 + sku: 'standard' + semanticSearch: 'free' + tags: tags + publicNetworkAccess: 'Enabled' + privateEndpoints: enablePrivateNetworking + ? 
[ + { + name: 'pep-${aiSearchName}' + customNetworkInterfaceName: 'nic-${aiSearchName}' + privateDnsZoneGroup: { + privateDnsZoneGroupConfigs: [ + { privateDnsZoneResourceId: avmPrivateDnsZones[dnsZoneIndex.searchService]!.outputs.resourceId } + ] + } + service: 'searchService' + subnetResourceId: virtualNetwork!.outputs.pepsSubnetResourceId + } + ] + : [] + } +} + +// ========== AI Search Project Connection ========== // +resource projectAISearchConnection 'Microsoft.CognitiveServices/accounts/projects/connections@2025-04-01-preview' = if (!useExistingAiFoundryAiProject) { + name: '${aiFoundryAiServicesResourceName}/${aiFoundryAiServicesAiProjectResourceName}/${aiSearchName}' + properties: { + category: 'CognitiveSearch' + target: 'https://${aiSearchName}.search.windows.net' + authType: 'AAD' + isSharedToAll: true + metadata: { + ApiType: 'Azure' + ResourceId: searchService.outputs.resourceId + location: searchService.outputs.location + } + } +} + +module existing_AIProject_SearchConnectionModule 'modules/deploy_aifp_aisearch_connection.bicep' = if (useExistingAiFoundryAiProject) { + name: 'aiProjectSearchConnectionDeployment' + scope: resourceGroup(aiFoundryAiServicesSubscriptionId, aiFoundryAiServicesResourceGroupName) + params: { + existingAIProjectName: aiFoundryAiProjectResourceName + existingAIFoundryName: aiFoundryAiServicesResourceName + aiSearchName: aiSearchName + aiSearchResourceId: searchService.outputs.resourceId + aiSearchLocation: searchService.outputs.location + aiSearchConnectionName: aiSearchName + } +} + +// ========== Existing AI Services Resources ========== // +resource existingAiFoundryAiServices 'Microsoft.CognitiveServices/accounts@2025-04-01-preview' existing = if (useExistingAiFoundryAiProject) { + name: aiFoundryAiServicesResourceName + scope: resourceGroup(aiFoundryAiServicesSubscriptionId, aiFoundryAiServicesResourceGroupName) +} + +resource existingAiFoundryAiServicesProject 'Microsoft.CognitiveServices/accounts/projects@2025-04-01-preview' existing = if (useExistingAiFoundryAiProject) { + name: aiFoundryAiProjectResourceName + parent: existingAiFoundryAiServices +} + +// ========== Search Service Role Assignments ========== // +resource searchServiceToAiServicesRoleAssignment 'Microsoft.Authorization/roleAssignments@2022-04-01' = if (!useExistingAiFoundryAiProject) { + name: guid(aiSearchName, '5e0bd9bd-7b93-4f28-af87-19fc36ad61bd', aiFoundryAiServicesResourceName) + properties: { + roleDefinitionId: subscriptionResourceId('Microsoft.Authorization/roleDefinitions', '5e0bd9bd-7b93-4f28-af87-19fc36ad61bd') // Cognitive Services OpenAI User + principalId: searchService.outputs.systemAssignedMIPrincipalId! + principalType: 'ServicePrincipal' + } +} + +// Role assignment for existing AI Services scenario +module searchServiceToExistingAiServicesRoleAssignment 'modules/role-assignment.bicep' = if (useExistingAiFoundryAiProject) { + name: 'searchToExistingAiServices-roleAssignment' + scope: resourceGroup(aiFoundryAiServicesSubscriptionId, aiFoundryAiServicesResourceGroupName) + params: { + principalId: searchService.outputs.systemAssignedMIPrincipalId! 
+ roleDefinitionId: '5e0bd9bd-7b93-4f28-af87-19fc36ad61bd' // Cognitive Services OpenAI User + targetResourceName: aiFoundryAiServices.outputs.name + } +} + +// ========== Outputs ========== // +@description('URL of the deployed web application.') +output WEB_APP_URL string = 'https://${webSite.outputs.name}.azurewebsites.net' + +@description('Name of the deployed web application.') +output WEB_APP_NAME string = webSite.outputs.name + +@description('Name of the Azure Container Registry.') +output AZURE_CONTAINER_REGISTRY_NAME string = containerRegistry.outputs.name + +@description('Login server of the Azure Container Registry.') +output AZURE_CONTAINER_REGISTRY_ENDPOINT string = containerRegistry.outputs.loginServer + +@description('Container registry hostname for docker images.') +output CONTAINER_REGISTRY_HOSTNAME string = containerRegistryHostnameActual + +@description('Container image name.') +output CONTAINER_IMAGE_NAME string = containerImageName + +@description('Container image tag.') +output IMAGE_TAG string = imageTag + +@description('Name of the storage account.') +output STORAGE_ACCOUNT_NAME string = avmStorageAccount.outputs.name + +@description('Name of the storage container.') +output STORAGE_CONTAINER_NAME string = 'data' + +@description('Name of the Key Vault.') +output KEY_VAULT_NAME string = keyvault.outputs.name + +@description('Name of the Cosmos DB account.') +output COSMOSDB_ACCOUNT_NAME string = cosmosDb.outputs.name + +@description('Name of the resource group.') +output RESOURCE_GROUP_NAME string = resourceGroup().name + +@description('The resource ID of the AI Foundry instance.') +output AI_FOUNDRY_RESOURCE_ID string = aiFoundryAiServices.outputs.resourceId + +@description('Name of the SQL Database server.') +output SQLDB_SERVER_NAME string = sqlDBModule.outputs.name + +@description('Name of the SQL Database.') +output SQLDB_DATABASE string = sqlDbName + +@description('Name of the managed identity used by the web app.') +output MANAGEDIDENTITY_WEBAPP_NAME string = userAssignedIdentity.outputs.name + +@description('Client ID of the managed identity used by the web app.') +output MANAGEDIDENTITY_WEBAPP_CLIENTID string = userAssignedIdentity.outputs.clientId + +@description('Name of the managed identity used for SQL database operations.') +output MANAGEDIDENTITY_SQL_NAME string = sqlUserAssignedIdentity.outputs.name + +@description('Client ID of the managed identity used for SQL database operations.') +output MANAGEDIDENTITY_SQL_CLIENTID string = sqlUserAssignedIdentity.outputs.clientId + +@description('Name of the AI Search service.') +output AI_SEARCH_SERVICE_NAME string = aiSearchName + +@description('Specifies the current application environment.') +output APP_ENV string = appEnvironment + +@description('The Application Insights instrumentation key.') +output APPINSIGHTS_INSTRUMENTATIONKEY string = enableMonitoring ? applicationInsights!.outputs.instrumentationKey : '' + +@description('The Application Insights connection string.') +output APPLICATIONINSIGHTS_CONNECTION_STRING string = enableMonitoring ? 
applicationInsights!.outputs.connectionString : '' + +@description('The Azure Subscription ID where the resources are deployed.') +output AZURE_SUBSCRIPTION_ID string = subscription().subscriptionId + +// Additional outputs for environment configuration +@description('Azure OpenAI endpoint.') +output AZURE_OPENAI_ENDPOINT string = aiFoundryAiServices.outputs.endpoints['OpenAI Language Model Instance API'] + +@description('Azure OpenAI resource name.') +output AZURE_OPENAI_RESOURCE string = aiFoundryAiServices.outputs.name + +@description('Azure OpenAI model name.') +output AZURE_OPENAI_MODEL string = gptModelName + +@description('Azure AI Search endpoint.') +output AZURE_AI_SEARCH_ENDPOINT string = 'https://${aiSearchName}.search.windows.net' + +@description('Azure Cosmos DB account name.') +output AZURE_COSMOSDB_ACCOUNT string = cosmosDb.outputs.name + +@description('Azure Cosmos DB database name.') +output AZURE_COSMOSDB_DATABASE string = cosmosDbDatabaseName + +@description('Azure Cosmos DB conversations container name.') +output AZURE_COSMOSDB_CONVERSATIONS_CONTAINER string = collectionName + +@description('SQL Database server FQDN.') +output SQLDB_SERVER string = sqlServerFqdn + +@description('Client ID for web app managed identity.') +output AZURE_CLIENT_ID string = userAssignedIdentity.outputs.clientId From 8c73dac4d0660e07f43591a2ffd9f2fdbeb0751b Mon Sep 17 00:00:00 2001 From: "Niraj Chaudhari (Persistent Systems Inc)" Date: Wed, 12 Nov 2025 14:50:29 +0530 Subject: [PATCH 24/32] Add azure-ai-projects and azure-ai-inference to requirement.txt fine for index_script --- infra/scripts/index_scripts/requirements.txt | 2 ++ 1 file changed, 2 insertions(+) diff --git a/infra/scripts/index_scripts/requirements.txt b/infra/scripts/index_scripts/requirements.txt index ab62b1af..1dce43db 100644 --- a/infra/scripts/index_scripts/requirements.txt +++ b/infra/scripts/index_scripts/requirements.txt @@ -9,6 +9,8 @@ azure-identity azure-ai-textanalytics azure-search-documents==11.7.0b1 azure-keyvault-secrets +azure-ai-projects==1.0.0 +azure-ai-inference==1.0.0b9 pandas datetime From c10f3ba4063f69c0d48258543181d353c3b6de0b Mon Sep 17 00:00:00 2001 From: "Niraj Chaudhari (Persistent Systems Inc)" Date: Wed, 12 Nov 2025 14:53:33 +0530 Subject: [PATCH 25/32] Add azure-ai-projects and azure-ai-inference to requirement.txt fine for index_script and dev script --- src/App/requirements-dev.txt | 2 ++ 1 file changed, 2 insertions(+) diff --git a/src/App/requirements-dev.txt b/src/App/requirements-dev.txt index e0b03d3d..6057613a 100644 --- a/src/App/requirements-dev.txt +++ b/src/App/requirements-dev.txt @@ -1,5 +1,7 @@ -r requirements.txt azure-identity==1.25.0 +azure-ai-projects==1.0.0 +azure-ai-inference==1.0.0b9 openai==2.0.1 azure-search-documents==11.7.0b1 azure-storage-blob==12.26.0 From 5023139e9e8d62b6dbbe0bad63a4333179b8de1e Mon Sep 17 00:00:00 2001 From: "Niraj Chaudhari (Persistent Systems Inc)" Date: Mon, 17 Nov 2025 15:18:32 +0530 Subject: [PATCH 26/32] Enable Key Vault while running post deployment script --- infra/scripts/process_sample_data.sh | 46 ++++++++++++++++++++++++++++ 1 file changed, 46 insertions(+) diff --git a/infra/scripts/process_sample_data.sh b/infra/scripts/process_sample_data.sh index 3cd038f0..f64c8c1b 100644 --- a/infra/scripts/process_sample_data.sh +++ b/infra/scripts/process_sample_data.sh @@ -19,6 +19,7 @@ azSubscriptionId="" original_storage_public_access="" original_storage_default_action="" original_foundry_public_access="" +original_keyvault_public_access="" 
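# The functions below save the vault's current publicNetworkAccess value in
# original_keyvault_public_access, enable public access while this script does its work,
# and restore the saved value afterwards. If the script stops before restore_network_access
# runs, the same az CLI calls it uses can be rerun by hand; a sketch only, reusing this
# script's variable names for the vault and resource group:
#   az keyvault show --name "$keyvaultName" --resource-group "$resourceGroupName" \
#     --query "properties.publicNetworkAccess" -o tsv
#   az keyvault update --name "$keyvaultName" --resource-group "$resourceGroupName" \
#     --public-network-access Disabled --output none   # "Disabled" is an example; set back whichever value the show command reports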
aif_resource_group="" aif_account_resource_id="" # Add global variable for SQL Server public access @@ -112,6 +113,28 @@ enable_public_access() { echo "✓ AI Foundry public access already enabled" fi + # Enable public access for Key Vault + echo "Enabling public access for Key Vault: $keyvaultName" + original_keyvault_public_access=$(az keyvault show \ + --name "$keyvaultName" \ + --resource-group "$resourceGroupName" \ + --query "properties.publicNetworkAccess" \ + -o tsv) + if [ "$original_keyvault_public_access" != "Enabled" ]; then + az keyvault update \ + --name "$keyvaultName" \ + --resource-group "$resourceGroupName" \ + --public-network-access Enabled \ + --output none + if [ $? -eq 0 ]; then + echo "✓ Key Vault public access enabled" + else + echo "✗ Failed to enable Key Vault public access" + return 1 + fi + else + echo "✓ Key Vault public access already enabled" + fi # Enable public access for SQL Server echo "Enabling public access for SQL Server: $sqlServerName" @@ -250,6 +273,29 @@ restore_network_access() { else echo "AI Foundry access unchanged (already at desired state)" fi + + # Restore Key Vault access + if [ -n "$original_keyvault_public_access" ] && [ "$original_keyvault_public_access" != "Enabled" ]; then + echo "Restoring Key Vault public access to: $original_keyvault_public_access" + # Handle case sensitivity - convert to proper case + case "$original_keyvault_public_access" in + "enabled"|"Enabled") restore_value="Enabled" ;; + "disabled"|"Disabled") restore_value="Disabled" ;; + *) restore_value="$original_keyvault_public_access" ;; + esac + az keyvault update \ + --name "$keyvaultName" \ + --resource-group "$resourceGroupName" \ + --public-network-access "$restore_value" \ + --output none + if [ $? -eq 0 ]; then + echo "✓ Key Vault access restored" + else + echo "✗ Failed to restore Key Vault access" + fi + else + echo "Key Vault access unchanged (already at desired state)" + fi # Restore SQL Server public access if [ -n "$original_sql_public_access" ] && [ "$original_sql_public_access" != "Enabled" ]; then From 331bd04fc57fe41b90d96f530596fc46281420f1 Mon Sep 17 00:00:00 2001 From: Rafi-Microsoft Date: Wed, 19 Nov 2025 11:43:05 +0530 Subject: [PATCH 27/32] =?UTF-8?q?fix:=20display=20previous=20meeting=20dat?= =?UTF-8?q?es=20when=20asked=20=E2=80=9CProvide=20details=20of=20last=20me?= =?UTF-8?q?etings=E2=80=9D=20(#751)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * docs: merge from dev to main Samplequestion (#737) * Update SampleQuestions.md with response time note (#736) Added a note about average response time and formatted the document. Co-authored-by: Prajwal-Microsoft * fix: update API version to 2025-04-01 and adjust skuCapacity in main.bicep * fix: rename deploymentType parameter to gptModelDeploymentType for clarity * fix: Re-query data after updating sample data to refresh rows (#739) * DataRefreshLogic * pylint * pylintfix * fix * fix: clarify skuCapacity comment in main.bicep for WAF deployment * fix: update skuCapacity configuration for WAF deployment to ensure correct agent handling --------- Co-authored-by: Prajwal-Microsoft Co-authored-by: Pavan-Microsoft Co-authored-by: UtkarshMishra-Microsoft * Add disclaimer for AI solutions in README (#741) Added a disclaimer about AI solutions and compliance. 
* fix v1

* code cleanup

* Remove README changes - reset to dev version

---------

Co-authored-by: Prajwal-Microsoft
Co-authored-by: Thanusree-Microsoft <168087422+Thanusree-Microsoft@users.noreply.github.com>
Co-authored-by: Pavan-Microsoft
Co-authored-by: UtkarshMishra-Microsoft
---
 .../index_scripts/create_search_index.py      | 26 +++++++++++++++++++
 1 file changed, 26 insertions(+)

diff --git a/infra/scripts/index_scripts/create_search_index.py b/infra/scripts/index_scripts/create_search_index.py
index bd5ed961..b1e203a4 100644
--- a/infra/scripts/index_scripts/create_search_index.py
+++ b/infra/scripts/index_scripts/create_search_index.py
@@ -32,6 +32,7 @@
     FileSystemClient,
 )
 from azure.ai.projects import AIProjectClient
+from datetime import datetime

 # Get Azure Key Vault Client
 key_vault_name = "kv_to-be-replaced" #'nc6262-kv-2fpeafsylfd2e'
@@ -79,6 +80,9 @@
     SearchableField(name="content", type=SearchFieldDataType.String),
     SearchableField(name="sourceurl", type=SearchFieldDataType.String),
     SearchableField(name="client_id", type=SearchFieldDataType.String, filterable=True),
+    SimpleField(name="meeting_start_time", type=SearchFieldDataType.DateTimeOffset, sortable=True, filterable=True),
+    SimpleField(name="meeting_end_time", type=SearchFieldDataType.DateTimeOffset, sortable=True, filterable=True),
+    SearchableField(name="meeting_title", type=SearchFieldDataType.String),
     SearchField(
         name="contentVector",
         type=SearchFieldDataType.Collection(SearchFieldDataType.Single),
@@ -253,14 +257,33 @@ def chunk_data(text):

     chunks = chunk_data(text)
     chunk_num = 0
+
+    def convert_to_iso8601(date_str):
+        """Convert datetime string to ISO 8601 format with UTC timezone"""
+        if pd.isna(date_str):
+            return None
+        try:
+            dt = pd.to_datetime(date_str)
+            return dt.strftime('%Y-%m-%dT%H:%M:%S.000Z')
+        except Exception:
+            # Unparseable dates are treated as missing rather than failing the indexing run
+            return None
+
+    meeting_start_time = convert_to_iso8601(df_file_metadata.get("StartTime"))
+    meeting_end_time = convert_to_iso8601(df_file_metadata.get("EndTime"))
+    meeting_title = str(df_file_metadata["Title"]) if pd.notna(df_file_metadata.get("Title")) else ""
+
+    meeting_start_time_display = str(df_file_metadata["StartTime"]) if pd.notna(df_file_metadata.get("StartTime")) else None
+
     for chunk in chunks:
         chunk_num += 1
+        date_context = f"Meeting Date: {meeting_start_time_display}. " if meeting_start_time_display else ""
         d = {
             "chunk_id": document_id + "_" + str(chunk_num).zfill(2),
             "client_id": str(df_file_metadata["ClientId"]),
             "content": "ClientId is "
             + str(df_file_metadata["ClientId"])
             + " . "
+            + date_context
             + chunk,
         }
@@ -285,6 +308,9 @@ def chunk_data(text):
                 "client_id": d["client_id"],
                 "content": d["content"],
                 "sourceurl": path.name.split("/")[-1],
+                "meeting_start_time": meeting_start_time,
+                "meeting_end_time": meeting_end_time,
+                "meeting_title": meeting_title,
                 "contentVector": v_contentVector,
             }
         )

From cb66abc53afd1eff257c7ba4b4df2f85c0c23e2a Mon Sep 17 00:00:00 2001
From: Abdul-Microsoft
Date: Wed, 19 Nov 2025 13:32:26 +0530
Subject: [PATCH 28/32] feat: Centralize SQL role assignment in
 run_create_index_scripts.sh and add assign_sql_roles.py script

---
 .../scripts/index_scripts/assign_sql_roles.py | 116 ++++++++++++++++++
 infra/scripts/process_sample_data.sh          |  15 +--
 infra/scripts/run_create_index_scripts.sh     |  28 ++++-
 3 files changed, 146 insertions(+), 13 deletions(-)
 create mode 100644 infra/scripts/index_scripts/assign_sql_roles.py

diff --git a/infra/scripts/index_scripts/assign_sql_roles.py b/infra/scripts/index_scripts/assign_sql_roles.py
new file mode 100644
index 00000000..493f7a50
--- /dev/null
+++ b/infra/scripts/index_scripts/assign_sql_roles.py
@@ -0,0 +1,116 @@
+#!/usr/bin/env python3
+"""Assign SQL roles for Azure AD principals (managed identities/service principals) using Azure AD token auth.
+
+Simplified: requires --server and --database provided explicitly (no Key Vault lookup).
+Roles JSON format (single arg):
+[
+  {"clientId":"", "displayName":"Name", "role":"db_datareader"},
+  {"clientId":"", "displayName":"Name", "role":"db_datawriter"}
+]
+
+Uses pyodbc + azure-identity (AzureCliCredential)."""
+import argparse
+import json
+import struct
+import sys
+from typing import List, Dict
+
+import pyodbc
+from azure.identity import AzureCliCredential
+
+SQL_COPT_SS_ACCESS_TOKEN = 1256  # msodbcsql.h constant
+
+
+def build_sql(role_items: List[Dict]) -> str:
+    statements = []
+    for idx, item in enumerate(role_items, start=1):
+        client_id = item["clientId"].strip()
+        display_name = item["displayName"].replace("'", "''")
+        role = item["role"].strip()
+        # Construct dynamic SQL similar to prior bash script
+        stmt = f"""
+DECLARE @username{idx} nvarchar(max) = N'{display_name}';
+DECLARE @clientId{idx} uniqueidentifier = '{client_id}';
+DECLARE @sid{idx} NVARCHAR(max) = CONVERT(VARCHAR(max), CONVERT(VARBINARY(16), @clientId{idx}), 1);
+DECLARE @cmd{idx} NVARCHAR(max) = N'CREATE USER [' + @username{idx} + '] WITH SID = ' + @sid{idx} + ', TYPE = E;';
+IF NOT EXISTS (SELECT * FROM sys.database_principals WHERE name = @username{idx})
+BEGIN
+    EXEC(@cmd{idx})
+END
+EXEC sp_addrolemember '{role}', @username{idx};
+""".strip()
+        statements.append(stmt)
+    return "\n".join(statements)
+
+
+def connect_with_token(server: str, database: str, credential: AzureCliCredential):
+    token_bytes = credential.get_token("https://database.windows.net/.default").token.encode("utf-16-le")
+    # ODBC expects the access token as a length-prefixed little-endian byte structure
+    token_struct = struct.pack(f"<I{len(token_bytes)}s", len(token_bytes), token_bytes)

Date: Wed, 19 Nov 2025 12:55:53 +0000
Subject: [PATCH 29/32] Removed pre-requisite sqlcmd installation from
 Deployment Guide

---
 docs/DeploymentGuide.md | 1 -
 1 file changed, 1 deletion(-)

diff --git a/docs/DeploymentGuide.md b/docs/DeploymentGuide.md
index fecbf3dc..e29ee460 100644
--- a/docs/DeploymentGuide.md
+++ b/docs/DeploymentGuide.md
@@ -116,7 +116,6 @@ If you're not using one of the above options for opening the project, then you'l
   - [Docker Desktop](https://www.docker.com/products/docker-desktop/)
   - [Git](https://git-scm.com/downloads)
   - [Microsoft ODBC Driver 18 for SQL 
Server](https://learn.microsoft.com/en-us/sql/connect/odbc/download-odbc-driver-for-sql-server?view=sql-server-ver16) - - [sqlcmd(ODBC-Windows)](https://learn.microsoft.com/en-us/sql/tools/sqlcmd/sqlcmd-utility?view=sql-server-ver16&tabs=odbc%2Cwindows%2Cwindows-support&pivots=cs1-bash#download-and-install-sqlcmd) / [sqlcmd(Linux/Mac)](https://learn.microsoft.com/en-us/sql/linux/sql-server-linux-setup-tools?view=sql-server-ver16&tabs=redhat-install) 2. Clone the repository or download the project code via command-line: From ff09a28d740ee21ef1eea64c2b7a93f65268e897 Mon Sep 17 00:00:00 2001 From: Kanchan-Microsoft Date: Tue, 25 Nov 2025 12:59:07 +0530 Subject: [PATCH 30/32] feat: Add support for developer experience (#762) * custom templates for developer experience * updated azure_custom.yaml * updated readme * updated template name * updated keyvault prefix to lower case * changes suggested by copilot * main.json * updated the approach * updated main_custom.bicep * added comment --- azure_custom.yaml | 335 ++------------- infra/main_custom.bicep | 677 +++++++++++++++++++++---------- infra/scripts/package_webapp.ps1 | 106 +++++ infra/scripts/package_webapp.sh | 93 +++++ 4 files changed, 676 insertions(+), 535 deletions(-) create mode 100644 infra/scripts/package_webapp.ps1 create mode 100644 infra/scripts/package_webapp.sh diff --git a/azure_custom.yaml b/azure_custom.yaml index 09b79a1e..4a5b3fab 100644 --- a/azure_custom.yaml +++ b/azure_custom.yaml @@ -9,319 +9,30 @@ metadata: template: build-your-own-copilot-solution-accelerator@1.0 name: build-your-own-copilot-solution-accelerator@1.0 -hooks: - # Pre-package hook to set container registry variables - prepackage: - windows: - run: | - Write-Host "Setting up container registry variables..." -ForegroundColor Green - - # Get the ACR name from the deployed infrastructure - $acrName = azd env get-values --output json | ConvertFrom-Json | Select-Object -ExpandProperty "AZURE_CONTAINER_REGISTRY_NAME" -ErrorAction SilentlyContinue - - if ($acrName) { - Write-Host "Using deployed ACR: $acrName" -ForegroundColor Cyan - azd env set AZURE_CONTAINER_REGISTRY_ENDPOINT "$acrName.azurecr.io" - azd env set AZURE_CONTAINER_REGISTRY_NAME $acrName - } else { - Write-Host "Warning: ACR not found in environment. Make sure infrastructure is deployed first." -ForegroundColor Yellow - } - shell: pwsh - continueOnError: true - - posix: - run: | - echo "Setting up container registry variables..." - - # Get the ACR name from the deployed infrastructure - ACR_NAME=$(azd env get-values --output json | jq -r '.AZURE_CONTAINER_REGISTRY_NAME // empty') - - if [ ! -z "$ACR_NAME" ]; then - echo "Using deployed ACR: $ACR_NAME" - azd env set AZURE_CONTAINER_REGISTRY_ENDPOINT "$ACR_NAME.azurecr.io" - azd env set AZURE_CONTAINER_REGISTRY_NAME "$ACR_NAME" - else - echo "Warning: ACR not found in environment. Make sure infrastructure is deployed first." - fi - shell: sh - continueOnError: true - - # Pre-deploy hook to build and push containers - predeploy: - windows: - run: | - Write-Host "🚀 Starting container deployment process..." -ForegroundColor Green - - # Get environment variables from azd - $acrName = azd env get-value AZURE_CONTAINER_REGISTRY_NAME - $resourceGroup = azd env get-value AZURE_RESOURCE_GROUP - $webAppName = azd env get-value WEB_APP_NAME - $imageName = "byc-wa-app" - $imageTag = "latest" - - if (-not $acrName) { - Write-Host "❌ Error: AZURE_CONTAINER_REGISTRY_NAME not set. Run 'azd provision' first." 
-ForegroundColor Red - exit 1 - } - - if (-not $resourceGroup) { - Write-Host "❌ Error: AZURE_RESOURCE_GROUP not set. Run 'azd provision' first." -ForegroundColor Red - exit 1 - } - - if (-not $webAppName) { - Write-Host "❌ Error: WEB_APP_NAME not set. Run 'azd provision' first." -ForegroundColor Red - exit 1 - } - - Write-Host "📋 Configuration:" -ForegroundColor Cyan - Write-Host " ACR Name: $acrName" -ForegroundColor White - Write-Host " Resource Group: $resourceGroup" -ForegroundColor White - Write-Host " Web App: $webAppName" -ForegroundColor White - Write-Host " Image: $imageName`:$imageTag" -ForegroundColor White - - # Login to ACR - Write-Host "🔐 Logging into ACR..." -ForegroundColor Yellow - az acr login --name $acrName - - if ($LASTEXITCODE -ne 0) { - Write-Host "❌ Failed to login to ACR" -ForegroundColor Red - exit 1 - } - - # Build and push the container image - Write-Host "🏗️ Building container image..." -ForegroundColor Yellow - $fullImageName = "$acrName.azurecr.io/$imageName`:$imageTag" - docker build -f "./src/App/WebApp.Dockerfile" -t $fullImageName "./src" - - if ($LASTEXITCODE -ne 0) { - Write-Host "❌ Failed to build container image" -ForegroundColor Red - exit 1 - } - - Write-Host "📤 Pushing container image to ACR..." -ForegroundColor Yellow - docker push $fullImageName - - if ($LASTEXITCODE -ne 0) { - Write-Host "❌ Failed to push container image" -ForegroundColor Red - exit 1 - } - - # Update environment variables - Write-Host "🔧 Updating azd environment variables..." -ForegroundColor Yellow - azd env set CONTAINER_REGISTRY_HOSTNAME "$acrName.azurecr.io" - azd env set CONTAINER_IMAGE_NAME $imageName - azd env set IMAGE_TAG $imageTag - - # Configure web app ACR authentication using managed identity - Write-Host "🔑 Configuring ACR authentication for web app..." -ForegroundColor Yellow - $webappIdentity = az webapp identity show --name $webAppName --resource-group $resourceGroup --query principalId --output tsv - - if (-not $webappIdentity -or $webappIdentity -eq "null") { - Write-Host "🔄 Enabling managed identity for web app..." -ForegroundColor Yellow - $webappIdentity = az webapp identity assign --name $webAppName --resource-group $resourceGroup --query principalId --output tsv - } - - Write-Host " Web app identity: $webappIdentity" -ForegroundColor White - - # Assign AcrPull role to web app managed identity - Write-Host "🔐 Assigning AcrPull role to web app..." -ForegroundColor Yellow - $acrResourceId = az acr show --name $acrName --resource-group $resourceGroup --query id --output tsv - az role assignment create --assignee $webappIdentity --role AcrPull --scope $acrResourceId - if ($LASTEXITCODE -ne 0) { - Write-Host "⚠️ Role assignment may already exist" -ForegroundColor Yellow - } - - # Configure web app to use ACR with managed identity - Write-Host "🔧 Configuring web app container settings..." -ForegroundColor Yellow - az webapp config appsettings set --name $webAppName --resource-group $resourceGroup --settings ` - DOCKER_REGISTRY_SERVER_URL="https://$acrName.azurecr.io" ` - DOCKER_ENABLE_CI=true - - # Configure web app to use managed identity for ACR authentication - Write-Host "🔐 Enabling ACR managed identity authentication..." -ForegroundColor Yellow - az webapp config set --name $webAppName --resource-group $resourceGroup --acr-use-identity true - - # Update web app to use the new container image - Write-Host "🚀 Updating web app container image..." 
-ForegroundColor Yellow - $dockerImage = "DOCKER|$fullImageName" - - # Use cmd to avoid PowerShell pipe interpretation issues - $cmd = "az webapp config set --name `"$webAppName`" --resource-group `"$resourceGroup`" --linux-fx-version `"$dockerImage`"" - cmd /c $cmd - - if ($LASTEXITCODE -ne 0) { - Write-Host "❌ Failed to update web app configuration" -ForegroundColor Red - exit 1 - } - - # Restart the web app to ensure it picks up the new configuration - Write-Host "🔄 Restarting web app..." -ForegroundColor Yellow - az webapp restart --name $webAppName --resource-group $resourceGroup - - Write-Host "✅ Container deployment completed successfully!" -ForegroundColor Green - Write-Host "🌐 Web app URL: https://$webAppName.azurewebsites.net" -ForegroundColor Cyan - Write-Host "📦 Container image: $fullImageName" -ForegroundColor Cyan - - Write-Host "" - Write-Host "⏳ The web app may take a few minutes to start up with the new container..." -ForegroundColor Yellow - Write-Host " You can monitor the logs with:" -ForegroundColor White - Write-Host " az webapp log tail --name $webAppName --resource-group $resourceGroup" -ForegroundColor Cyan - shell: pwsh - continueOnError: false - - posix: - run: | - echo "🚀 Starting container deployment process..." - - # Get environment variables from azd - ACR_NAME=$(azd env get-value AZURE_CONTAINER_REGISTRY_NAME) - RESOURCE_GROUP=$(azd env get-value AZURE_RESOURCE_GROUP) - WEB_APP_NAME=$(azd env get-value WEB_APP_NAME) - IMAGE_TAG="latest" - IMAGE_NAME="byc-wa-app" - - if [ -z "$ACR_NAME" ]; then - echo "❌ Error: AZURE_CONTAINER_REGISTRY_NAME not set. Run 'azd provision' first." - exit 1 - fi - - if [ -z "$RESOURCE_GROUP" ]; then - echo "❌ Error: AZURE_RESOURCE_GROUP not set. Run 'azd provision' first." - exit 1 - fi - - if [ -z "$WEB_APP_NAME" ]; then - echo "❌ Error: WEB_APP_NAME not set. Run 'azd provision' first." - exit 1 - fi - - echo "📋 Configuration:" - echo " ACR Name: $ACR_NAME" - echo " Resource Group: $RESOURCE_GROUP" - echo " Web App: $WEB_APP_NAME" - echo " Image: $IMAGE_NAME:$IMAGE_TAG" - - # Login to ACR - echo "🔐 Logging into ACR..." - az acr login --name $ACR_NAME - - # Build and push the container image - echo "🏗️ Building container image..." - FULL_IMAGE_NAME="$ACR_NAME.azurecr.io/$IMAGE_NAME:$IMAGE_TAG" - docker build -f "./src/App/WebApp.Dockerfile" -t $FULL_IMAGE_NAME "./src" - - echo "📤 Pushing container image to ACR..." - docker push $FULL_IMAGE_NAME - - # Update environment variables - echo "🔧 Updating azd environment variables..." - azd env set CONTAINER_REGISTRY_HOSTNAME "$ACR_NAME.azurecr.io" - azd env set CONTAINER_IMAGE_NAME $IMAGE_NAME - azd env set IMAGE_TAG $IMAGE_TAG - - # Configure web app ACR authentication using managed identity - echo "🔑 Configuring ACR authentication for web app..." - WEBAPP_IDENTITY=$(az webapp identity show --name $WEB_APP_NAME --resource-group $RESOURCE_GROUP --query principalId --output tsv) - - if [ -z "$WEBAPP_IDENTITY" ] || [ "$WEBAPP_IDENTITY" = "null" ]; then - echo "🔄 Enabling managed identity for web app..." - WEBAPP_IDENTITY=$(az webapp identity assign --name $WEB_APP_NAME --resource-group $RESOURCE_GROUP --query principalId --output tsv) - fi - - echo " Web app identity: $WEBAPP_IDENTITY" - - # Assign AcrPull role to web app managed identity - echo "🔐 Assigning AcrPull role to web app..." 
- ACR_RESOURCE_ID=$(az acr show --name $ACR_NAME --resource-group $RESOURCE_GROUP --query id --output tsv) - az role assignment create --assignee $WEBAPP_IDENTITY --role AcrPull --scope $ACR_RESOURCE_ID || echo "⚠️ Role assignment may already exist" - - # Configure web app to use ACR with managed identity - echo "🔧 Configuring web app container settings..." - az webapp config appsettings set --name $WEB_APP_NAME --resource-group $RESOURCE_GROUP --settings \ - DOCKER_REGISTRY_SERVER_URL="https://$ACR_NAME.azurecr.io" \ - DOCKER_ENABLE_CI=true - - # Configure web app to use managed identity for ACR authentication - echo "🔐 Enabling ACR managed identity authentication..." - az webapp config set --name $WEB_APP_NAME --resource-group $RESOURCE_GROUP --acr-use-identity true - - # Update web app to use the new container image - echo "🚀 Updating web app container image..." - DOCKER_IMAGE="DOCKER|$FULL_IMAGE_NAME" - az webapp config set --name $WEB_APP_NAME --resource-group $RESOURCE_GROUP --linux-fx-version "$DOCKER_IMAGE" - - # Restart the web app to ensure it picks up the new configuration - echo "🔄 Restarting web app..." - az webapp restart --name $WEB_APP_NAME --resource-group $RESOURCE_GROUP - - echo "✅ Container deployment completed successfully!" - echo "🌐 Web app URL: https://$WEB_APP_NAME.azurewebsites.net" - echo "📦 Container image: $FULL_IMAGE_NAME" - - echo "" - echo "⏳ The web app may take a few minutes to start up with the new container..." - echo " You can monitor the logs with:" - echo " az webapp log tail --name $WEB_APP_NAME --resource-group $RESOURCE_GROUP" - shell: sh - continueOnError: false - - postprovision: - windows: - run: | - Write-Host "Deployment completed successfully!" -ForegroundColor Green - Write-Host "Web app URL: " -NoNewline - Write-Host "$env:WEB_APP_URL" -ForegroundColor Cyan - Write-Host "" - Write-Host "Container Registry: " -NoNewline - Write-Host "$env:AZURE_CONTAINER_REGISTRY_NAME.azurecr.io" -ForegroundColor Cyan - Write-Host "" - Write-Host "Next step:" -ForegroundColor Yellow - Write-Host " Run the following command to grant permissions and load sample data:" -ForegroundColor White - Write-Host " bash ./infra/scripts/process_sample_data.sh $env:AZURE_RESOURCE_GROUP" -ForegroundColor Cyan - Write-Host "" - shell: pwsh - continueOnError: false - interactive: true - - posix: - run: | - echo "Deployment completed successfully!" - echo "Web app URL: $WEB_APP_URL" - echo "" - echo "Container Registry: $AZURE_CONTAINER_REGISTRY_NAME.azurecr.io" - echo "" - echo "Next step:" - echo " Run the following command to grant permissions and load sample data:" - echo " bash ./infra/scripts/process_sample_data.sh $AZURE_RESOURCE_GROUP" - echo "" - shell: sh - continueOnError: false - interactive: true - - postdeploy: - windows: - run: | - Write-Host "✅ Deployment completed! Container deployment was handled by predeploy hook." -ForegroundColor Green - $webAppUrl = azd env get-value WEB_APP_URL - Write-Host "🌐 Web app URL: $webAppUrl" -ForegroundColor Cyan - shell: pwsh - continueOnError: true - - posix: - run: | - echo "✅ Deployment completed! Container deployment was handled by predeploy hook." 
- WEB_APP_URL=$(azd env get-value WEB_APP_URL) - echo "🌐 Web app URL: $WEB_APP_URL" - shell: sh - continueOnError: true - # Infrastructure configuration infra: - provider: bicep - path: infra + path: ./infra module: main parameters: - containerRegistryHostname: ${CONTAINER_REGISTRY_HOSTNAME=""} - containerImageName: ${CONTAINER_IMAGE_NAME="byc-wa-app"} - imageTag: ${IMAGE_TAG="latest"} \ No newline at end of file + solutionName: bs-azdtest + cosmosLocation: eastus2 + baseUrl: 'https://github.com/microsoft/Build-your-own-copilot-Solution-Accelerator' + +services: + webapp: + project: ./src/App + language: py + host: appservice + dist: ./dist + hooks: + prepackage: + windows: + shell: pwsh + run: ../../infra/scripts/package_webapp.ps1 + interactive: true + continueOnError: false + posix: + shell: sh + run: bash ../../infra/scripts/package_webapp.sh + interactive: true + continueOnError: false \ No newline at end of file diff --git a/infra/main_custom.bicep b/infra/main_custom.bicep index f089dbf2..160358b1 100644 --- a/infra/main_custom.bicep +++ b/infra/main_custom.bicep @@ -1,5 +1,4 @@ // ========== main_custom.bicep ========== // -// Developer-friendly version with customer ACR and user permissions for local debugging targetScope = 'resourceGroup' @minLength(3) @@ -34,11 +33,13 @@ param gptModelVersion string = '2024-07-18' @description('Optional. Version of the GPT model to deploy.') param embeddingModelVersion string = '2' -@description('Optional. API version for the Azure OpenAI service.') -param azureOpenaiAPIVersion string = '2025-04-01-preview' +@description('Optional. API version for the Azure AI Services.') +param azureAIServicesAPIVersion string = '2025-04-01-preview' @minValue(10) @description('Optional. Capacity of the GPT deployment:') +// You can increase this, but capacity is limited per model/region, so you will get errors if you go over +// https://learn.microsoft.com/en-us/azure/ai-services/openai/quotas-limits param gptModelCapacity int = 200 @minLength(1) @@ -52,6 +53,7 @@ param embeddingModel string = 'text-embedding-ada-002' @description('Optional. Capacity of the Embedding Model deployment') param embeddingDeploymentCapacity int = 80 +//restricting to these regions because assistants api for gpt-4o-mini is available only in these regions @allowed([ 'australiaeast' 'eastus' @@ -86,15 +88,15 @@ param azureAiServiceLocation string 'uksouth' ]) @metadata({ azd: { type: 'location' } }) -@description('Required. Azure region for all services. Regions are restricted to guarantee compatibility with paired regions and replica locations for data redundancy and failover scenarios.') +@description('Required. Azure region for all services. Regions are restricted to guarantee compatibility with paired regions and replica locations for data redundancy and failover scenarios based on articles [Azure regions list](https://learn.microsoft.com/azure/reliability/regions-list) and [Azure Database for MySQL Flexible Server - Azure Regions](https://learn.microsoft.com/azure/mysql/flexible-server/overview#azure-regions).') param location string var solutionLocation = empty(location) ? resourceGroup().location : location @maxLength(5) -@description('Optional. A unique token for the solution. This is used to ensure resource names are unique for global resources.') +@description('Optional. A unique token for the solution. This is used to ensure resource names are unique for global resources. 
Defaults to a 5-character substring of the unique string generated from the subscription ID, resource group name, and solution name.') param solutionUniqueToken string = substring(uniqueString(subscription().id, resourceGroup().name, solutionName), 0, 5) -var solutionSuffix = toLower(trim(replace( +var solutionSuffix= toLower(trim(replace( replace( replace(replace(replace(replace('${solutionName}${solutionUniqueToken}', '-', ''), '_', ''), '.', ''), '/', ''), ' ', @@ -107,29 +109,27 @@ var solutionSuffix = toLower(trim(replace( @description('Optional. Enable private networking for applicable resources, aligned with the Well Architected Framework recommendations. Defaults to false.') param enablePrivateNetworking bool = false -@description('Optional. Enable monitoring applicable resources, aligned with the Well Architected Framework recommendations.') +@description('Optional. Enable monitoring applicable resources, aligned with the Well Architected Framework recommendations. This setting enables Application Insights and Log Analytics and configures all the resources applicable resources to send logs. Defaults to false.') param enableMonitoring bool = false -@description('Optional. Enable scalability for applicable resources, aligned with the Well Architected Framework recommendations.') +@description('Optional. Enable scalability for applicable resources, aligned with the Well Architected Framework recommendations. Defaults to false.') param enableScalability bool = false @description('Optional. Enable/Disable usage telemetry for module.') param enableTelemetry bool = true -@description('Optional. Enable redundancy for applicable resources, aligned with the Well Architected Framework recommendations.') +@description('Optional. Enable redundancy for applicable resources, aligned with the Well Architected Framework recommendations. Defaults to false.') param enableRedundancy bool = false -// ========== DEVELOPER EXPERIENCE: Customer ACR Configuration ========== // -var acrName = 'acr${solutionSuffix}' -var containerRegistryHostnameActual = empty(containerRegistryHostname) ? '${acrName}.azurecr.io' : containerRegistryHostname -@description('Customer Container Registry hostname where the docker images are located.') -param containerRegistryHostname string = '' +//The following parameters are commented out because container registry details are handled dynamically +// @description('Optional. The Container Registry hostname where the docker images for the frontend are located.') +// param containerRegistryHostname string = 'bycwacontainerreg.azurecr.io' -@description('Optional. The Container Image Name to deploy on the webapp.') -param containerImageName string = 'byc-wa-app' +// @description('Optional. The Container Image Name to deploy on the webapp.') +// param containerImageName string = 'byc-wa-app' @description('Optional. The Container Image Tag to deploy on the webapp.') -param imageTag string = 'latest' +param imageTag string = 'latest_waf_2025-09-18_794' @description('Optional. Resource ID of an existing Foundry project') param existingFoundryProjectResourceId string = '' @@ -137,7 +137,9 @@ param existingFoundryProjectResourceId string = '' @description('Optional. Enable purge protection for the Key Vault') param enablePurgeProtection bool = false -// Application configuration variables +// Load the abbrevations file required to name the azure resources. 
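+// (Left commented out below: resource names in this template are built directly from solutionSuffix, for example 'log-${solutionSuffix}'.)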
+//var abbrs = loadJsonContent('./abbreviations.json') + var appEnvironment = 'Prod' var azureSearchIndex = 'transcripts_index' var azureSearchUseSemanticSearch = 'True' @@ -157,24 +159,23 @@ var azureSearchQueryType = 'simple' var azureSearchVectorFields = 'contentVector' var azureSearchPermittedGroupsField = '' var azureSearchStrictness = '3' -var azureSearchEnableInDomain = 'False' +var azureSearchEnableInDomain = 'False' // Set to 'True' if you want to enable in-domain search var azureCosmosDbEnableFeedback = 'True' var useInternalStream = 'True' var useAIProjectClientFlag = 'False' -var sqlServerFqdn = 'sql-${solutionSuffix}${environment().suffixes.sqlServerHostname}' +var sqlServerFqdn = 'sql-${solutionSuffix}.database.windows.net' -@description('Optional. Size of the Jumpbox Virtual Machine when created.') +@description('Optional. Size of the Jumpbox Virtual Machine when created. Set to custom value if enablePrivateNetworking is true.') param vmSize string? -@description('Optional. Admin username for the Jumpbox Virtual Machine.') +@description('Optional. Admin username for the Jumpbox Virtual Machine. Set to custom value if enablePrivateNetworking is true.') @secure() param vmAdminUsername string? -@description('Optional. Admin password for the Jumpbox Virtual Machine.') +@description('Optional. Admin password for the Jumpbox Virtual Machine. Set to custom value if enablePrivateNetworking is true.') @secure() param vmAdminPassword string? -// System prompts var functionAppSqlPrompt = '''Generate a valid T-SQL query to find {query} for tables and columns provided below: 1. Table: Clients Columns: ClientId, Client, Email, Occupation, MaritalStatus, Dependents @@ -197,11 +198,13 @@ var functionAppSqlPrompt = '''Generate a valid T-SQL query to find {query} for t ALWAYS use ClientId = {clientid} in the query filter. ALWAYS select Client Name (Column: Client) in the query. Query filters are IMPORTANT. Add filters like AssetType, AssetDate, etc. if needed. - When answering scheduling or time-based meeting questions, always use the StartTime column from ClientMeetings table. + When answering scheduling or time-based meeting questions, always use the StartTime column from ClientMeetings table. Use correct logic to return the most recent past meeting (last/previous) or the nearest future meeting (next/upcoming), and ensure only StartTime column is used for meeting timing comparisons. + For asset values: If the question is about "asset value", "total asset value", "portfolio value", or "AUM" → ALWAYS return the SUM of the latest investments (do not return individual rows). If the question is about "current asset value" or "current investment value" → return all latest investments without SUM. + For trend queries: If the question contains "how did change", "over the last", "trend", or "progression" → return time series data for the requested period with SUM for each time period and show chronological progression. Only return the generated SQL query. Do not return anything else.''' var functionAppCallTranscriptSystemPrompt = '''You are an assistant who supports wealth advisors in preparing for client meetings. - You have access to the client's past meeting call transcripts. + You have access to the client’s past meeting call transcripts. When answering questions, especially summary requests, provide a detailed and structured response that includes key topics, concerns, decisions, and trends. 
If no data is available, state 'No relevant data found for previous meetings.''' @@ -211,7 +214,7 @@ var functionAppStreamTextSystemPrompt = '''The currently selected client's name If no data is found, respond with 'No data found for that client.' Remove any client identifiers from the final response. Always send clientId as '{client_id}'.''' -// Region configuration +// Replica regions list based on article in [Azure regions list](https://learn.microsoft.com/azure/reliability/regions-list) and [Enhance resilience by replicating your Log Analytics workspace across regions](https://learn.microsoft.com/azure/azure-monitor/logs/workspace-replication#supported-regions) for supported regions for Log Analytics Workspace. var replicaRegionPairs = { australiaeast: 'australiasoutheast' centralus: 'westus' @@ -229,8 +232,9 @@ var replicaLocation = replicaRegionPairs[resourceGroup().location] @description('Optional. The tags to apply to all deployed Azure resources.') param tags resourceInput<'Microsoft.Resources/resourceGroups@2025-04-01'>.tags = {} +// Region pairs list based on article in [Azure Database for MySQL Flexible Server - Azure Regions](https://learn.microsoft.com/azure/mysql/flexible-server/overview#azure-regions) for supported high availability regions for CosmosDB. var cosmosDbZoneRedundantHaRegionPairs = { - australiaeast: 'uksouth' + australiaeast: 'uksouth' //'southeastasia' centralus: 'eastus2' eastasia: 'southeastasia' eastus: 'centralus' @@ -249,9 +253,13 @@ var allTags = union( tags ) +// Paired location calculated based on 'location' parameter. This location will be used by applicable resources if `enableScalability` is set to `true` var cosmosDbHaLocation = cosmosDbZoneRedundantHaRegionPairs[resourceGroup().location] + +// Extracts subscription, resource group, and workspace name from the resource ID when using an existing Log Analytics workspace var useExistingLogAnalytics = !empty(existingLogAnalyticsWorkspaceId) -var logAnalyticsWorkspaceResourceId = useExistingLogAnalytics ? existingLogAnalyticsWorkspaceId : logAnalyticsWorkspace!.outputs.resourceId + +var logAnalyticsWorkspaceResourceId = useExistingLogAnalytics ? existingLogAnalyticsWorkspaceId : logAnalyticsWorkspace!.outputs.resourceId @description('Tag, Created by user name') param createdBy string = contains(deployer(), 'userPrincipalName')? split(deployer().userPrincipalName, '@')[0]: deployer().objectId @@ -263,7 +271,7 @@ resource resourceGroupTags 'Microsoft.Resources/tags@2021-04-01' = { tags: { ...resourceGroup().tags ...tags - TemplateName: 'Client Advisor - Developer Experience' + TemplateName: 'Client Advisor- Developer Experience' Type: enablePrivateNetworking ? 
'WAF' : 'Non-WAF' CreatedBy: createdBy DeploymentName: deployment().name @@ -272,6 +280,8 @@ resource resourceGroupTags 'Microsoft.Resources/tags@2021-04-01' = { } // ========== Log Analytics Workspace ========== // +// WAF best practices for Log Analytics: https://learn.microsoft.com/en-us/azure/well-architected/service-guides/azure-log-analytics +// WAF PSRules for Log Analytics: https://azure.github.io/PSRule.Rules.Azure/en/rules/resource/#azure-monitor-logs var logAnalyticsWorkspaceResourceName = 'log-${solutionSuffix}' module logAnalyticsWorkspace 'br/public:avm/res/operational-insights/workspace:0.12.0' = if (enableMonitoring && !useExistingLogAnalytics) { name: take('avm.res.operational-insights.workspace.${logAnalyticsWorkspaceResourceName}', 64) @@ -284,19 +294,57 @@ module logAnalyticsWorkspace 'br/public:avm/res/operational-insights/workspace:0 dataRetention: 365 features: { enableLogAccessUsingOnlyResourcePermissions: true } diagnosticSettings: [{ useThisWorkspace: true }] - dailyQuotaGb: enableRedundancy ? 10 : null + // WAF aligned configuration for Redundancy + dailyQuotaGb: enableRedundancy ? 10 : null //WAF recommendation: 10 GB per day is a good starting point for most workloads replication: enableRedundancy ? { enabled: true location: replicaLocation } : null + // WAF aligned configuration for Private Networking publicNetworkAccessForIngestion: enablePrivateNetworking ? 'Disabled' : 'Enabled' publicNetworkAccessForQuery: enablePrivateNetworking ? 'Disabled' : 'Enabled' + dataSources: enablePrivateNetworking + ? [ + { + tags: tags + eventLogName: 'Application' + eventTypes: [ + { + eventType: 'Error' + } + { + eventType: 'Warning' + } + { + eventType: 'Information' + } + ] + kind: 'WindowsEvent' + name: 'applicationEvent' + } + { + counterName: '% Processor Time' + instanceName: '*' + intervalSeconds: 60 + kind: 'WindowsPerformanceCounter' + name: 'windowsPerfCounter1' + objectName: 'Processor' + } + { + kind: 'IISLogs' + name: 'sampleIISLog1' + state: 'OnPremiseEnabled' + } + ] + : null } } // ========== Application Insights ========== // +// WAF best practices for Application Insights: https://learn.microsoft.com/en-us/azure/well-architected/service-guides/application-insights +// WAF PSRules for Application Insights: https://azure.github.io/PSRule.Rules.Azure/en/rules/resource/#application-insights var applicationInsightsResourceName = 'appi-${solutionSuffix}' module applicationInsights 'br/public:avm/res/insights/component:0.6.0' = if (enableMonitoring) { name: take('avm.res.insights.component.${applicationInsightsResourceName}', 64) @@ -309,12 +357,14 @@ module applicationInsights 'br/public:avm/res/insights/component:0.6.0' = if (en kind: 'web' disableIpMasking: false flowType: 'Bluefield' + // WAF aligned configuration for Monitoring workspaceResourceId: enableMonitoring ? logAnalyticsWorkspaceResourceId : '' diagnosticSettings: enableMonitoring ? 
[{ workspaceResourceId: logAnalyticsWorkspaceResourceId }] : null } } // ========== User Assigned Identity ========== // +// WAF best practices for identity and access management: https://learn.microsoft.com/en-us/azure/well-architected/security/identity-access var userAssignedIdentityResourceName = 'id-${solutionSuffix}' module userAssignedIdentity 'br/public:avm/res/managed-identity/user-assigned-identity:0.4.1' = { name: take('avm.res.managed-identity.user-assigned-identity.${userAssignedIdentityResourceName}', 64) @@ -327,6 +377,7 @@ module userAssignedIdentity 'br/public:avm/res/managed-identity/user-assigned-id } // ========== SQL Operations User Assigned Identity ========== // +// Dedicated identity for backend SQL operations with limited permissions (db_datareader, db_datawriter) var sqlUserAssignedIdentityResourceName = 'id-sql-${solutionSuffix}' module sqlUserAssignedIdentity 'br/public:avm/res/managed-identity/user-assigned-identity:0.4.1' = { name: take('avm.res.managed-identity.user-assigned-identity.${sqlUserAssignedIdentityResourceName}', 64) @@ -338,61 +389,112 @@ module sqlUserAssignedIdentity 'br/public:avm/res/managed-identity/user-assigned } } -// ========== DEVELOPER EXPERIENCE: Azure Container Registry ========== // -module containerRegistry 'br/public:avm/res/container-registry/registry:0.5.0' = { - name: take('avm.res.container-registry.registry.${acrName}', 64) +// ========== Virtual Network and Networking Components ========== // + +// Virtual Network with NSGs and Subnets +module virtualNetwork 'modules/virtualNetwork.bicep' = if (enablePrivateNetworking) { + name: take('module.virtualNetwork.${solutionSuffix}', 64) params: { - name: acrName +name: 'vnet-${solutionSuffix}' + addressPrefixes: ['10.0.0.0/20'] // 4096 addresses (enough for 8 /23 subnets or 16 /24) location: solutionLocation - tags: tags + tags: allTags + logAnalyticsWorkspaceId: logAnalyticsWorkspaceResourceId + resourceSuffix: solutionSuffix enableTelemetry: enableTelemetry - acrSku: 'Basic' - acrAdminUserEnabled: false - publicNetworkAccess: enablePrivateNetworking ? 'Disabled' : 'Enabled' - roleAssignments: [ - { - principalId: userAssignedIdentity.outputs.principalId - roleDefinitionIdOrName: 'AcrPush' - principalType: 'ServicePrincipal' - } - { - principalId: userAssignedIdentity.outputs.principalId - roleDefinitionIdOrName: 'AcrPull' - principalType: 'ServicePrincipal' - } - { - principalId: deployer().objectId - roleDefinitionIdOrName: 'AcrPush' - principalType: 'User' - } + } +} +// Azure Bastion Host +var bastionHostName = 'bas-${solutionSuffix}' +module bastionHost 'br/public:avm/res/network/bastion-host:0.6.1' = if (enablePrivateNetworking) { + name: take('avm.res.network.bastion-host.${bastionHostName}', 64) + params: { + name: bastionHostName + skuName: 'Standard' + location: solutionLocation + virtualNetworkResourceId: virtualNetwork!.outputs.resourceId + diagnosticSettings: [ { - principalId: deployer().objectId - roleDefinitionIdOrName: 'AcrPull' - principalType: 'User' + name: 'bastionDiagnostics' + workspaceResourceId: logAnalyticsWorkspaceResourceId + logCategoriesAndGroups: [ + { + categoryGroup: 'allLogs' + enabled: true + } + ] } ] - diagnosticSettings: enableMonitoring ? 
[{ workspaceResourceId: logAnalyticsWorkspaceResourceId }] : null + tags: tags + enableTelemetry: enableTelemetry + publicIPAddressObject: { + name: 'pip-${bastionHostName}' + zones: [] + } } } -// ========== Virtual Network and Networking Components ========== // -module virtualNetwork 'modules/virtualNetwork.bicep' = if (enablePrivateNetworking) { - name: take('module.virtualNetwork.${solutionSuffix}', 64) +// Jumpbox Virtual Machine +var jumpboxVmName = take('vm-jumpbox-${solutionSuffix}', 15) +module jumpboxVM 'br/public:avm/res/compute/virtual-machine:0.15.0' = if (enablePrivateNetworking) { + name: take('avm.res.compute.virtual-machine.${jumpboxVmName}', 64) params: { - name: 'vnet-${solutionSuffix}' - addressPrefixes: ['10.0.0.0/20'] + name: take(jumpboxVmName, 15) // Shorten VM name to 15 characters to avoid Azure limits + vmSize: vmSize ?? 'Standard_DS2_v2' location: solutionLocation - tags: allTags - logAnalyticsWorkspaceId: logAnalyticsWorkspaceResourceId - resourceSuffix: solutionSuffix + adminUsername: vmAdminUsername ?? 'JumpboxAdminUser' + adminPassword: vmAdminPassword ?? 'JumpboxAdminP@ssw0rd1234!' + tags: tags + zone: 0 + imageReference: { + offer: 'WindowsServer' + publisher: 'MicrosoftWindowsServer' + sku: '2019-datacenter' + version: 'latest' + } + osType: 'Windows' + osDisk: { + name: 'osdisk-${jumpboxVmName}' + managedDisk: { + storageAccountType: 'Standard_LRS' + } + } + encryptionAtHost: false // Some Azure subscriptions do not support encryption at host + nicConfigurations: [ + { + name: 'nic-${jumpboxVmName}' + ipConfigurations: [ + { + name: 'ipconfig1' + subnetResourceId: virtualNetwork!.outputs.jumpboxSubnetResourceId + } + ] + diagnosticSettings: [ + { + name: 'jumpboxDiagnostics' + workspaceResourceId: logAnalyticsWorkspaceResourceId + logCategoriesAndGroups: [ + { + categoryGroup: 'allLogs' + enabled: true + } + ] + metricCategories: [ + { + category: 'AllMetrics' + enabled: true + } + ] + } + ] + } + ] enableTelemetry: enableTelemetry } } - // ========== Private DNS Zones ========== // var privateDnsZones = [ 'privatelink.cognitiveservices.azure.com' - 'privatelink.openai.azure.com' 'privatelink.services.ai.azure.com' 'privatelink.azurewebsites.net' 'privatelink.blob.${environment().suffixes.storage}' @@ -402,30 +504,34 @@ var privateDnsZones = [ 'privatelink.vaultcore.azure.net' 'privatelink${environment().suffixes.sqlServerHostname}' 'privatelink.search.windows.net' - 'privatelink.azurecr.io' ] +// DNS Zone Index Constants var dnsZoneIndex = { cognitiveServices: 0 - openAI: 1 - aiServices: 2 - appService: 3 - storageBlob: 4 - storageQueue: 5 - storageFile: 6 - cosmosDB: 7 - keyVault: 8 - sqlServer: 9 - searchService: 10 - containerRegistry: 11 + aiServices: 1 + appService: 2 + storageBlob: 3 + storageQueue: 4 + storageFile: 5 + cosmosDB: 6 + keyVault: 7 + sqlServer: 8 + searchService: 9 } +// List of DNS zone indices that correspond to AI-related services. 
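+// When an existing Foundry project is supplied (existingFoundryProjectResourceId), the zone deployment below skips these AI-related indices.
+// For example, avmPrivateDnsZones[dnsZoneIndex.cognitiveServices] corresponds to 'privatelink.cognitiveservices.azure.com' in the list above.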
var aiRelatedDnsZoneIndices = [ dnsZoneIndex.cognitiveServices - dnsZoneIndex.openAI dnsZoneIndex.aiServices ] + +// =================================================== +// DEPLOY PRIVATE DNS ZONES +// - Deploys all zones if no existing Foundry project is used +// - Excludes AI-related zones when using with an existing Foundry project +// =================================================== @batchSize(5) module avmPrivateDnsZones 'br/public:avm/res/network/private-dns-zone:0.7.1' = [ for (zone, i) in privateDnsZones: if (enablePrivateNetworking && (empty(existingFoundryProjectResourceId) || !contains(aiRelatedDnsZoneIndices, i))) { @@ -444,7 +550,7 @@ module avmPrivateDnsZones 'br/public:avm/res/network/private-dns-zone:0.7.1' = [ } ] -// ========== Key Vault Module ========== // +// ==========Key Vault Module ========== // var keyVaultName = 'kv-${solutionSuffix}' module keyvault 'br/public:avm/res/key-vault/vault:0.12.1' = { name: take('avm.res.key-vault.vault.${keyVaultName}', 64) @@ -465,6 +571,7 @@ module keyvault 'br/public:avm/res/key-vault/vault:0.12.1' = { enablePurgeProtection: enablePurgeProtection softDeleteRetentionInDays: 7 diagnosticSettings: enableMonitoring ? [{ workspaceResourceId: logAnalyticsWorkspaceResourceId }] : [] + // WAF aligned configuration for Private Networking privateEndpoints: enablePrivateNetworking ? [ { @@ -480,6 +587,7 @@ module keyvault 'br/public:avm/res/key-vault/vault:0.12.1' = { } ] : [] + // WAF aligned configuration for Role-based Access Control roleAssignments: [ { principalId: userAssignedIdentity.outputs.principalId @@ -491,32 +599,47 @@ module keyvault 'br/public:avm/res/key-vault/vault:0.12.1' = { principalType: 'ServicePrincipal' roleDefinitionIdOrName: 'Key Vault Secrets User' } - // DEVELOPER EXPERIENCE: Add deployer permissions - { - principalId: deployer().objectId - principalType: 'User' - roleDefinitionIdOrName: 'Key Vault Administrator' - } ] secrets: [ - { - name: 'SQLDB-SERVER' - value: sqlServerFqdn - } - { - name: 'SQLDB-DATABASE' - value: sqlDbName - } - { - name: 'AZURE-OPENAI-PREVIEW-API-VERSION' - value: azureOpenaiAPIVersion - } + { + name: 'SQLDB-SERVER' + value: sqlServerFqdn + } + { + name: 'SQLDB-DATABASE' + value: sqlDbName + } + { + name: 'AZURE-OPENAI-PREVIEW-API-VERSION' + value: azureAIServicesAPIVersion + } + { + name: 'AZURE-OPENAI-ENDPOINT' + value: aiFoundryAiServices.outputs.endpoint + } + { + name: 'AZURE-OPENAI-EMBEDDING-MODEL' + value: embeddingModel + } + { + name: 'AZURE-SEARCH-INDEX' + value: azureSearchIndex + } + { + name: 'AZURE-SEARCH-ENDPOINT' + value: 'https://${aiSearchName}.search.windows.net' + } + { + name: 'AZURE-AI-AGENT-ENDPOINT' + value: aiFoundryAiServices.outputs.aiProjectInfo.apiEndpoint + } ] enableTelemetry: enableTelemetry } } // ========== AI Foundry: AI Services ========== // +// WAF best practices for Open AI: https://learn.microsoft.com/en-us/azure/well-architected/service-guides/azure-openai var useExistingAiFoundryAiProject = !empty(existingFoundryProjectResourceId) var aiFoundryAiServicesSubscriptionId = useExistingAiFoundryAiProject @@ -531,7 +654,10 @@ var aiFoundryAiServicesResourceName = useExistingAiFoundryAiProject var aiFoundryAiProjectResourceName = useExistingAiFoundryAiProject ? 
split(existingFoundryProjectResourceId, '/')[10] : 'proj-${solutionSuffix}' +// AI Project resource id: /subscriptions//resourceGroups//providers/Microsoft.CognitiveServices/accounts//projects/ +// NOTE: Required version 'Microsoft.CognitiveServices/accounts@2024-04-01-preview' not available in AVM +// var aiFoundryAiServicesResourceName = 'aif-${solutionSuffix}' var aiFoundryAiServicesAiProjectResourceName = 'proj-${solutionSuffix}' var aiFoundryAIservicesEnabled = true var aiFoundryAiServicesModelDeployment = { @@ -568,13 +694,15 @@ module aiFoundryAiServices 'modules/ai-services.bicep' = if (aiFoundryAIservices kind: 'AIServices' disableLocalAuth: true customSubDomainName: aiFoundryAiServicesResourceName - apiProperties: {} + apiProperties: { + //staticsEnabled: false + } networkAcls: { defaultAction: 'Allow' virtualNetworkRules: [] ipRules: [] } - managedIdentities: { userAssignedResourceIds: [userAssignedIdentity!.outputs.resourceId] } + managedIdentities: { userAssignedResourceIds: [userAssignedIdentity!.outputs.resourceId] } //To create accounts or projects, you must enable a managed identity on your resource roleAssignments: [ { roleDefinitionIdOrName: '53ca6127-db72-4b80-b1b0-d745d6d5456d' // Azure AI User @@ -591,23 +719,8 @@ module aiFoundryAiServices 'modules/ai-services.bicep' = if (aiFoundryAIservices principalId: userAssignedIdentity.outputs.principalId principalType: 'ServicePrincipal' } - // DEVELOPER EXPERIENCE: Add deployer permissions for local debugging - { - roleDefinitionIdOrName: '5e0bd9bd-7b93-4f28-af87-19fc36ad61bd' // Cognitive Services OpenAI User - principalId: deployer().objectId - principalType: 'User' - } - { - roleDefinitionIdOrName: '64702f94-c441-49e6-a78b-ef80e0188fee' // Azure AI Developer - principalId: deployer().objectId - principalType: 'User' - } - { - roleDefinitionIdOrName: '53ca6127-db72-4b80-b1b0-d745d6d5456d' // Azure AI User - principalId: deployer().objectId - principalType: 'User' - } ] + // WAF aligned configuration for Monitoring diagnosticSettings: enableMonitoring ? [{ workspaceResourceId: logAnalyticsWorkspaceResourceId }] : null publicNetworkAccess: enablePrivateNetworking ? 
'Disabled' : 'Enabled' privateEndpoints: (enablePrivateNetworking && empty(existingFoundryProjectResourceId)) @@ -622,10 +735,6 @@ module aiFoundryAiServices 'modules/ai-services.bicep' = if (aiFoundryAIservices name: 'ai-services-dns-zone-cognitiveservices' privateDnsZoneResourceId: avmPrivateDnsZones[dnsZoneIndex.cognitiveServices]!.outputs.resourceId } - { - name: 'ai-services-dns-zone-openai' - privateDnsZoneResourceId: avmPrivateDnsZones[dnsZoneIndex.openAI]!.outputs.resourceId - } { name: 'ai-services-dns-zone-aiservices' privateDnsZoneResourceId: avmPrivateDnsZones[dnsZoneIndex.aiServices]!.outputs.resourceId @@ -666,13 +775,15 @@ module aiFoundryAiServices 'modules/ai-services.bicep' = if (aiFoundryAIservices } } -// ========== DEVELOPER EXPERIENCE: Cosmos DB with User Permissions ========== // +//========== AVM WAF ========== // +//========== Cosmos DB module ========== // var cosmosDbResourceName = 'cosmos-${solutionSuffix}' var cosmosDbDatabaseName = 'db_conversation_history' var collectionName = 'conversations' module cosmosDb 'br/public:avm/res/document-db/database-account:0.15.0' = { name: take('avm.res.document-db.database-account.${cosmosDbResourceName}', 64) params: { + // Required parameters name: cosmosDbResourceName location: cosmosLocation tags: tags @@ -692,20 +803,19 @@ module cosmosDb 'br/public:avm/res/document-db/database-account:0.15.0' = { ] dataPlaneRoleDefinitions: [ { + // Cosmos DB Built-in Data Contributor: https://docs.azure.cn/en-us/cosmos-db/nosql/security/reference-data-plane-roles#cosmos-db-built-in-data-contributor roleName: 'Cosmos DB SQL Data Contributor' dataActions: [ 'Microsoft.DocumentDB/databaseAccounts/readMetadata' 'Microsoft.DocumentDB/databaseAccounts/sqlDatabases/containers/*' 'Microsoft.DocumentDB/databaseAccounts/sqlDatabases/containers/items/*' ] - assignments: [ - { principalId: userAssignedIdentity.outputs.principalId } - // DEVELOPER EXPERIENCE: Add deployer permissions for local debugging - { principalId: deployer().objectId } - ] + assignments: [{ principalId: userAssignedIdentity.outputs.principalId }] } ] + // WAF aligned configuration for Monitoring diagnosticSettings: enableMonitoring ? [{ workspaceResourceId: logAnalyticsWorkspaceResourceId }] : null + // WAF aligned configuration for Private Networking networkRestrictions: { networkAclBypass: 'None' publicNetworkAccess: enablePrivateNetworking ? 'Disabled' : 'Enabled' @@ -725,6 +835,7 @@ module cosmosDb 'br/public:avm/res/document-db/database-account:0.15.0' = { } ] : [] + // WAF aligned configuration for Redundancy zoneRedundant: enableRedundancy ? true : false capabilitiesToAdd: enableRedundancy ? null : ['EnableServerless'] automaticFailover: enableRedundancy ? 
true : false @@ -752,6 +863,7 @@ module cosmosDb 'br/public:avm/res/document-db/database-account:0.15.0' = { dependsOn: [keyvault, avmStorageAccount] } +// ========== AVM WAF ========== // // ========== Storage account module ========== // var storageAccountName = 'st${solutionSuffix}' module avmStorageAccount 'br/public:avm/res/storage/storage-account:0.20.0' = { @@ -771,19 +883,15 @@ module avmStorageAccount 'br/public:avm/res/storage/storage-account:0.20.0' = { roleDefinitionIdOrName: 'Storage Blob Data Contributor' principalType: 'ServicePrincipal' } - // DEVELOPER EXPERIENCE: Add deployer permissions - { - principalId: deployer().objectId - roleDefinitionIdOrName: 'Storage Blob Data Contributor' - principalType: 'User' - } ] + // WAF aligned networking networkAcls: { bypass: 'AzureServices' defaultAction: enablePrivateNetworking ? 'Deny' : 'Allow' } allowBlobPublicAccess: false publicNetworkAccess: enablePrivateNetworking ? 'Disabled' : 'Enabled' + // Private endpoints for blob and queue privateEndpoints: enablePrivateNetworking ? [ { @@ -830,12 +938,44 @@ module avmStorageAccount 'br/public:avm/res/storage/storage-account:0.20.0' = { dependsOn: [keyvault] } +// working version of saving storage account secrets in key vault using AVM module +module saveStorageAccountSecretsInKeyVault 'br/public:avm/res/key-vault/vault:0.12.1' = { + name: take('saveStorageAccountSecretsInKeyVault.${keyVaultName}', 64) + params: { + name: keyVaultName + enablePurgeProtection: enablePurgeProtection + enableVaultForDeployment: true + enableVaultForDiskEncryption: true + enableVaultForTemplateDeployment: true + enableRbacAuthorization: true + enableSoftDelete: true + softDeleteRetentionInDays: 7 + secrets: [ + { + name: 'ADLS-ACCOUNT-NAME' + value: storageAccountName + } + { + name: 'ADLS-ACCOUNT-CONTAINER' + value: 'data' + } + { + name: 'ADLS-ACCOUNT-KEY' + value: avmStorageAccount.outputs.primaryAccessKey + } + ] + } +} + +// ========== AVM WAF ========== // // ========== SQL module ========== // var sqlDbName = 'sqldb-${solutionSuffix}' module sqlDBModule 'br/public:avm/res/sql/server:0.20.1' = { name: take('avm.res.sql.server.${sqlDbName}', 64) params: { + // Required parameters name: 'sql-${solutionSuffix}' + // Non-required parameters administrators: { azureADOnlyAuthentication: true login: userAssignedIdentity.outputs.name @@ -847,6 +987,9 @@ module sqlDBModule 'br/public:avm/res/sql/server:0.20.1' = { databases: [ { zoneRedundant: enableRedundancy + // When enableRedundancy is true (zoneRedundant=true), set availabilityZone to -1 + // to let Azure automatically manage zone placement across multiple zones. + // When enableRedundancy is false, also use -1 (no specific zone assignment). 
availabilityZone: -1 collation: 'SQL_Latin1_General_CP1_CI_AS' diagnosticSettings: enableMonitoring @@ -905,6 +1048,8 @@ module sqlDBModule 'br/public:avm/res/sql/server:0.20.1' = { } // ========== Frontend server farm ========== // +// WAF best practices for Web Application Services: https://learn.microsoft.com/en-us/azure/well-architected/service-guides/app-service-web-apps +// PSRule for Web Server Farm: https://azure.github.io/PSRule.Rules.Azure/en/rules/resource/#app-service var webServerFarmResourceName = 'asp-${solutionSuffix}' module webServerFarm 'br/public:avm/res/web/serverfarm:0.5.0' = { name: take('avm.res.web.serverfarm.${webServerFarmResourceName}', 64) @@ -915,27 +1060,36 @@ module webServerFarm 'br/public:avm/res/web/serverfarm:0.5.0' = { location: solutionLocation reserved: true kind: 'linux' + // WAF aligned configuration for Monitoring diagnosticSettings: enableMonitoring ? [{ workspaceResourceId: logAnalyticsWorkspaceResourceId }] : null + // WAF aligned configuration for Scalability skuName: enableScalability || enableRedundancy ? 'P1v3' : 'B3' - skuCapacity: enableScalability ? 3 : 1 + // skuCapacity: enableScalability ? 3 : 1 + skuCapacity: 1 // skuCapacity set to 1 (not 3) due to multiple agents created per type during WAF deployment + // WAF aligned configuration for Redundancy zoneRedundant: enableRedundancy ? true : false } } // ========== Frontend web site ========== // +// WAF best practices for web app service: https://learn.microsoft.com/en-us/azure/well-architected/service-guides/app-service-web-apps +// PSRule for Web Server Farm: https://azure.github.io/PSRule.Rules.Azure/en/rules/resource/#app-service + +//NOTE: AVM module adds 1 MB of overhead to the template. Keeping vanilla resource to save template size. var webSiteResourceName = 'app-${solutionSuffix}' module webSite 'modules/web-sites.bicep' = { name: take('module.web-sites.${webSiteResourceName}', 64) params: { name: webSiteResourceName - tags: tags + tags: union(tags, { 'azd-service-name': 'webapp' }) location: solutionLocation managedIdentities: { userAssignedResourceIds: [userAssignedIdentity!.outputs.resourceId, sqlUserAssignedIdentity!.outputs.resourceId] } - kind: 'app,linux,container' + kind: 'app,linux' serverFarmResourceId: webServerFarm.?outputs.resourceId siteConfig: { - linuxFxVersion: 'DOCKER|${containerRegistryHostnameActual}/${containerImageName}:${imageTag}' + linuxFxVersion: 'PYTHON|3.11' minTlsVersion: '1.2' + appCommandLine: 'python -m uvicorn app:app --host 0.0.0.0 --port 8000' } configs: [ { @@ -944,6 +1098,8 @@ module webSite 'modules/web-sites.bicep' = { APP_ENV: appEnvironment APPINSIGHTS_INSTRUMENTATIONKEY: enableMonitoring ? applicationInsights!.outputs.instrumentationKey : '' APPLICATIONINSIGHTS_CONNECTION_STRING: enableMonitoring ? 
applicationInsights!.outputs.connectionString : '' + WEBSITES_PORT: '8000' + SCM_DO_BUILD_DURING_DEPLOYMENT: 'true' AZURE_SEARCH_SERVICE: aiSearchName AZURE_SEARCH_INDEX: azureSearchIndex AZURE_SEARCH_USE_SEMANTIC_SEARCH: azureSearchUseSemanticSearch @@ -956,20 +1112,20 @@ module webSite 'modules/web-sites.bicep' = { AZURE_SEARCH_URL_COLUMN: azureSearchUrlColumn AZURE_OPENAI_RESOURCE: aiFoundryAiServices.outputs.name AZURE_OPENAI_MODEL: gptModelName - AZURE_OPENAI_ENDPOINT: aiFoundryAiServices.outputs.endpoints['OpenAI Language Model Instance API'] + AZURE_OPENAI_ENDPOINT: aiFoundryAiServices.outputs.endpoint AZURE_OPENAI_TEMPERATURE: azureOpenAITemperature AZURE_OPENAI_TOP_P: azureOpenAITopP AZURE_OPENAI_MAX_TOKENS: azureOpenAIMaxTokens AZURE_OPENAI_STOP_SEQUENCE: azureOpenAIStopSequence AZURE_OPENAI_SYSTEM_MESSAGE: azureOpenAISystemMessage - AZURE_OPENAI_PREVIEW_API_VERSION: azureOpenaiAPIVersion + AZURE_OPENAI_PREVIEW_API_VERSION: azureAIServicesAPIVersion AZURE_OPENAI_STREAM: azureOpenAIStream AZURE_SEARCH_QUERY_TYPE: azureSearchQueryType AZURE_SEARCH_VECTOR_COLUMNS: azureSearchVectorFields AZURE_SEARCH_PERMITTED_GROUPS_COLUMN: azureSearchPermittedGroupsField AZURE_SEARCH_STRICTNESS: azureSearchStrictness AZURE_OPENAI_EMBEDDING_NAME: embeddingModel - AZURE_OPENAI_EMBEDDING_ENDPOINT : aiFoundryAiServices.outputs.endpoints['OpenAI Language Model Instance API'] + AZURE_OPENAI_EMBEDDING_ENDPOINT : aiFoundryAiServices.outputs.endpoint SQLDB_SERVER: sqlServerFqdn SQLDB_DATABASE: sqlDbName USE_INTERNAL_STREAM: useInternalStream @@ -983,16 +1139,18 @@ module webSite 'modules/web-sites.bicep' = { AZURE_CALL_TRANSCRIPT_SYSTEM_PROMPT: functionAppCallTranscriptSystemPrompt AZURE_OPENAI_STREAM_TEXT_SYSTEM_PROMPT: functionAppStreamTextSystemPrompt USE_AI_PROJECT_CLIENT: useAIProjectClientFlag - AZURE_AI_AGENT_ENDPOINT: useExistingAiFoundryAiProject ? existingAiFoundryAiServicesProject!.properties.endpoints.inference : aiFoundryAiServices.outputs.aiProjectInfo.apiEndpoint + AZURE_AI_AGENT_ENDPOINT: aiFoundryAiServices.outputs.aiProjectInfo.apiEndpoint AZURE_AI_AGENT_MODEL_DEPLOYMENT_NAME: gptModelName - AZURE_AI_AGENT_API_VERSION: azureOpenaiAPIVersion + AZURE_AI_AGENT_API_VERSION: azureAIServicesAPIVersion AZURE_SEARCH_CONNECTION_NAME: aiSearchName AZURE_CLIENT_ID: userAssignedIdentity.outputs.clientId } + // WAF aligned configuration for Monitoring applicationInsightResourceId: enableMonitoring ? applicationInsights!.outputs.resourceId : null } ] diagnosticSettings: enableMonitoring ? [{ workspaceResourceId: logAnalyticsWorkspaceResourceId }] : null + // WAF aligned configuration for Private Networking vnetRouteAllEnabled: enablePrivateNetworking ? true : false vnetImagePullEnabled: enablePrivateNetworking ? true : false virtualNetworkSubnetId: enablePrivateNetworking ? 
virtualNetwork!.outputs.webSubnetResourceId : null @@ -1000,12 +1158,24 @@ module webSite 'modules/web-sites.bicep' = { } } -// ========== AI Search Service ========== // +resource existingAiFoundryAiServices 'Microsoft.CognitiveServices/accounts@2025-04-01-preview' existing = if (useExistingAiFoundryAiProject) { + name: aiFoundryAiServicesResourceName + scope: resourceGroup(aiFoundryAiServicesSubscriptionId, aiFoundryAiServicesResourceGroupName) +} + +resource existingAiFoundryAiServicesProject 'Microsoft.CognitiveServices/accounts/projects@2025-04-01-preview' existing = if (useExistingAiFoundryAiProject) { + name: aiFoundryAiProjectResourceName + parent: existingAiFoundryAiServices +} + + var aiSearchName = 'srch-${solutionSuffix}' module searchService 'br/public:avm/res/search/search-service:0.11.1' = { name: take('avm.res.search.search-service.${aiSearchName}', 64) params: { + // Required parameters name: aiSearchName + // Authentication options authOptions: { aadOrApiKey: { aadAuthFailureMode: 'http401WithBearerChallenge' @@ -1046,44 +1216,33 @@ module searchService 'br/public:avm/res/search/search-service:0.11.1' = { principalId: !useExistingAiFoundryAiProject ? aiFoundryAiServices.outputs.aiProjectInfo.aiprojectSystemAssignedMIPrincipalId : existingAiFoundryAiServicesProject!.identity.principalId principalType: 'ServicePrincipal' } - // DEVELOPER EXPERIENCE: Add deployer permissions - { - roleDefinitionIdOrName: '1407120a-92aa-4202-b7e9-c0e197c71c8f' // Search Index Data Reader - principalId: deployer().objectId - principalType: 'User' - } - { - roleDefinitionIdOrName: '7ca78c08-252a-4471-8644-bb5ff32d4ba0' // Search Service Contributor - principalId: deployer().objectId - principalType: 'User' - } ] partitionCount: 1 replicaCount: 1 sku: 'standard' semanticSearch: 'free' + // Use the deployment tags provided to the template tags: tags publicNetworkAccess: 'Enabled' - privateEndpoints: enablePrivateNetworking - ? [ - { - name: 'pep-${aiSearchName}' - customNetworkInterfaceName: 'nic-${aiSearchName}' - privateDnsZoneGroup: { - privateDnsZoneGroupConfigs: [ - { privateDnsZoneResourceId: avmPrivateDnsZones[dnsZoneIndex.searchService]!.outputs.resourceId } - ] - } - service: 'searchService' - subnetResourceId: virtualNetwork!.outputs.pepsSubnetResourceId + privateEndpoints: false + ? 
[ + { + name: 'pep-${aiSearchName}' + customNetworkInterfaceName: 'nic-${aiSearchName}' + privateDnsZoneGroup: { + privateDnsZoneGroupConfigs: [ + { privateDnsZoneResourceId: avmPrivateDnsZones[dnsZoneIndex.searchService]!.outputs.resourceId } + ] } - ] - : [] + service: 'searchService' + subnetResourceId: virtualNetwork!.outputs.pepsSubnetResourceId + } + ] + : [] } } -// ========== AI Search Project Connection ========== // -resource projectAISearchConnection 'Microsoft.CognitiveServices/accounts/projects/connections@2025-04-01-preview' = if (!useExistingAiFoundryAiProject) { +resource projectAISearchConnection 'Microsoft.CognitiveServices/accounts/projects/connections@2025-04-01-preview' = if (!useExistingAiFoundryAiProject) { name: '${aiFoundryAiServicesResourceName}/${aiFoundryAiServicesAiProjectResourceName}/${aiSearchName}' properties: { category: 'CognitiveSearch' @@ -1111,18 +1270,7 @@ module existing_AIProject_SearchConnectionModule 'modules/deploy_aifp_aisearch_c } } -// ========== Existing AI Services Resources ========== // -resource existingAiFoundryAiServices 'Microsoft.CognitiveServices/accounts@2025-04-01-preview' existing = if (useExistingAiFoundryAiProject) { - name: aiFoundryAiServicesResourceName - scope: resourceGroup(aiFoundryAiServicesSubscriptionId, aiFoundryAiServicesResourceGroupName) -} - -resource existingAiFoundryAiServicesProject 'Microsoft.CognitiveServices/accounts/projects@2025-04-01-preview' existing = if (useExistingAiFoundryAiProject) { - name: aiFoundryAiProjectResourceName - parent: existingAiFoundryAiServices -} - -// ========== Search Service Role Assignments ========== // +// ========== Search Service to AI Services Role Assignment ========== // resource searchServiceToAiServicesRoleAssignment 'Microsoft.Authorization/roleAssignments@2022-04-01' = if (!useExistingAiFoundryAiProject) { name: guid(aiSearchName, '5e0bd9bd-7b93-4f28-af87-19fc36ad61bd', aiFoundryAiServicesResourceName) properties: { @@ -1147,24 +1295,6 @@ module searchServiceToExistingAiServicesRoleAssignment 'modules/role-assignment. 
@description('URL of the deployed web application.') output WEB_APP_URL string = 'https://${webSite.outputs.name}.azurewebsites.net' -@description('Name of the deployed web application.') -output WEB_APP_NAME string = webSite.outputs.name - -@description('Name of the Azure Container Registry.') -output AZURE_CONTAINER_REGISTRY_NAME string = containerRegistry.outputs.name - -@description('Login server of the Azure Container Registry.') -output AZURE_CONTAINER_REGISTRY_ENDPOINT string = containerRegistry.outputs.loginServer - -@description('Container registry hostname for docker images.') -output CONTAINER_REGISTRY_HOSTNAME string = containerRegistryHostnameActual - -@description('Container image name.') -output CONTAINER_IMAGE_NAME string = containerImageName - -@description('Container image tag.') -output IMAGE_TAG string = imageTag - @description('Name of the storage account.') output STORAGE_ACCOUNT_NAME string = avmStorageAccount.outputs.name @@ -1200,10 +1330,11 @@ output MANAGEDIDENTITY_SQL_NAME string = sqlUserAssignedIdentity.outputs.name @description('Client ID of the managed identity used for SQL database operations.') output MANAGEDIDENTITY_SQL_CLIENTID string = sqlUserAssignedIdentity.outputs.clientId - @description('Name of the AI Search service.') -output AI_SEARCH_SERVICE_NAME string = aiSearchName +output AI_SEARCH_SERVICE_NAME string = aiSearchName +@description('Name of the deployed web application.') +output WEB_APP_NAME string = webSite.outputs.name @description('Specifies the current application environment.') output APP_ENV string = appEnvironment @@ -1211,35 +1342,135 @@ output APP_ENV string = appEnvironment output APPINSIGHTS_INSTRUMENTATIONKEY string = enableMonitoring ? applicationInsights!.outputs.instrumentationKey : '' @description('The Application Insights connection string.') -output APPLICATIONINSIGHTS_CONNECTION_STRING string = enableMonitoring ? applicationInsights!.outputs.connectionString : '' +output APPLICATIONINSIGHTS_CONNECTION_STRING string = enableMonitoring + ? 
applicationInsights!.outputs.connectionString + : '' -@description('The Azure Subscription ID where the resources are deployed.') -output AZURE_SUBSCRIPTION_ID string = subscription().subscriptionId + @description('The API version used for the Azure AI Agent service.') +output AZURE_AI_AGENT_API_VERSION string = azureAIServicesAPIVersion -// Additional outputs for environment configuration -@description('Azure OpenAI endpoint.') -output AZURE_OPENAI_ENDPOINT string = aiFoundryAiServices.outputs.endpoints['OpenAI Language Model Instance API'] +@description('The endpoint URL of the Azure AI Agent project.') +output AZURE_AI_AGENT_ENDPOINT string = aiFoundryAiServices.outputs.aiProjectInfo.apiEndpoint -@description('Azure OpenAI resource name.') -output AZURE_OPENAI_RESOURCE string = aiFoundryAiServices.outputs.name +@description('The deployment name of the GPT model for the Azure AI Agent.') +output AZURE_AI_AGENT_MODEL_DEPLOYMENT_NAME string = gptModelName -@description('Azure OpenAI model name.') -output AZURE_OPENAI_MODEL string = gptModelName +@description('The endpoint URL of the Azure AI Search service.') +output AZURE_AI_SEARCH_ENDPOINT string = 'https://${aiSearchName}.search.windows.net' -@description('Azure AI Search endpoint.') -output AZURE_AI_SEARCH_ENDPOINT string = 'https://${aiSearchName}.search.windows.net' +@description('The system prompt used for call transcript processing in Azure Functions.') +output AZURE_CALL_TRANSCRIPT_SYSTEM_PROMPT string = functionAppCallTranscriptSystemPrompt -@description('Azure Cosmos DB account name.') +@description('The name of the Azure Cosmos DB account.') output AZURE_COSMOSDB_ACCOUNT string = cosmosDb.outputs.name -@description('Azure Cosmos DB database name.') +@description('The name of the Azure Cosmos DB container for storing conversations.') +output AZURE_COSMOSDB_CONVERSATIONS_CONTAINER string = collectionName + +@description('The name of the Azure Cosmos DB database.') output AZURE_COSMOSDB_DATABASE string = cosmosDbDatabaseName -@description('Azure Cosmos DB conversations container name.') -output AZURE_COSMOSDB_CONVERSATIONS_CONTAINER string = collectionName +@description('Indicates whether feedback is enabled in Azure Cosmos DB.') +output AZURE_COSMOSDB_ENABLE_FEEDBACK string = azureCosmosDbEnableFeedback + +@description('The endpoint URL for the Azure OpenAI Embedding model.') +output AZURE_OPENAI_EMBEDDING_ENDPOINT string = aiFoundryAiServices.outputs.endpoint -@description('SQL Database server FQDN.') +@description('The name of the Azure OpenAI Embedding model.') +output AZURE_OPENAI_EMBEDDING_NAME string = embeddingModel + +@description('The endpoint URL for the Azure OpenAI service.') +output AZURE_OPENAI_ENDPOINT string = aiFoundryAiServices.outputs.endpoint + +@description('The maximum number of tokens for Azure OpenAI responses.') +output AZURE_OPENAI_MAX_TOKENS string = azureOpenAIMaxTokens + +@description('The name of the Azure OpenAI GPT model.') +output AZURE_OPENAI_MODEL string = gptModelName + +@description('The preview API version for Azure OpenAI.') +output AZURE_OPENAI_PREVIEW_API_VERSION string = azureAIServicesAPIVersion + +@description('The Azure OpenAI resource name.') +output AZURE_OPENAI_RESOURCE string = aiFoundryAiServices.outputs.name + +@description('The stop sequence(s) for Azure OpenAI responses.') +output AZURE_OPENAI_STOP_SEQUENCE string = azureOpenAIStopSequence + +@description('Indicates whether streaming is enabled for Azure OpenAI responses.') +output AZURE_OPENAI_STREAM string = azureOpenAIStream 
+ +@description('The system prompt for streaming text responses in Azure Functions.') +output AZURE_OPENAI_STREAM_TEXT_SYSTEM_PROMPT string = functionAppStreamTextSystemPrompt + +@description('The system message for Azure OpenAI requests.') +output AZURE_OPENAI_SYSTEM_MESSAGE string = azureOpenAISystemMessage + +@description('The temperature setting for Azure OpenAI responses.') +output AZURE_OPENAI_TEMPERATURE string = azureOpenAITemperature + +@description('The Top-P setting for Azure OpenAI responses.') +output AZURE_OPENAI_TOP_P string = azureOpenAITopP + +@description('The name of the Azure AI Search connection.') +output AZURE_SEARCH_CONNECTION_NAME string = aiSearchName + +@description('The columns in Azure AI Search that contain content.') +output AZURE_SEARCH_CONTENT_COLUMNS string = azureSearchContentColumns + +@description('Indicates whether in-domain filtering is enabled for Azure AI Search.') +output AZURE_SEARCH_ENABLE_IN_DOMAIN string = azureSearchEnableInDomain + +@description('The filename column used in Azure AI Search.') +output AZURE_SEARCH_FILENAME_COLUMN string = azureSearchFilenameColumn + +@description('The name of the Azure AI Search index.') +output AZURE_SEARCH_INDEX string = azureSearchIndex + +@description('The permitted groups field used in Azure AI Search.') +output AZURE_SEARCH_PERMITTED_GROUPS_COLUMN string = azureSearchPermittedGroupsField + +@description('The query type for Azure AI Search.') +output AZURE_SEARCH_QUERY_TYPE string = azureSearchQueryType + +@description('The semantic search configuration name in Azure AI Search.') +output AZURE_SEARCH_SEMANTIC_SEARCH_CONFIG string = azureSearchSemanticSearchConfig + +@description('The name of the Azure AI Search service.') +output AZURE_SEARCH_SERVICE string = aiSearchName + +@description('The strictness setting for Azure AI Search semantic ranking.') +output AZURE_SEARCH_STRICTNESS string = azureSearchStrictness + +@description('The title column used in Azure AI Search.') +output AZURE_SEARCH_TITLE_COLUMN string = azureSearchTitleColumn + +@description('The number of top results (K) to return from Azure AI Search.') +output AZURE_SEARCH_TOP_K string = azureSearchTopK + +@description('The URL column used in Azure AI Search.') +output AZURE_SEARCH_URL_COLUMN string = azureSearchUrlColumn + +@description('Indicates whether semantic search is used in Azure AI Search.') +output AZURE_SEARCH_USE_SEMANTIC_SEARCH string = azureSearchUseSemanticSearch + +@description('The vector fields used in Azure AI Search.') +output AZURE_SEARCH_VECTOR_COLUMNS string = azureSearchVectorFields + +@description('The system prompt for SQL queries in Azure Functions.') +output AZURE_SQL_SYSTEM_PROMPT string = functionAppSqlPrompt + +@description('The fully qualified domain name (FQDN) of the Azure SQL Server.') output SQLDB_SERVER string = sqlServerFqdn -@description('Client ID for web app managed identity.') -output AZURE_CLIENT_ID string = userAssignedIdentity.outputs.clientId +@description('The client ID of the managed identity for the web application.') +output SQLDB_USER_MID string = userAssignedIdentity.outputs.clientId + +@description('Indicates whether the AI Project Client should be used.') +output USE_AI_PROJECT_CLIENT string = useAIProjectClientFlag + +@description('Indicates whether the internal stream should be used.') +output USE_INTERNAL_STREAM string = useInternalStream + +@description('The Azure Subscription ID where the resources are deployed.') +output AZURE_SUBSCRIPTION_ID string = subscription().subscriptionId 
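Once this template is deployed, azd persists the Bicep outputs above into the active environment's .env file, and the webSite module pushes the matching values to the App Service as application settings. A minimal verification sketch follows, assuming the default azd environment and a site named after the app-${solutionSuffix} pattern used above; angle-bracketed names are placeholders, not values from this repo:

# List the Bicep outputs captured for the current azd environment
azd env get-values | grep -E 'AZURE_OPENAI_|AZURE_SEARCH_|SQLDB_'

# Cross-check the application settings actually set on the web app
az webapp config appsettings list \
  --name <app-solutionSuffix> \
  --resource-group <resource-group> \
  --query "[?starts_with(name, 'AZURE_')].name" -o tsv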
diff --git a/infra/scripts/package_webapp.ps1 b/infra/scripts/package_webapp.ps1 new file mode 100644 index 00000000..8b0dca4a --- /dev/null +++ b/infra/scripts/package_webapp.ps1 @@ -0,0 +1,106 @@ +#!/usr/bin/env pwsh + +# Package web app for Azure App Service deployment +# This script builds the frontend and packages the backend with static files into a zip + +Write-Host "Starting web app packaging for App Service..." -ForegroundColor Cyan + +$ErrorActionPreference = "Stop" + +# Get the script directory and navigate to project root +$scriptDir = Split-Path -Parent $MyInvocation.MyCommand.Path +$projectRoot = Resolve-Path (Join-Path $scriptDir "../..") +$srcDir = Join-Path $projectRoot "src/App" +$distDir = Join-Path $srcDir "dist" + +Write-Host "Project root: $projectRoot" -ForegroundColor Gray +Write-Host "Source directory: $srcDir" -ForegroundColor Gray +Write-Host "Dist directory: $distDir" -ForegroundColor Gray + +# Clean dist directory if it exists +if (Test-Path $distDir) { + Write-Host "Cleaning existing dist directory..." -ForegroundColor Yellow + Remove-Item -Path $distDir -Recurse -Force +} + +# Create dist directory +Write-Host "Creating dist directory..." -ForegroundColor Yellow +New-Item -Path $distDir -ItemType Directory -Force | Out-Null + +# Step 1: Build frontend +Write-Host "`nStep 1: Building frontend..." -ForegroundColor Cyan +$frontendDir = Join-Path $srcDir "frontend" + +if (-not (Test-Path (Join-Path $frontendDir "node_modules"))) { + Write-Host "Installing frontend dependencies..." -ForegroundColor Yellow + Push-Location $frontendDir + try { + npm ci + if ($LASTEXITCODE -ne 0) { + throw "npm ci failed" + } + } finally { + Pop-Location + } +} + +Write-Host "Running frontend build..." -ForegroundColor Yellow +Push-Location $frontendDir +try { + $env:NODE_OPTIONS = "--max_old_space_size=8192" + npm run build + if ($LASTEXITCODE -ne 0) { + throw "Frontend build failed" + } +} finally { + Pop-Location + Remove-Item Env:\NODE_OPTIONS -ErrorAction SilentlyContinue +} + +# Step 2: Copy backend files +Write-Host "`nStep 2: Copying backend files..." -ForegroundColor Cyan + +# Copy Python files and backend code +$filesToCopy = @( + "app.py", + "requirements.txt", + "start.sh", + "start.cmd" +) + +foreach ($file in $filesToCopy) { + $sourcePath = Join-Path $srcDir $file + if (Test-Path $sourcePath) { + Write-Host " Copying $file" -ForegroundColor Gray + Copy-Item -Path $sourcePath -Destination $distDir -Force + } +} + +# Copy backend directory +$backendSrc = Join-Path $srcDir "backend" +$backendDst = Join-Path $distDir "backend" +if (Test-Path $backendSrc) { + Write-Host " Copying backend directory..." -ForegroundColor Gray + Copy-Item -Path $backendSrc -Destination $backendDst -Recurse -Force +} + +# Copy static files (built frontend) +$staticSrc = Join-Path $srcDir "static" +$staticDst = Join-Path $distDir "static" +if (Test-Path $staticSrc) { + Write-Host " Copying static directory (frontend build output)..." -ForegroundColor Gray + Copy-Item -Path $staticSrc -Destination $staticDst -Recurse -Force +} else { + Write-Host " WARNING: Static directory not found at $staticSrc" -ForegroundColor Yellow +} + +# Verify the dist directory +$fileCount = (Get-ChildItem -Path $distDir -Recurse -File | Measure-Object).Count +$distSize = (Get-ChildItem -Path $distDir -Recurse | Measure-Object -Property Length -Sum).Sum / 1MB + +Write-Host "`n✓ Successfully prepared deployment package!" 
-ForegroundColor Green +Write-Host " Dist location: $distDir" -ForegroundColor Cyan +Write-Host " Total files: $fileCount" -ForegroundColor Cyan +Write-Host " Total size: $([math]::Round($distSize, 2)) MB" -ForegroundColor Cyan + +Write-Host "`nPackaging complete! azd will handle zip creation during deployment." -ForegroundColor Green diff --git a/infra/scripts/package_webapp.sh b/infra/scripts/package_webapp.sh new file mode 100644 index 00000000..79291917 --- /dev/null +++ b/infra/scripts/package_webapp.sh @@ -0,0 +1,93 @@ +#!/bin/bash + +# Package web app for Azure App Service deployment +# This script builds the frontend and packages the backend with static files into a zip + +set -e + +echo "Starting web app packaging for App Service..." + +# Get the script directory and navigate to project root +SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)" +PROJECT_ROOT="$(cd "$SCRIPT_DIR/../.." && pwd)" +SRC_DIR="$PROJECT_ROOT/src/App" +DIST_DIR="$SRC_DIR/dist" + +echo "Project root: $PROJECT_ROOT" +echo "Source directory: $SRC_DIR" +echo "Dist directory: $DIST_DIR" + +# Clean dist directory if it exists +if [ -d "$DIST_DIR" ]; then + echo "Cleaning existing dist directory..." + rm -rf "$DIST_DIR" +fi + +# Create dist directory +echo "Creating dist directory..." +mkdir -p "$DIST_DIR" + +# Step 1: Build frontend +echo "" +echo "Step 1: Building frontend..." +FRONTEND_DIR="$SRC_DIR/frontend" + +if [ ! -d "$FRONTEND_DIR/node_modules" ]; then + echo "Installing frontend dependencies..." + cd "$FRONTEND_DIR" + npm ci + cd "$PROJECT_ROOT" +fi + +echo "Running frontend build..." +cd "$FRONTEND_DIR" +export NODE_OPTIONS=--max_old_space_size=8192 +npm run build +unset NODE_OPTIONS +cd "$PROJECT_ROOT" + +# Step 2: Copy backend files +echo "" +echo "Step 2: Copying backend files..." + +# Copy Python files and backend code +FILES_TO_COPY=( + "app.py" + "requirements.txt" + "start.sh" + "start.cmd" +) + +for file in "${FILES_TO_COPY[@]}"; do + if [ -f "$SRC_DIR/$file" ]; then + echo " Copying $file" + cp "$SRC_DIR/$file" "$DIST_DIR/" + fi +done + +# Copy backend directory +if [ -d "$SRC_DIR/backend" ]; then + echo " Copying backend directory..." + cp -r "$SRC_DIR/backend" "$DIST_DIR/" +fi + +# Copy static files (built frontend) +if [ -d "$SRC_DIR/static" ]; then + echo " Copying static directory (frontend build output)..." + cp -r "$SRC_DIR/static" "$DIST_DIR/" +else + echo " WARNING: Static directory not found at $SRC_DIR/static" +fi + +# Verify the dist directory +FILE_COUNT=$(find "$DIST_DIR" -type f | wc -l) +DIST_SIZE=$(du -sh "$DIST_DIR" | cut -f1) + +echo "" +echo "✓ Successfully prepared deployment package!" +echo " Dist location: $DIST_DIR" +echo " Total files: $FILE_COUNT" +echo " Total size: $DIST_SIZE" + +echo "" +echo "Packaging complete! azd will handle zip creation during deployment." 
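The two packaging scripts above stage src/App/dist with the built frontend, the backend code, and the start scripts, and leave zip creation to azd at deploy time. A hedged local usage sketch; the service name 'webapp' is an assumption taken from the azd-service-name tag set on the web site module, and the actual azure.yaml wiring may differ:

# Stage the deployment payload (either script produces the same dist layout)
./infra/scripts/package_webapp.sh
# or: pwsh ./infra/scripts/package_webapp.ps1

# Let azd zip the staged folder and deploy it to the App Service
azd deploy webapp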
From 040483cfbb8e15421ccd9475016a44ec74a5ef0f Mon Sep 17 00:00:00 2001 From: Kanchan-Microsoft Date: Tue, 25 Nov 2025 17:03:27 +0530 Subject: [PATCH 31/32] resolved conflicts (#763) --- .../plugins/test_chat_with_data_plugin.py | 32 ------------------- 1 file changed, 32 deletions(-) diff --git a/src/App/tests/backend/plugins/test_chat_with_data_plugin.py b/src/App/tests/backend/plugins/test_chat_with_data_plugin.py index 554115b4..ae30ec96 100644 --- a/src/App/tests/backend/plugins/test_chat_with_data_plugin.py +++ b/src/App/tests/backend/plugins/test_chat_with_data_plugin.py @@ -44,38 +44,6 @@ def test_get_project_openai_client_success_updated( api_version="2025-04-01-preview" ) - @patch("backend.plugins.chat_with_data_plugin.config") - @patch("backend.plugins.chat_with_data_plugin.AIProjectClient") - @patch("backend.plugins.chat_with_data_plugin.get_azure_credential") - def test_get_project_openai_client_success( - self, mock_default_credential, mock_ai_project_client, mock_config - ): - """Test successful creation of project OpenAI client.""" - # Mock config values - mock_config.AI_PROJECT_ENDPOINT = "https://test.ai.azure.com" - mock_config.AZURE_OPENAI_PREVIEW_API_VERSION = "2025-04-01-preview" - - mock_credential = MagicMock() - mock_default_credential.return_value = mock_credential - - mock_project_instance = MagicMock() - mock_openai_client = MagicMock() - mock_project_instance.inference.get_azure_openai_client.return_value = ( - mock_openai_client - ) - mock_ai_project_client.return_value = mock_project_instance - - result = self.plugin.get_project_openai_client() - - assert result == mock_openai_client - mock_default_credential.assert_called_once() - mock_ai_project_client.assert_called_once_with( - endpoint="https://test.ai.azure.com", credential=mock_credential - ) - mock_project_instance.inference.get_azure_openai_client.assert_called_once_with( - api_version="2025-04-01-preview" - ) - @pytest.mark.asyncio @patch("backend.plugins.chat_with_data_plugin.get_connection") @patch("backend.plugins.chat_with_data_plugin.config") From b0e4265a13311b4c31c5c6dc6f15e6d90a987206 Mon Sep 17 00:00:00 2001 From: Kanchan-Microsoft Date: Tue, 25 Nov 2025 17:14:17 +0530 Subject: [PATCH 32/32] regenerated main.json --- infra/main.json | 55 ++++++++++++++++++++++++------------------------- 1 file changed, 27 insertions(+), 28 deletions(-) diff --git a/infra/main.json b/infra/main.json index 08224a55..ba13a0b5 100644 --- a/infra/main.json +++ b/infra/main.json @@ -5,8 +5,8 @@ "metadata": { "_generator": { "name": "bicep", - "version": "0.38.33.27573", - "templateHash": "2150473040924531792" + "version": "0.39.26.7824", + "templateHash": "8399698956794037004" } }, "parameters": { @@ -5294,8 +5294,8 @@ "metadata": { "_generator": { "name": "bicep", - "version": "0.38.33.27573", - "templateHash": "1734974014097019118" + "version": "0.39.26.7824", + "templateHash": "14641679443140532549" } }, "definitions": { @@ -24250,8 +24250,8 @@ "metadata": { "_generator": { "name": "bicep", - "version": "0.38.33.27573", - "templateHash": "12531718623039828267" + "version": "0.39.26.7824", + "templateHash": "1936381873810101836" }, "name": "Cognitive Services", "description": "This module deploys a Cognitive Service." 
@@ -25483,8 +25483,8 @@ "metadata": { "_generator": { "name": "bicep", - "version": "0.38.33.27573", - "templateHash": "1396927448823284485" + "version": "0.39.26.7824", + "templateHash": "17351518472581919759" } }, "definitions": { @@ -27296,8 +27296,8 @@ "metadata": { "_generator": { "name": "bicep", - "version": "0.38.33.27573", - "templateHash": "7420599935384266971" + "version": "0.39.26.7824", + "templateHash": "4291957610087788581" } }, "definitions": { @@ -27450,8 +27450,8 @@ "metadata": { "_generator": { "name": "bicep", - "version": "0.38.33.27573", - "templateHash": "5676565623284126112" + "version": "0.39.26.7824", + "templateHash": "5108472911734987415" } }, "definitions": { @@ -27687,8 +27687,8 @@ "metadata": { "_generator": { "name": "bicep", - "version": "0.38.33.27573", - "templateHash": "1396927448823284485" + "version": "0.39.26.7824", + "templateHash": "17351518472581919759" } }, "definitions": { @@ -29500,8 +29500,8 @@ "metadata": { "_generator": { "name": "bicep", - "version": "0.38.33.27573", - "templateHash": "7420599935384266971" + "version": "0.39.26.7824", + "templateHash": "4291957610087788581" } }, "definitions": { @@ -29654,8 +29654,8 @@ "metadata": { "_generator": { "name": "bicep", - "version": "0.38.33.27573", - "templateHash": "5676565623284126112" + "version": "0.39.26.7824", + "templateHash": "5108472911734987415" } }, "definitions": { @@ -29912,9 +29912,8 @@ } }, "dependsOn": [ - "[format('avmPrivateDnsZones[{0}]', variables('dnsZoneIndex').cognitiveServices)]", "[format('avmPrivateDnsZones[{0}]', variables('dnsZoneIndex').aiServices)]", - "[format('avmPrivateDnsZones[{0}]', variables('dnsZoneIndex').openAI)]", + "[format('avmPrivateDnsZones[{0}]', variables('dnsZoneIndex').cognitiveServices)]", "logAnalyticsWorkspace", "userAssignedIdentity", "virtualNetwork" @@ -50028,8 +50027,8 @@ "metadata": { "_generator": { "name": "bicep", - "version": "0.38.33.27573", - "templateHash": "14170137035624875111" + "version": "0.39.26.7824", + "templateHash": "13074777962389399773" } }, "definitions": { @@ -51041,8 +51040,8 @@ "metadata": { "_generator": { "name": "bicep", - "version": "0.38.33.27573", - "templateHash": "16983009113856606195" + "version": "0.39.26.7824", + "templateHash": "11666262061409473778" }, "name": "Site App Settings", "description": "This module deploys a Site App Setting." @@ -54428,8 +54427,8 @@ "metadata": { "_generator": { "name": "bicep", - "version": "0.38.33.27573", - "templateHash": "11311597701635556530" + "version": "0.39.26.7824", + "templateHash": "904007681755275486" } }, "parameters": { @@ -54523,8 +54522,8 @@ "metadata": { "_generator": { "name": "bicep", - "version": "0.38.33.27573", - "templateHash": "9717690292313179013" + "version": "0.39.26.7824", + "templateHash": "10276790018915749779" } }, "parameters": {
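The final patch re-emits infra/main.json after compiling with a newer Bicep CLI (0.39.26.7824); the visible changes are regenerated _generator metadata, template hashes, and compiler-generated dependsOn entries. A sketch of how such a regeneration is typically produced; the exact command the authors used is not recorded in the patch:

# Update the local Bicep CLI, then recompile the entry point into the checked-in ARM template
az bicep upgrade
az bicep build --file infra/main.bicep --outfile infra/main.json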