diff --git a/README.md b/README.md index d331527..04ea3c7 100644 --- a/README.md +++ b/README.md @@ -38,7 +38,7 @@ _Try it out, learn from it, apply it in your setups._ --- -## ๐Ÿš€ Getting Started +## ๐Ÿ—๏ธ Infrastructure Setup ### Quick Start Options @@ -159,11 +159,13 @@ For detailed troubleshooting of setup issues, see [Import Troubleshooting Guide] ๐Ÿ“˜ **For comprehensive troubleshooting including deployment errors, authentication issues, and more, see our main [Troubleshooting Guide](TROUBLESHOOTING.md).** -### โ–ถ๏ธ Running a Sample +## ๐Ÿš€ Running a Sample -1. Locate the specific sample's `create.ipynb` file and adjust the parameters under the `User-defined Parameters` header as you see fit. -1. Ensure that the specified infrastructure already exists in your subscription. If not, proceed to the desired infrastructure folder and execute its `create.ipynb` file. Wait until this completes before continuing. -1. Execute the sample's `create.ipynb` file. +1. Open the desired sample's `create.ipynb` file. +1. Optional: Adjust the parameters under the `User-defined Parameters` header, if desired. +1. Execute the `create.ipynb` Jupyter notebook via `Run All`. + +> A supported infrastructure does not yet need to exist before the sample is executed. The notebook will determine the current state and present you with options to create or select a supported infrastructure, if necessary. Now that infrastructure and sample have been stood up, you can experiment with the policies, make requests against APIM, etc. diff --git a/infrastructure/afd-apim-pe/create.ipynb b/infrastructure/afd-apim-pe/create.ipynb index 639e1b3..1ed1e51 100644 --- a/infrastructure/afd-apim-pe/create.ipynb +++ b/infrastructure/afd-apim-pe/create.ipynb @@ -4,211 +4,13 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "### ๐Ÿ› ๏ธ 1. 
Initialize notebook variables\n", + "### ๐Ÿ› ๏ธ Configure Infrastructure Parameters & Create the Infrastructure\n", "\n", - "Configures everything that's needed for deployment. \n", + "Set your desired parameters for the AFD-APIM-PE infrastructure deployment.\n", "\n", - "โ—๏ธ **Modify entries under _1) User-defined parameters_**." - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "import utils\n", - "from apimtypes import *\n", - "\n", - "# 1) User-defined parameters (change these as needed)\n", - "rg_location = 'eastus2'\n", - "index = 1\n", - "apim_sku = APIM_SKU.STANDARDV2\n", - "deployment = INFRASTRUCTURE.AFD_APIM_PE\n", - "use_ACA = True\n", - "reveal_backend = True # Set to True to reveal the backend details in the API operations\n", - "\n", - "# 2) Service-defined parameters (please do not change these unless you know what you're doing)\n", - "rg_name = utils.get_infra_rg_name(deployment, index)\n", - "rg_tags = utils.build_infrastructure_tags(deployment)\n", - "apim_network_mode = APIMNetworkMode.EXTERNAL_VNET\n", - "\n", - "# 3) Set up the policy fragments\n", - "pfs: List[PolicyFragment] = [\n", - " PolicyFragment('AuthZ-Match-All', utils.read_policy_xml(utils.determine_shared_policy_path('pf-authz-match-all.xml')), 'Authorizes if all of the specified roles match the JWT role claims.'),\n", - " PolicyFragment('AuthZ-Match-Any', utils.read_policy_xml(utils.determine_shared_policy_path('pf-authz-match-any.xml')), 'Authorizes if any of the specified roles match the JWT role claims.'),\n", - " PolicyFragment('Http-Response-200', utils.read_policy_xml(utils.determine_shared_policy_path('pf-http-response-200.xml')), 'Returns a 200 OK response for the current HTTP method.'),\n", - " PolicyFragment('Product-Match-Any', utils.read_policy_xml(utils.determine_shared_policy_path('pf-product-match-any.xml')), 'Proceeds if any of the specified products match the context product name.'),\n", - " 
PolicyFragment('Remove-Request-Headers', utils.read_policy_xml(utils.determine_shared_policy_path('pf-remove-request-headers.xml')), 'Removes request headers from the incoming request.')\n", - "]\n", - "\n", - "# 4) Define the APIs and their operations and policies\n", - "\n", - "# Policies\n", - "pol_hello_world = utils.read_policy_xml(HELLO_WORLD_XML_POLICY_PATH)\n", - "\n", - "# Hello World (Root)\n", - "api_hwroot_get = GET_APIOperation('This is a GET for API 1', pol_hello_world)\n", - "api_hwroot = API('hello-world', 'Hello World', '', 'This is the root API for Hello World', operations = [api_hwroot_get])\n", - "\n", - "apis: List[API] = [api_hwroot]\n", - "\n", - "# If Container Apps is enabled, create the ACA APIs in APIM\n", - "if use_ACA:\n", - " utils.print_info('ACA APIs will be created.')\n", - "\n", - " pol_backend = utils.read_policy_xml(BACKEND_XML_POLICY_PATH)\n", - " pol_aca_backend_1 = pol_backend.format(backend_id = 'aca-backend-1')\n", - " pol_aca_backend_2 = pol_backend.format(backend_id = 'aca-backend-2')\n", - " pol_aca_backend_pool = pol_backend.format(backend_id = 'aca-backend-pool')\n", - "\n", - " # Hello World (ACA Backend 1)\n", - " api_hwaca_1_get = GET_APIOperation('This is a GET for Hello World on ACA Backend 1')\n", - " api_hwaca_1 = API('hello-world-aca-1', 'Hello World (ACA 1)', '/aca-1', 'This is the ACA API for Backend 1', policyXml = pol_aca_backend_1, operations = [api_hwaca_1_get])\n", - "\n", - " # Hello World (ACA Backend 2)\n", - " api_hwaca_2_get = GET_APIOperation('This is a GET for Hello World on ACA Backend 2')\n", - " api_hwaca_2 = API('hello-world-aca-2', 'Hello World (ACA 2)', '/aca-2', 'This is the ACA API for Backend 2', policyXml = pol_aca_backend_2, operations = [api_hwaca_2_get])\n", - "\n", - " # Hello World (ACA Backend Pool)\n", - " api_hwaca_pool_get = GET_APIOperation('This is a GET for Hello World on ACA Backend Pool')\n", - " api_hwaca_pool = API('hello-world-aca-pool', 'Hello World (ACA Pool)', 
'/aca-pool', 'This is the ACA API for Backend Pool', policyXml = pol_aca_backend_pool, operations = [api_hwaca_pool_get])\n", - "\n", - " # Add ACA APIs to the existing apis array\n", - " apis += [api_hwaca_1, api_hwaca_2, api_hwaca_pool]\n", - "\n", - "utils.print_ok('Notebook initialized')" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "### ๐Ÿš€ 2. Create deployment using Bicep\n", - "\n", - "Creates the bicep deployment into the previously-specified resource group. A bicep parameters file will be created prior to execution." - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "import utils\n", - "from apimtypes import *\n", - "\n", - "# 1) Define the Bicep parameters with serialized APIs and networking mode\n", - "bicep_parameters = {\n", - " 'apimSku' : {'value': apim_sku.value},\n", - " 'apis' : {'value': [api.to_dict() for api in apis]},\n", - " 'policyFragments' : {'value': [pf.to_dict() for pf in pfs]},\n", - " 'apimPublicAccess' : {'value': apim_network_mode in [APIMNetworkMode.PUBLIC, APIMNetworkMode.EXTERNAL_VNET]},\n", - " 'useACA' : {'value': use_ACA}\n", - "}\n", - "\n", - "# 2) Run the deployment\n", - "output = utils.create_bicep_deployment_group(rg_name, rg_location, deployment, bicep_parameters, rg_tags = rg_tags)\n", - "\n", - "# 3) Print a deployment summary, if successful; otherwise, exit with an error\n", - "if not output.success:\n", - " raise SystemExit('Deployment failed')\n", - "\n", - "if output.success and output.json_data:\n", - " apim_service_id = output.get('apimServiceId', 'APIM Service Id')\n", - " apim_gateway_url = output.get('apimResourceGatewayURL', 'APIM API Gateway URL')\n", - " afd_endpoint_url = output.get('fdeSecureUrl', 'Front Door Endpoint URL')\n", - " apim_apis = output.getJson('apiOutputs', 'APIs')\n", - "\n", - "utils.print_ok('Deployment completed')\n" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - 
"### ๐Ÿ”— 3. Approve Front Door private link connection to APIM\n", - "\n", - "In the deployed Bicep template, Azure Front Door will establish a private link connection to the API Management service. This connection should be approved. Run the following command to approve the connection." - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "import utils\n", + "โ—๏ธ **Modify entries under _User-defined parameters_**.\n", "\n", - "# Get all pending private endpoint connections as JSON\n", - "output = utils.run(f\"az network private-endpoint-connection list --id {apim_service_id} --query \\\"[?contains(properties.privateLinkServiceConnectionState.status, 'Pending')]\\\" -o json\")\n", - "\n", - "# Handle both a single object and a list of objects\n", - "pending_connections = output.json_data if output.success and output.is_json else []\n", - "\n", - "if isinstance(pending_connections, dict):\n", - " pending_connections = [pending_connections]\n", - "\n", - "total = len(pending_connections)\n", - "utils.print_info(f\"Found {total} pending private link service connection(s).\")\n", - "\n", - "if total > 0:\n", - " for i, conn in enumerate(pending_connections, 1):\n", - " conn_id = conn.get('id')\n", - " conn_name = conn.get('name', '')\n", - " utils.print_info(f\"{i}/{total}: {conn_name}\", True)\n", - "\n", - " approve_result = utils.run(\n", - " f\"az network private-endpoint-connection approve --id {conn_id} --description 'Approved'\",\n", - " f\"Private Link Connection approved: {conn_name}\",\n", - " f\"Failed to approve Private Link Connection: {conn_name}\"\n", - " )\n", - "\n", - " utils.print_ok('Private link approvals completed')\n", - "else:\n", - " utils.print_info('No pending private link service connection was found. There is nothing to approve.')" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "### โœ… 4. 
Verify API Request Success via API Management\n", - "\n", - "As we have not yet disabled public access to APIM, this request should succeed with a **200**." - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "import utils\n", - "from apimrequests import ApimRequests\n", - "from apimtesting import ApimTesting\n", - "\n", - "tests = ApimTesting(\"AFD-APIM-PE Tests (Pre-Lockdown)\", deployment, deployment)\n", - "\n", - "api_subscription_key = apim_apis[0]['subscriptionPrimaryKey']\n", - "reqs = ApimRequests(apim_gateway_url, api_subscription_key)\n", - "\n", - "utils.print_message('Calling Hello World (Root) API via API Management Gateway URL. Expect 200 (if run before disabling API Management public network access).')\n", - "output = reqs.singleGet('/')\n", - "tests.verify(output, 'Hello World from API Management!')\n", - "\n", - "tests.print_summary()\n", - "\n", - "utils.print_ok('API request via API Management completed')" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "### ๐Ÿ”’ 5. Disabling API Management public network access\n", - "\n", - "The initial `APIM` service deployment above cannot disable public network access. It must be disabled subsequently below." + "**Note:** This infrastructure includes Azure Front Door with API Management using private endpoints. The creation process occurs in two phases: an initial deployment with public access enabled plus approval of the Front Door private link connection, followed by a second deployment that disables public access." 
] }, { @@ -220,84 +22,21 @@ "import utils\n", "from apimtypes import *\n", "\n", - "# 1) Update the Bicep parameters to disable public access to APIM (we only want private endpoint ingress)\n", - "bicep_parameters['apimPublicAccess']['value'] = False\n", + "# User-defined parameters (change these as needed)\n", + "rg_location = 'eastus2' # Azure region for deployment\n", + "index = 1 # Infrastructure index (use different numbers for multiple environments)\n", + "apim_sku = APIM_SKU.STANDARDV2 # Options: 'STANDARDV2', 'PREMIUMV2' (Basic not supported for private endpoints)\n", + "use_aca = True # Include Azure Container Apps backends\n", "\n", - "# 2) Run the deployment\n", - "output = utils.create_bicep_deployment_group(rg_name, rg_location, deployment, bicep_parameters)\n", - "\n", - "# 3) Print a single, clear deployment summary if successful\n", - "if not output.success:\n", - " raise SystemExit('Deployment failed')\n", - " \n", - "if output.success and output.json_data:\n", - " afd_endpoint_url = output.get('fdeSecureUrl', 'Front Door Endpoint URL')\n", - " apim_gateway_url = output.get('apimResourceGatewayURL', 'APIM API Gateway URL')\n", - " apim_apis = output.getJson('apiOutputs', 'APIs')\n", - "\n", - "utils.print_ok('Deployment completed')\n" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "### โœ… 6. Verify API Request Success via Azure Front Door & Failure with API Management\n", + "# Create an instance of the desired infrastructure\n", + "inb_helper = utils.InfrastructureNotebookHelper(rg_location, INFRASTRUCTURE.AFD_APIM_PE, index, apim_sku) \n", + "success = inb_helper.create_infrastructure()\n", "\n", - "At this time only requests through Front Door should be successful and return a **200**. Requests to APIM that worked previously should result in a **403**." 
- ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "import utils\n", - "from apimrequests import ApimRequests\n", - "from apimtesting import ApimTesting\n", - "\n", - "tests = ApimTesting(\"AFD-APIM-PE Tests (Post-Lockdown)\", deployment, deployment)\n", - "\n", - "api_subscription_key = apim_apis[0]['subscriptionPrimaryKey']\n", - "reqsApim = ApimRequests(apim_gateway_url, api_subscription_key)\n", - "reqsAfd = ApimRequests(afd_endpoint_url, api_subscription_key)\n", - "\n", - "# 1) Unsuccessful call to APIM Gateway URL (should fail with 403 Forbidden)\n", - "output = reqsApim.singleGet('/', msg = '1) Calling Hello World (Root) API via API Management Gateway URL. Expect 403 as APIM public access is disabled now.')\n", - "outputJson = utils.get_json(output)\n", - "tests.verify(outputJson['statusCode'], 403)\n", - "\n", - "# 2) Successful call to Front Door (200)\n", - "output = reqsAfd.singleGet('/', msg = '2) Calling Hello World (Root) API via Azure Front Door. Expect 200.')\n", - "tests.verify(output, 'Hello World from API Management!')\n", - "\n", - "# 3) Successful calls to Front Door -> APIM -> ACA (200)\n", - "if use_ACA:\n", - " reqsAfd = ApimRequests(afd_endpoint_url, apim_apis[1]['subscriptionPrimaryKey'])\n", - " output = reqsAfd.singleGet('/aca-1', msg = '3) Calling Hello World (ACA 1) API via Azure Front Door. Expect 200.')\n", - " tests.verify(output, 'Hello World!')\n", - "\n", - " reqsAfd = ApimRequests(afd_endpoint_url, apim_apis[2]['subscriptionPrimaryKey'])\n", - " output = reqsAfd.singleGet('/aca-2', msg = '4) Calling Hello World (ACA 2) API via Azure Front Door. Expect 200.')\n", - " tests.verify(output, 'Hello World!')\n", - "\n", - " reqsAfd = ApimRequests(afd_endpoint_url, apim_apis[3]['subscriptionPrimaryKey'])\n", - " output = reqsAfd.singleGet('/aca-pool', msg = '5) Calling Hello World (ACA Pool) API via Azure Front Door. 
Expect 200.')\n", - " tests.verify(output, 'Hello World!')\n", + "if success:\n", + " utils.print_ok('Infrastructure creation completed successfully!')\n", "else:\n", - " utils.print_message('ACA APIs were not created. Skipping ACA API calls.', blank_above = True)\n", - "\n", - "# 4) Unsuccessful call to Front Door without API subscription key (should fail with 401 Unauthorized)\n", - "reqsNoApiSubscription = ApimRequests(afd_endpoint_url)\n", - "output = reqsNoApiSubscription.singleGet('/', msg = 'Calling Hello World (Root) API without API subscription key. Expect 401.')\n", - "outputJson = utils.get_json(output)\n", - "tests.verify(outputJson['statusCode'], 401)\n", - "tests.verify(outputJson['message'], 'Access denied due to missing subscription key. Make sure to include subscription key when making requests to an API.')\n", - "\n", - "tests.print_summary()\n", - "\n", - "utils.print_ok('All done!')" + " print(\"โŒ Infrastructure creation failed!\")\n", + " raise SystemExit(1)" ] }, { @@ -313,9 +52,9 @@ ], "metadata": { "kernelspec": { - "display_name": "APIM Samples Python 3.12", + "display_name": ".venv (3.12.10)", "language": "python", - "name": "apim-samples" + "name": "python3" }, "language_info": { "codemirror_mode": { diff --git a/infrastructure/afd-apim-pe/create_infrastructure.py b/infrastructure/afd-apim-pe/create_infrastructure.py new file mode 100644 index 0000000..e593611 --- /dev/null +++ b/infrastructure/afd-apim-pe/create_infrastructure.py @@ -0,0 +1,303 @@ +""" +Infrastructure creation module for AFD-APIM-PE. + +This module provides a reusable way to create Azure Front Door with API Management +(Private Endpoint) infrastructure that can be called from notebooks or other scripts. 
+""" + +import sys +import os +import argparse +from pathlib import Path +import utils +from apimtypes import * +import json + +def _create_afd_apim_pe_infrastructure( + rg_location: str = 'eastus2', + index: int | None = None, + apim_sku: APIM_SKU = APIM_SKU.STANDARDV2, + use_aca: bool = True, + custom_apis: list[API] | None = None, + custom_policy_fragments: list[PolicyFragment] | None = None +) -> utils.Output: + """ + Create AFD-APIM-PE infrastructure with the specified parameters. + + Args: + rg_location (str): Azure region for deployment. Defaults to 'eastus2'. + index (int | None): Index for the infrastructure. Defaults to None (no index). + apim_sku (APIM_SKU): SKU for API Management. Defaults to STANDARDV2. + use_aca (bool): Whether to include Azure Container Apps. Defaults to True. + custom_apis (list[API] | None): Custom APIs to deploy. If None, uses default Hello World API. + custom_policy_fragments (list[PolicyFragment] | None): Custom policy fragments. If None, uses defaults. + + Returns: + utils.Output: The deployment result. 
+ """ + + # 1) Setup deployment parameters + deployment = INFRASTRUCTURE.AFD_APIM_PE + rg_name = utils.get_infra_rg_name(deployment, index) + rg_tags = utils.build_infrastructure_tags(deployment) + apim_network_mode = APIMNetworkMode.EXTERNAL_VNET + + print(f"\n๐Ÿš€ Creating AFD-APIM-PE infrastructure...") + print(f" Location : {rg_location}") + print(f" Index : {index}") + print(f" Infrastructure : {deployment.value}") + print(f" APIM SKU : {apim_sku.value}") + print(f" Use ACA : {use_aca}") + print(f" Resource Group : {rg_name}\n") + + # 2) Set up the policy fragments + if custom_policy_fragments is None: + pfs: List[PolicyFragment] = [ + PolicyFragment('AuthZ-Match-All', utils.read_policy_xml(utils.determine_shared_policy_path('pf-authz-match-all.xml')), 'Authorizes if all of the specified roles match the JWT role claims.'), + PolicyFragment('AuthZ-Match-Any', utils.read_policy_xml(utils.determine_shared_policy_path('pf-authz-match-any.xml')), 'Authorizes if any of the specified roles match the JWT role claims.'), + PolicyFragment('Http-Response-200', utils.read_policy_xml(utils.determine_shared_policy_path('pf-http-response-200.xml')), 'Returns a 200 OK response for the current HTTP method.'), + PolicyFragment('Product-Match-Any', utils.read_policy_xml(utils.determine_shared_policy_path('pf-product-match-any.xml')), 'Proceeds if any of the specified products match the context product name.'), + PolicyFragment('Remove-Request-Headers', utils.read_policy_xml(utils.determine_shared_policy_path('pf-remove-request-headers.xml')), 'Removes request headers from the incoming request.') + ] + else: + pfs = custom_policy_fragments + + # 3) Define the APIs + if custom_apis is None: + # Default Hello World API + pol_hello_world = utils.read_policy_xml(HELLO_WORLD_XML_POLICY_PATH) + api_hwroot_get = GET_APIOperation('This is a GET for API 1', pol_hello_world) + api_hwroot = API('hello-world', 'Hello World', '', 'This is the root API for Hello World', operations = 
[api_hwroot_get]) + apis: List[API] = [api_hwroot] + + # If Container Apps is enabled, create the ACA APIs in APIM + if use_aca: + pol_backend = utils.read_policy_xml(BACKEND_XML_POLICY_PATH) + pol_aca_backend_1 = pol_backend.format(backend_id = 'aca-backend-1') + pol_aca_backend_2 = pol_backend.format(backend_id = 'aca-backend-2') + pol_aca_backend_pool = pol_backend.format(backend_id = 'aca-backend-pool') + + # API 1: Hello World (ACA Backend 1) + api_hwaca_1_get = GET_APIOperation('This is a GET for Hello World on ACA Backend 1') + api_hwaca_1 = API('hello-world-aca-1', 'Hello World (ACA 1)', '/aca-1', 'This is the ACA API for Backend 1', pol_aca_backend_1, [api_hwaca_1_get]) + + # API 2: Hello World (ACA Backend 2) + api_hwaca_2_get = GET_APIOperation('This is a GET for Hello World on ACA Backend 2') + api_hwaca_2 = API('hello-world-aca-2', 'Hello World (ACA 2)', '/aca-2', 'This is the ACA API for Backend 2', pol_aca_backend_2, [api_hwaca_2_get]) + + # API 3: Hello World (ACA Backend Pool) + api_hwaca_pool_get = GET_APIOperation('This is a GET for Hello World on ACA Backend Pool') + api_hwaca_pool = API('hello-world-aca-pool', 'Hello World (ACA Pool)', '/aca-pool', 'This is the ACA API for Backend Pool', pol_aca_backend_pool, [api_hwaca_pool_get]) + + # Add ACA APIs to the existing apis array + apis += [api_hwaca_1, api_hwaca_2, api_hwaca_pool] + else: + apis = custom_apis + + # 4) Define the Bicep parameters with serialized APIs + bicep_parameters = { + 'apimSku' : {'value': apim_sku.value}, + 'apis' : {'value': [api.to_dict() for api in apis]}, + 'policyFragments' : {'value': [pf.to_dict() for pf in pfs]}, + 'apimPublicAccess' : {'value': apim_network_mode in [APIMNetworkMode.PUBLIC, APIMNetworkMode.EXTERNAL_VNET]}, + 'useACA' : {'value': use_aca} + } + + # 5) Change to the infrastructure directory to ensure bicep files are found + original_cwd = os.getcwd() + infra_dir = Path(__file__).parent + + try: + os.chdir(infra_dir) + print(f"๐Ÿ“ Changed working 
directory to: {infra_dir}") + + # 6) Create the resource group if it doesn't exist + utils.create_resource_group(rg_name, rg_location, rg_tags) + + # 7) First deployment with public access enabled + print("\n๐Ÿš€ Phase 1: Creating infrastructure with public access enabled...") + output = utils.create_bicep_deployment_group(rg_name, rg_location, deployment, bicep_parameters) + + if not output.success: + print("โŒ Phase 1 deployment failed!") + return output + + # Extract service details for private link approval + if output.json_data: + apim_service_id = output.get('apimServiceId', 'APIM Service Id', suppress_logging = True) + + print("โœ… Phase 1 deployment completed successfully!") + + # 8) Approve private link connections + print("\n๐Ÿ”— Approving Front Door private link connections...") + _approve_private_link_connections(apim_service_id) + + # 9) Second deployment to disable public access + print("\n๐Ÿ”’ Phase 2: Disabling APIM public access...") + bicep_parameters['apimPublicAccess']['value'] = False + + output = utils.create_bicep_deployment_group(rg_name, rg_location, deployment, bicep_parameters) + + if output.success: + print("\nโœ… Infrastructure creation completed successfully!") + if output.json_data: + apim_gateway_url = output.get('apimResourceGatewayURL', 'APIM API Gateway URL', suppress_logging = True) + afd_endpoint_url = output.get('fdeSecureUrl', 'Front Door Endpoint URL', suppress_logging = True) + apim_apis = output.getJson('apiOutputs', 'APIs', suppress_logging = True) + + print(f"\n๐Ÿ“‹ Infrastructure Details:") + print(f" Resource Group : {rg_name}") + print(f" Location : {rg_location}") + print(f" APIM SKU : {apim_sku.value}") + print(f" Use ACA : {use_aca}") + print(f" Gateway URL : {apim_gateway_url}") + print(f" Front Door URL : {afd_endpoint_url}") + print(f" APIs Created : {len(apim_apis)}") + + # Perform basic verification + _verify_infrastructure(rg_name, afd_endpoint_url, apim_gateway_url, use_aca) + else: + print("โŒ Phase 2 
deployment failed!") + + return output + + finally: + # Always restore the original working directory + os.chdir(original_cwd) + print(f"๐Ÿ“ Restored working directory to: {original_cwd}") + +def _approve_private_link_connections(apim_service_id: str) -> None: + """ + Approve pending private link connections for the APIM service. + + Args: + apim_service_id (str): The resource ID of the APIM service. + """ + + # Get all pending private endpoint connections as JSON + output = utils.run(f"az network private-endpoint-connection list --id {apim_service_id} --query \"[?contains(properties.privateLinkServiceConnectionState.status, 'Pending')]\" -o json", print_command_to_run = False) + + # Handle both a single object and a list of objects + pending_connections = output.json_data if output.success and output.is_json else [] + + if isinstance(pending_connections, dict): + pending_connections = [pending_connections] + + total = len(pending_connections) + print(f"Found {total} pending private link service connection(s).") + + if total > 0: + for i, conn in enumerate(pending_connections, 1): + conn_id = conn.get('id') + conn_name = conn.get('name', '') + print(f" {i}/{total}: Approving {conn_name}") + + approve_result = utils.run( + f"az network private-endpoint-connection approve --id {conn_id} --description 'Approved'", + f"Private Link Connection approved: {conn_name}", + f"Failed to approve Private Link Connection: {conn_name}", + print_command_to_run = False + ) + + print("โœ… Private link approvals completed") + else: + print("No pending private link service connections found. Nothing to approve.") + +def _verify_infrastructure(rg_name: str, afd_endpoint_url: str, apim_gateway_url: str, use_aca: bool) -> bool: + """ + Verify that the infrastructure was created successfully. + + Args: + rg_name (str): Resource group name. + afd_endpoint_url (str): Azure Front Door endpoint URL. + apim_gateway_url (str): API Management gateway URL. 
+ use_aca (bool): Whether Container Apps were included. + + Returns: + bool: True if verification passed, False otherwise. + """ + + print("\n๐Ÿ” Verifying infrastructure...") + + try: + # Check if the resource group exists + if not utils.does_resource_group_exist(rg_name): + print("โŒ Resource group does not exist!") + return False + + print("โœ… Resource group verified") + + # Get APIM service details + output = utils.run(f'az apim list -g {rg_name} --query "[0]" -o json', print_command_to_run = False, print_errors = False) + + if output.success and output.json_data: + apim_name = output.json_data.get('name') + print(f"โœ… APIM Service verified: {apim_name}") + + # Check Front Door + afd_output = utils.run(f'az afd profile list -g {rg_name} --query "[0]" -o json', print_command_to_run = False, print_errors = False) + + if afd_output.success and afd_output.json_data: + afd_name = afd_output.json_data.get('name') + print(f"โœ… Azure Front Door verified: {afd_name}") + + # Check Container Apps if enabled + if use_aca: + aca_output = utils.run(f'az containerapp list -g {rg_name} --query "length(@)"', print_command_to_run = False, print_errors = False) + + if aca_output.success: + aca_count = int(aca_output.text.strip()) + print(f"โœ… Container Apps verified: {aca_count} app(s) created") + + print("\n๐ŸŽ‰ Infrastructure verification completed successfully!") + return True + + else: + print("\nโŒ APIM service not found!") + return False + + except Exception as e: + print(f"\nโš ๏ธ Verification failed with error: {str(e)}") + return False + +def main(): + """ + Main entry point for command-line usage. 
+ """ + + parser = argparse.ArgumentParser(description='Create AFD-APIM-PE infrastructure') + parser.add_argument('--location', default='eastus2', help='Azure region (default: eastus2)') + parser.add_argument('--index', type=int, help='Infrastructure index') + parser.add_argument('--sku', choices=['Standardv2', 'Premiumv2'], default='Standardv2', help='APIM SKU (default: Standardv2)') + parser.add_argument('--no-aca', action='store_true', help='Disable Azure Container Apps') + + args = parser.parse_args() + + # Convert SKU string to enum + sku_map = { + 'Standardv2': APIM_SKU.STANDARDV2, + 'Premiumv2': APIM_SKU.PREMIUMV2 + } + + try: + result = _create_afd_apim_pe_infrastructure( + rg_location = args.location, + index = args.index, + apim_sku = sku_map[args.sku], + use_aca = not args.no_aca + ) + + if result.success: + print("\n๐ŸŽ‰ Infrastructure creation completed successfully!") + sys.exit(0) + else: + print("\n๐Ÿ’ฅ Infrastructure creation failed!") + sys.exit(1) + + except Exception as e: + print(f"\n๐Ÿ’ฅ Error: {str(e)}") + sys.exit(1) + + +if __name__ == '__main__': + main() diff --git a/infrastructure/apim-aca/create.ipynb b/infrastructure/apim-aca/create.ipynb index 4ff47e8..2ae9308 100644 --- a/infrastructure/apim-aca/create.ipynb +++ b/infrastructure/apim-aca/create.ipynb @@ -4,11 +4,11 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "### ๐Ÿ› ๏ธ 1. Initialize notebook variables\n", + "### ๐Ÿ› ๏ธ Configure Infrastructure Parameters & Create the Infrastructure\n", "\n", - "Configures everything that's needed for deployment. \n", + "Set your desired parameters for the APIM-ACA infrastructure deployment.\n", "\n", - "โ—๏ธ **Modify entries under _1) User-defined parameters_**." + "โ—๏ธ **Modify entries under _User-defined parameters_**." 
] }, { @@ -20,147 +20,21 @@ "import utils\n", "from apimtypes import *\n", "\n", - "# 1) User-defined parameters (change these as needed)\n", - "rg_location = 'eastus2'\n", - "index = 1\n", - "apim_sku = APIM_SKU.BASICV2\n", - "deployment = INFRASTRUCTURE.APIM_ACA\n", - "reveal_backend = True # Set to True to reveal the backend details in the API operations\n", - "\n", - "# 2) Service-defined parameters (please do not change these)\n", - "rg_name = utils.get_infra_rg_name(deployment, index)\n", - "rg_tags = utils.build_infrastructure_tags(deployment)\n", - "\n", - "# 3) Set up the policy fragments\n", - "pfs: List[PolicyFragment] = [\n", - " PolicyFragment('AuthZ-Match-All', utils.read_policy_xml(utils.determine_shared_policy_path('pf-authz-match-all.xml')), 'Authorizes if all of the specified roles match the JWT role claims.'),\n", - " PolicyFragment('AuthZ-Match-Any', utils.read_policy_xml(utils.determine_shared_policy_path('pf-authz-match-any.xml')), 'Authorizes if any of the specified roles match the JWT role claims.'),\n", - " PolicyFragment('Http-Response-200', utils.read_policy_xml(utils.determine_shared_policy_path('pf-http-response-200.xml')), 'Returns a 200 OK response for the current HTTP method.'),\n", - " PolicyFragment('Product-Match-Any', utils.read_policy_xml(utils.determine_shared_policy_path('pf-product-match-any.xml')), 'Proceeds if any of the specified products match the context product name.'),\n", - " PolicyFragment('Remove-Request-Headers', utils.read_policy_xml(utils.determine_shared_policy_path('pf-remove-request-headers.xml')), 'Removes request headers from the incoming request.')\n", - "]\n", - "\n", - "# 4) Define the APIs and their operations and policies\n", - "\n", - "# Policies\n", - "pol_hello_world = utils.read_policy_xml(HELLO_WORLD_XML_POLICY_PATH)\n", - "pol_backend = utils.read_policy_xml(BACKEND_XML_POLICY_PATH)\n", - "pol_aca_backend_1 = pol_backend.format(backend_id = 'aca-backend-1')\n", - "pol_aca_backend_2 = 
pol_backend.format(backend_id = 'aca-backend-2')\n", - "pol_aca_backend_pool = pol_backend.format(backend_id = 'aca-backend-pool')\n", - "\n", - "# Hello World (Root)\n", - "api_hwroot_get = GET_APIOperation('This is a GET for Hello World in the root', pol_hello_world)\n", - "api_hwroot = API('hello-world', 'Hello World', '', 'This is the root API for Hello World', operations = [api_hwroot_get])\n", - "\n", - "# Hello World (ACA Backend 1)\n", - "api_hwaca_1_get = GET_APIOperation('This is a GET for Hello World on ACA Backend 1')\n", - "api_hwaca_1 = API('hello-world-aca-1', 'Hello World (ACA 1)', '/aca-1', 'This is the ACA API for Backend 1', policyXml = pol_aca_backend_1, operations = [api_hwaca_1_get])\n", - "\n", - "# Hello World (ACA Backend 2)\n", - "api_hwaca_2_get = GET_APIOperation('This is a GET for Hello World on ACA Backend 2')\n", - "api_hwaca_2 = API('hello-world-aca-2', 'Hello World (ACA 2)', '/aca-2', 'This is the ACA API for Backend 2', policyXml = pol_aca_backend_2, operations = [api_hwaca_2_get])\n", - "\n", - "# Hello World (ACA Backend Pool)\n", - "api_hwaca_pool_get = GET_APIOperation('This is a GET for Hello World on ACA Backend Pool')\n", - "api_hwaca_pool = API('hello-world-aca-pool', 'Hello World (ACA Pool)', '/aca-pool', 'This is the ACA API for Backend Pool', policyXml = pol_aca_backend_pool, operations = [api_hwaca_pool_get])\n", - "\n", - "# APIs Array\n", - "apis: List[API] = [api_hwroot, api_hwaca_1, api_hwaca_2, api_hwaca_pool]\n", - "\n", - "utils.print_ok('Notebook initialized')" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "### ๐Ÿš€ 2. Create deployment using Bicep\n", - "\n", - "Creates the bicep deployment into the previously-specified resource group. A bicep parameters file will be created prior to execution." 
- ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "import utils\n", - "\n", - "# 1) Define the Bicep parameters with serialized APIs\n", - "bicep_parameters = {\n", - " 'apimSku' : {'value': apim_sku.value},\n", - " 'apis' : {'value': [api.to_dict() for api in apis]},\n", - " 'policyFragments' : {'value': [pf.to_dict() for pf in pfs]},\n", - " 'revealBackendApiInfo' : {'value:': reveal_backend} \n", - "}\n", - "\n", - "# 2) Run the deployment\n", - "output = utils.create_bicep_deployment_group(rg_name, rg_location, deployment, bicep_parameters, rg_tags = rg_tags)\n", - "\n", - "# 3) Check the deployment outputs\n", - "if not output.success:\n", - " raise SystemExit('Deployment failed')\n", - "\n", - "if output.success and output.json_data:\n", - " apim_gateway_url = output.get('apimResourceGatewayURL', 'APIM API Gateway URL')\n", - " aca_url_1 = output.get('acaUrl1', 'ACA Backend 1 URL')\n", - " aca_url_2 = output.get('acaUrl2', 'ACA Backend 2 URL')\n", - " apim_apis = output.getJson('apiOutputs', 'APIs')\n", - "\n", - "utils.print_ok('Deployment completed')" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "### โœ… 3. Verify API Request Success\n", - "\n", - "Assert that the deployment was successful by making simple calls to APIM. 
" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "import utils\n", - "from apimrequests import ApimRequests\n", - "from apimtesting import ApimTesting\n", - "\n", - "tests = ApimTesting(\"APIM-ACA Tests\", deployment, deployment)\n", - "\n", - "reqs = ApimRequests(apim_gateway_url, apim_apis[0]['subscriptionPrimaryKey'])\n", - "output = reqs.singleGet('/', msg = 'Calling Hello World (Root) API')\n", - "tests.verify(output, 'Hello World from API Management!')\n", - "\n", - "reqs = ApimRequests(apim_gateway_url, apim_apis[1]['subscriptionPrimaryKey'])\n", - "output = reqs.singleGet('/aca-1/', msg = 'Calling Hello World (ACA Backend 1) API')\n", - "tests.verify(output, 'Hello World!')\n", - "\n", - "reqs = ApimRequests(apim_gateway_url, apim_apis[2]['subscriptionPrimaryKey'])\n", - "output = reqs.singleGet('/aca-2/', msg = 'Calling Hello World (ACA Backend 2) API')\n", - "tests.verify(output, 'Hello World!')\n", - "\n", - "reqs = ApimRequests(apim_gateway_url, apim_apis[3]['subscriptionPrimaryKey'])\n", - "output = reqs.multiGet('/aca-pool/', 3, msg = 'Calling Hello World (ACA Backend Pool) API')\n", - "tests.verify(len(output), 3)\n", - "tests.verify(output[0]['response'], 'Hello World!')\n", - "tests.verify(output[1]['response'], 'Hello World!')\n", - "tests.verify(output[2]['response'], 'Hello World!')\n", - "\n", - "reqsNoApiSubscription = ApimRequests(apim_gateway_url)\n", - "output = reqsNoApiSubscription.singleGet('/', msg = 'Calling Hello World (Root) API without API subscription key. Expect 401.')\n", - "outputJson = utils.get_json(output)\n", - "tests.verify(outputJson['statusCode'], 401)\n", - "tests.verify(outputJson['message'], 'Access denied due to missing subscription key. 
Make sure to include subscription key when making requests to an API.')\n", - "\n", - "tests.print_summary()\n", - "\n", - "utils.print_ok('All done!')" + "# User-defined parameters (change these as needed)\n", + "rg_location = 'eastus2' # Azure region for deployment\n", + "index = 1 # Infrastructure index (use different numbers for multiple environments)\n", + "apim_sku = APIM_SKU.BASICV2 # Options: 'BASICV2', 'STANDARDV2', 'PREMIUMV2'\n", + "reveal_backend = True # Set to True to reveal the backend details in the API operations\n", + "\n", + "# Create an instance of the desired infrastructure\n", + "inb_helper = utils.InfrastructureNotebookHelper(rg_location, INFRASTRUCTURE.APIM_ACA, index, apim_sku) \n", + "success = inb_helper.create_infrastructure()\n", + "\n", + "if success:\n", + " utils.print_ok('Infrastructure creation completed successfully!')\n", + "else:\n", + " print(\"โŒ Infrastructure creation failed!\")\n", + " raise SystemExit(1)" ] }, { diff --git a/infrastructure/apim-aca/create_infrastructure.py b/infrastructure/apim-aca/create_infrastructure.py new file mode 100644 index 0000000..19fcfd1 --- /dev/null +++ b/infrastructure/apim-aca/create_infrastructure.py @@ -0,0 +1,275 @@ +""" +Infrastructure creation module for APIM-ACA. + +This module provides a reusable way to create API Management with Azure Container Apps +infrastructure that can be called from notebooks or other scripts. +""" + +import sys +import os +import argparse +from pathlib import Path +import utils +from apimtypes import * +import json + +def _create_apim_aca_infrastructure( + rg_location: str = 'eastus2', + index: int | None = None, + apim_sku: APIM_SKU = APIM_SKU.BASICV2, + reveal_backend: bool = True, + custom_apis: list[API] | None = None, + custom_policy_fragments: list[PolicyFragment] | None = None +) -> utils.Output: + """ + Create APIM-ACA infrastructure with the specified parameters. + + Args: + rg_location (str): Azure region for deployment. Defaults to 'eastus2'. 
"""
Infrastructure creation module for APIM-ACA.

This module provides a reusable way to create API Management with Azure
Container Apps infrastructure that can be called from notebooks or other
scripts.
"""

import argparse
import json
import os
import sys
from pathlib import Path

import utils
from apimtypes import *


def _create_apim_aca_infrastructure(
    rg_location: str = 'eastus2',
    index: int | None = None,
    apim_sku: APIM_SKU = APIM_SKU.BASICV2,
    reveal_backend: bool = True,
    custom_apis: list[API] | None = None,
    custom_policy_fragments: list[PolicyFragment] | None = None
) -> utils.Output:
    """
    Create APIM-ACA infrastructure with the specified parameters.

    Args:
        rg_location (str): Azure region for deployment. Defaults to 'eastus2'.
        index (int | None): Index for the infrastructure. Defaults to None (no index).
        apim_sku (APIM_SKU): SKU for API Management. Defaults to BASICV2.
        reveal_backend (bool): Whether to reveal backend details in API operations. Defaults to True.
        custom_apis (list[API] | None): Custom APIs to deploy. If None, uses the default Hello World APIs.
        custom_policy_fragments (list[PolicyFragment] | None): Custom policy fragments. If None, uses defaults.

    Returns:
        utils.Output: The deployment result.
    """

    # 1) Set up deployment parameters
    deployment = INFRASTRUCTURE.APIM_ACA
    rg_name = utils.get_infra_rg_name(deployment, index)
    rg_tags = utils.build_infrastructure_tags(deployment)

    print('\n๐Ÿš€ Creating APIM-ACA infrastructure...')
    print(f'   Location         : {rg_location}')
    print(f'   Index            : {index}')
    print(f'   Infrastructure   : {deployment.value}')
    print(f'   APIM SKU         : {apim_sku.value}')
    print(f'   Reveal Backend   : {reveal_backend}')
    print(f'   Resource Group   : {rg_name}\n')

    # 2) Policy fragments (defaults mirror the shared fragment library)
    if custom_policy_fragments is None:
        pfs: List[PolicyFragment] = [
            PolicyFragment('AuthZ-Match-All', utils.read_policy_xml(utils.determine_shared_policy_path('pf-authz-match-all.xml')), 'Authorizes if all of the specified roles match the JWT role claims.'),
            PolicyFragment('AuthZ-Match-Any', utils.read_policy_xml(utils.determine_shared_policy_path('pf-authz-match-any.xml')), 'Authorizes if any of the specified roles match the JWT role claims.'),
            PolicyFragment('Http-Response-200', utils.read_policy_xml(utils.determine_shared_policy_path('pf-http-response-200.xml')), 'Returns a 200 OK response for the current HTTP method.'),
            PolicyFragment('Product-Match-Any', utils.read_policy_xml(utils.determine_shared_policy_path('pf-product-match-any.xml')), 'Proceeds if any of the specified products match the context product name.'),
            PolicyFragment('Remove-Request-Headers', utils.read_policy_xml(utils.determine_shared_policy_path('pf-remove-request-headers.xml')), 'Removes request headers from the incoming request.')
        ]
    else:
        pfs = custom_policy_fragments

    # 3) APIs (defaults exercise the root API, two ACA backends, and a backend pool)
    if custom_apis is None:
        pol_hello_world = utils.read_policy_xml(HELLO_WORLD_XML_POLICY_PATH)
        pol_backend = utils.read_policy_xml(BACKEND_XML_POLICY_PATH)
        pol_aca_backend_1 = pol_backend.format(backend_id = 'aca-backend-1')
        pol_aca_backend_2 = pol_backend.format(backend_id = 'aca-backend-2')
        pol_aca_backend_pool = pol_backend.format(backend_id = 'aca-backend-pool')

        # Hello World (Root)
        api_hwroot_get = GET_APIOperation('This is a GET for Hello World in the root', pol_hello_world)
        api_hwroot = API('hello-world', 'Hello World', '', 'This is the root API for Hello World', operations = [api_hwroot_get])

        # Hello World (ACA Backend 1)
        api_hwaca_1_get = GET_APIOperation('This is a GET for Hello World on ACA Backend 1')
        api_hwaca_1 = API('hello-world-aca-1', 'Hello World (ACA 1)', '/aca-1', 'This is the ACA API for Backend 1', policyXml = pol_aca_backend_1, operations = [api_hwaca_1_get])

        # Hello World (ACA Backend 2)
        api_hwaca_2_get = GET_APIOperation('This is a GET for Hello World on ACA Backend 2')
        api_hwaca_2 = API('hello-world-aca-2', 'Hello World (ACA 2)', '/aca-2', 'This is the ACA API for Backend 2', policyXml = pol_aca_backend_2, operations = [api_hwaca_2_get])

        # Hello World (ACA Backend Pool)
        api_hwaca_pool_get = GET_APIOperation('This is a GET for Hello World on ACA Backend Pool')
        api_hwaca_pool = API('hello-world-aca-pool', 'Hello World (ACA Pool)', '/aca-pool', 'This is the ACA API for Backend Pool', policyXml = pol_aca_backend_pool, operations = [api_hwaca_pool_get])

        apis: List[API] = [api_hwroot, api_hwaca_1, api_hwaca_2, api_hwaca_pool]
    else:
        apis = custom_apis

    # 4) Bicep parameters with serialized APIs and policy fragments
    bicep_parameters = {
        'apimSku'              : {'value': apim_sku.value},
        'apis'                 : {'value': [api.to_dict() for api in apis]},
        'policyFragments'      : {'value': [pf.to_dict() for pf in pfs]},
        'revealBackendApiInfo' : {'value': reveal_backend}
    }

    # 5) Run the deployment from the infrastructure directory so the bicep
    #    template and parameters file are found regardless of the caller's CWD.
    original_cwd = os.getcwd()
    infra_dir = Path(__file__).parent

    try:
        os.chdir(infra_dir)
        print(f'๐Ÿ“ Changed working directory to: {infra_dir}')

        bicep_parameters_format = {
            '$schema': 'https://schema.management.azure.com/schemas/2019-04-01/deploymentParameters.json#',
            'contentVersion': '1.0.0.0',
            'parameters': bicep_parameters
        }

        # Write the ARM deployment parameters file next to main.bicep.
        params_file_path = infra_dir / 'params.json'

        with open(params_file_path, 'w') as file:
            json.dump(bicep_parameters_format, file)

        print("๐Ÿ“ Wrote the bicep deployment parameters file 'params.json'")

        # Create the resource group if it doesn't exist, then deploy.
        utils.create_resource_group(rg_name, rg_location, rg_tags)

        main_bicep_path = infra_dir / 'main.bicep'
        output = utils.run(
            f'az deployment group create --name {deployment.value} --resource-group {rg_name} --template-file "{main_bicep_path}" --parameters "{params_file_path}" --query "properties.outputs"',
            f"Deployment '{deployment.value}' succeeded",
            f"Deployment '{deployment.value}' failed."
        )

        # 6) Report the results and run a basic post-deployment verification.
        if output.success:
            print('\nโœ… Infrastructure creation completed successfully!')

            if output.json_data:
                apim_gateway_url = output.get('apimResourceGatewayURL', 'APIM API Gateway URL', suppress_logging = True)
                aca_url_1 = output.get('acaUrl1', 'ACA Backend 1 URL', suppress_logging = True)
                aca_url_2 = output.get('acaUrl2', 'ACA Backend 2 URL', suppress_logging = True)
                apim_apis = output.getJson('apiOutputs', 'APIs', suppress_logging = True)

                print('\n๐Ÿ“‹ Infrastructure Details:')
                print(f'   Resource Group   : {rg_name}')
                print(f'   Location         : {rg_location}')
                print(f'   APIM SKU         : {apim_sku.value}')
                print(f'   Reveal Backend   : {reveal_backend}')
                print(f'   Gateway URL      : {apim_gateway_url}')
                print(f'   ACA Backend 1    : {aca_url_1}')
                print(f'   ACA Backend 2    : {aca_url_2}')
                print(f'   APIs Created     : {len(apim_apis)}')

                _verify_infrastructure(rg_name, apim_gateway_url)
        else:
            print('โŒ Infrastructure creation failed!')

        return output

    finally:
        # Always restore the caller's working directory, even on failure.
        os.chdir(original_cwd)
        print(f'๐Ÿ“ Restored working directory to: {original_cwd}')


def _verify_infrastructure(rg_name: str, apim_gateway_url: str) -> bool:
    """
    Verify that the infrastructure was created successfully.

    Args:
        rg_name (str): Resource group name.
        apim_gateway_url (str): API Management gateway URL (informational).

    Returns:
        bool: True if verification passed, False otherwise.
    """

    print('\n๐Ÿ” Verifying infrastructure...')

    try:
        # Check that the resource group exists.
        if not utils.does_resource_group_exist(rg_name):
            print('โŒ Resource group does not exist!')
            return False

        print('โœ… Resource group verified')

        # Locate the APIM service in the resource group.
        output = utils.run(f'az apim list -g {rg_name} --query "[0]" -o json', print_command_to_run = False, print_errors = False)

        if not (output.success and output.json_data):
            print('\nโŒ APIM service not found!')
            return False

        apim_name = output.json_data.get('name')
        print(f'โœ… APIM Service verified: {apim_name}')

        # Count the Container Apps that were created.
        aca_output = utils.run(f'az containerapp list -g {rg_name} --query "length(@)"', print_command_to_run = False, print_errors = False)

        if aca_output.success:
            aca_count = int(aca_output.text.strip())
            print(f'โœ… Container Apps verified: {aca_count} app(s) created')

        # Count the APIs that were created on the APIM service.
        api_output = utils.run(f'az apim api list --service-name {apim_name} -g {rg_name} --query "length(@)"',
                               print_command_to_run = False, print_errors = False)

        if api_output.success:
            api_count = int(api_output.text.strip())
            print(f'โœ… APIs verified: {api_count} API(s) created')

            # Optional: confirm a subscription key exists for later API testing.
            if api_count > 0:
                try:
                    sub_output = utils.run(f'az apim subscription list --service-name {apim_name} -g {rg_name} --query "[0].primaryKey" -o tsv',
                                           print_command_to_run = False, print_errors = False)

                    if sub_output.success and sub_output.text.strip():
                        print('โœ… Subscription key available for API testing')
                except Exception:
                    # Best-effort check only; its failure must not fail verification.
                    pass

        print('\n๐ŸŽ‰ Infrastructure verification completed successfully!')
        return True

    except Exception as e:
        print(f'\nโš ๏ธ Verification failed with error: {e}')
        return False


def main() -> None:
    """Command-line entry point: parse arguments and create the infrastructure."""

    parser = argparse.ArgumentParser(description = 'Create APIM-ACA infrastructure')
    parser.add_argument('--location', default = 'eastus2', help = 'Azure region (default: eastus2)')
    parser.add_argument('--index', type = int, help = 'Infrastructure index')
    parser.add_argument('--sku', choices = ['Basicv2', 'Standardv2', 'Premiumv2'], default = 'Basicv2', help = 'APIM SKU (default: Basicv2)')
    parser.add_argument('--no-reveal-backend', action = 'store_true', help = 'Do not reveal backend details in API operations')

    args = parser.parse_args()

    # Map the CLI SKU string onto the APIM_SKU enum.
    sku_map = {
        'Basicv2'    : APIM_SKU.BASICV2,
        'Standardv2' : APIM_SKU.STANDARDV2,
        'Premiumv2'  : APIM_SKU.PREMIUMV2
    }

    try:
        result = _create_apim_aca_infrastructure(
            rg_location = args.location,
            index = args.index,
            apim_sku = sku_map[args.sku],
            reveal_backend = not args.no_reveal_backend
        )
    except Exception as e:
        print(f'\n๐Ÿ’ฅ Error: {e}')
        sys.exit(1)

    if result.success:
        print('\n๐ŸŽ‰ Infrastructure creation completed successfully!')
        sys.exit(0)

    print('\n๐Ÿ’ฅ Infrastructure creation failed!')
    sys.exit(1)


if __name__ == '__main__':
    main()
Initialize notebook variables\n", + "### ๐Ÿ› ๏ธ Configure Infrastructure Parameters & Create the Infrastructure\n", "\n", - "Configures everything that's needed for deployment. \n", + "Set your desired parameters for the Simple APIM infrastructure deployment.\n", "\n", - "โ—๏ธ **Modify entries under _1) User-defined parameters_**." + "โ—๏ธ **Modify entries under _User-defined parameters_**." ] }, { @@ -20,116 +20,20 @@ "import utils\n", "from apimtypes import *\n", "\n", - "# 1) User-defined parameters (change these as needed)\n", - "rg_location = 'eastus2'\n", - "index = 1\n", - "apim_sku = APIM_SKU.BASICV2\n", - "deployment = INFRASTRUCTURE.SIMPLE_APIM\n", - "reveal_backend = True # Set to True to reveal the backend details in the API operations\n", + "# User-defined parameters (change these as needed)\n", + "rg_location = 'eastus2' # Azure region for deployment\n", + "index = 1 # Infrastructure index (use different numbers for multiple environments)\n", + "apim_sku = APIM_SKU.BASICV2 # Options: 'BASICV2', 'STANDARDV2', 'PREMIUMV2'\n", "\n", - "# 2) Service-defined parameters (please do not change these)\n", - "rg_name = utils.get_infra_rg_name(deployment, index)\n", - "rg_tags = utils.build_infrastructure_tags(deployment)\n", + "# Create an instance of the desired infrastructure\n", + "inb_helper = utils.InfrastructureNotebookHelper(rg_location, INFRASTRUCTURE.SIMPLE_APIM, index, apim_sku) \n", + "success = inb_helper.create_infrastructure()\n", "\n", - "# 3) Set up the policy fragments\n", - "pfs: List[PolicyFragment] = [\n", - " PolicyFragment('AuthZ-Match-All', utils.read_policy_xml(utils.determine_shared_policy_path('pf-authz-match-all.xml')), 'Authorizes if all of the specified roles match the JWT role claims.'),\n", - " PolicyFragment('AuthZ-Match-Any', utils.read_policy_xml(utils.determine_shared_policy_path('pf-authz-match-any.xml')), 'Authorizes if any of the specified roles match the JWT role claims.'),\n", - " PolicyFragment('Http-Response-200', 
utils.read_policy_xml(utils.determine_shared_policy_path('pf-http-response-200.xml')), 'Returns a 200 OK response for the current HTTP method.'),\n", - " PolicyFragment('Product-Match-Any', utils.read_policy_xml(utils.determine_shared_policy_path('pf-product-match-any.xml')), 'Proceeds if any of the specified products match the context product name.'),\n", - " PolicyFragment('Remove-Request-Headers', utils.read_policy_xml(utils.determine_shared_policy_path('pf-remove-request-headers.xml')), 'Removes request headers from the incoming request.')\n", - "]\n", - "\n", - "# 4) Define the APIs and their operations and policies\n", - "\n", - "# Policies\n", - "pol_hello_world = utils.read_policy_xml(HELLO_WORLD_XML_POLICY_PATH)\n", - "\n", - "# Hello World (Root)\n", - "api_hwroot_get = GET_APIOperation('This is a GET for API 1', pol_hello_world)\n", - "api_hwroot = API('hello-world', 'Hello World', '', 'This is the root API for Hello World', operations = [api_hwroot_get])\n", - "\n", - "# APIs Array\n", - "apis: List[API] = [api_hwroot]\n", - "\n", - "utils.print_ok('Notebook initialized')" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "### ๐Ÿš€ 2. Create deployment using Bicep\n", - "\n", - "Creates the bicep deployment into the previously-specified resource group. A bicep parameters file will be created prior to execution." 
- ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "import utils\n", - "\n", - "# 1) Define the Bicep parameters with serialized APIs\n", - "bicep_parameters = {\n", - " 'apimSku' : {'value': apim_sku.value},\n", - " 'apis' : {'value': [api.to_dict() for api in apis]},\n", - " 'policyFragments' : {'value': [pf.to_dict() for pf in pfs]},\n", - " 'revealBackendApiInfo' : {'value:': reveal_backend}\n", - "}\n", - "\n", - "# 2) Run the deployment\n", - "output = utils.create_bicep_deployment_group(rg_name, rg_location, deployment, bicep_parameters, rg_tags = rg_tags)\n", - "\n", - "# 3) Check the deployment outputs\n", - "if not output.success:\n", - " raise SystemExit('Deployment failed')\n", - "\n", - "if output.success and output.json_data:\n", - " apim_gateway_url = output.get('apimResourceGatewayURL', 'APIM API Gateway URL')\n", - " apim_apis = output.getJson('apiOutputs', 'APIs')\n", - "\n", - "utils.print_ok('Deployment completed')" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "### โœ… 3. Verify API Request Success\n", - "\n", - "Assert that the deployment was successful by making simple calls to APIM. " - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "import utils\n", - "from apimrequests import ApimRequests\n", - "from apimtesting import ApimTesting\n", - "\n", - "tests = ApimTesting(\"Simple APIM Tests\", deployment, deployment)\n", - "\n", - "api_subscription_key = apim_apis[0]['subscriptionPrimaryKey']\n", - "reqs = ApimRequests(apim_gateway_url, api_subscription_key)\n", - "\n", - "output = reqs.singleGet('/', msg = 'Calling Hello World (Root) API. 
Expect 200.')\n", - "tests.verify(output, 'Hello World from API Management!')\n", - "\n", - "reqsNoApiSubscription = ApimRequests(apim_gateway_url)\n", - "output = reqsNoApiSubscription.singleGet('/', msg = 'Calling Hello World (Root) API without API subscription key. Expect 401.')\n", - "outputJson = utils.get_json(output)\n", - "tests.verify(outputJson['statusCode'], 401)\n", - "tests.verify(outputJson['message'], 'Access denied due to missing subscription key. Make sure to include subscription key when making requests to an API.')\n", - "\n", - "tests.print_summary()\n", - "\n", - "utils.print_ok('All done!')" + "if success:\n", + " utils.print_ok('Infrastructure creation completed successfully!')\n", + "else:\n", + " print(\"โŒ Infrastructure creation failed!\")\n", + " raise SystemExit(1)" ] }, { diff --git a/infrastructure/simple-apim/create_infrastructure.py b/infrastructure/simple-apim/create_infrastructure.py new file mode 100644 index 0000000..43c9452 --- /dev/null +++ b/infrastructure/simple-apim/create_infrastructure.py @@ -0,0 +1,243 @@ +""" +Infrastructure creation module for Simple APIM. + +This module provides a reusable way to create Simple APIM infrastructure +that can be called from notebooks or other scripts. +""" + +import sys +import os +import argparse +from pathlib import Path +import utils +from apimtypes import * +import json + +def _create_simple_apim_infrastructure( + rg_location: str = 'eastus2', + index: int | None = None, + apim_sku: APIM_SKU = APIM_SKU.BASICV2, + custom_apis: list[API] | None = None, + custom_policy_fragments: list[PolicyFragment] | None = None +) -> utils.Output: + """ + Create Simple APIM infrastructure with the specified parameters. + + Args: + rg_location (str): Azure region for deployment. Defaults to 'eastus2'. + index (int | None): Index for the infrastructure. Defaults to None (no index). + apim_sku (APIM_SKU): SKU for API Management. Defaults to BASICV2. 
"""
Infrastructure creation module for Simple APIM.

This module provides a reusable way to create Simple APIM infrastructure
that can be called from notebooks or other scripts.
"""

import argparse
import json
import os
import sys
from pathlib import Path

import utils
from apimtypes import *


def _create_simple_apim_infrastructure(
    rg_location: str = 'eastus2',
    index: int | None = None,
    apim_sku: APIM_SKU = APIM_SKU.BASICV2,
    custom_apis: list[API] | None = None,
    custom_policy_fragments: list[PolicyFragment] | None = None
) -> utils.Output:
    """
    Create Simple APIM infrastructure with the specified parameters.

    Args:
        rg_location (str): Azure region for deployment. Defaults to 'eastus2'.
        index (int | None): Index for the infrastructure. Defaults to None (no index).
        apim_sku (APIM_SKU): SKU for API Management. Defaults to BASICV2.
        custom_apis (list[API] | None): Custom APIs to deploy. If None, uses the default Hello World API.
        custom_policy_fragments (list[PolicyFragment] | None): Custom policy fragments. If None, uses defaults.

    Returns:
        utils.Output: The deployment result.
    """

    # 1) Set up deployment parameters
    deployment = INFRASTRUCTURE.SIMPLE_APIM
    rg_name = utils.get_infra_rg_name(deployment, index)
    rg_tags = utils.build_infrastructure_tags(deployment)

    print('\n๐Ÿš€ Creating Simple APIM infrastructure...')
    print(f'   Location         : {rg_location}')
    print(f'   Index            : {index}')
    print(f'   Infrastructure   : {deployment.value}')
    print(f'   APIM SKU         : {apim_sku.value}')
    print(f'   Resource Group   : {rg_name}\n')

    # 2) Policy fragments (defaults mirror the shared fragment library)
    if custom_policy_fragments is None:
        pfs: List[PolicyFragment] = [
            PolicyFragment('AuthZ-Match-All', utils.read_policy_xml(utils.determine_shared_policy_path('pf-authz-match-all.xml')), 'Authorizes if all of the specified roles match the JWT role claims.'),
            PolicyFragment('AuthZ-Match-Any', utils.read_policy_xml(utils.determine_shared_policy_path('pf-authz-match-any.xml')), 'Authorizes if any of the specified roles match the JWT role claims.'),
            PolicyFragment('Http-Response-200', utils.read_policy_xml(utils.determine_shared_policy_path('pf-http-response-200.xml')), 'Returns a 200 OK response for the current HTTP method.'),
            PolicyFragment('Product-Match-Any', utils.read_policy_xml(utils.determine_shared_policy_path('pf-product-match-any.xml')), 'Proceeds if any of the specified products match the context product name.'),
            PolicyFragment('Remove-Request-Headers', utils.read_policy_xml(utils.determine_shared_policy_path('pf-remove-request-headers.xml')), 'Removes request headers from the incoming request.')
        ]
    else:
        pfs = custom_policy_fragments

    # 3) APIs (default is a single Hello World API at the root)
    if custom_apis is None:
        pol_hello_world = utils.read_policy_xml(HELLO_WORLD_XML_POLICY_PATH)
        api_hwroot_get = GET_APIOperation('This is a GET for API 1', pol_hello_world)
        api_hwroot = API('hello-world', 'Hello World', '', 'This is the root API for Hello World', operations = [api_hwroot_get])
        apis: List[API] = [api_hwroot]
    else:
        apis = custom_apis

    # 4) Bicep parameters with serialized APIs and policy fragments
    bicep_parameters = {
        'apimSku'         : {'value': apim_sku.value},
        'apis'            : {'value': [api.to_dict() for api in apis]},
        'policyFragments' : {'value': [pf.to_dict() for pf in pfs]}
    }

    # 5) Run the deployment from the infrastructure directory so the bicep
    #    template and parameters file are found regardless of the caller's CWD.
    original_cwd = os.getcwd()
    infra_dir = Path(__file__).parent

    try:
        os.chdir(infra_dir)
        print(f'๐Ÿ“ Changed working directory to: {infra_dir}')

        bicep_parameters_format = {
            '$schema': 'https://schema.management.azure.com/schemas/2019-04-01/deploymentParameters.json#',
            'contentVersion': '1.0.0.0',
            'parameters': bicep_parameters
        }

        # Write the ARM deployment parameters file next to main.bicep.
        params_file_path = infra_dir / 'params.json'

        with open(params_file_path, 'w') as file:
            json.dump(bicep_parameters_format, file)

        print("๐Ÿ“ Wrote the bicep deployment parameters file 'params.json'")

        # Create the resource group if it doesn't exist, then deploy.
        utils.create_resource_group(rg_name, rg_location, rg_tags)

        main_bicep_path = infra_dir / 'main.bicep'
        output = utils.run(
            f'az deployment group create --name {deployment.value} --resource-group {rg_name} --template-file "{main_bicep_path}" --parameters "{params_file_path}" --query "properties.outputs"',
            f"Deployment '{deployment.value}' succeeded",
            f"Deployment '{deployment.value}' failed."
        )

        # 6) Report the results and run a basic post-deployment verification.
        if output.success:
            print('\nโœ… Infrastructure creation completed successfully!')

            if output.json_data:
                apim_gateway_url = output.get('apimResourceGatewayURL', 'APIM API Gateway URL', suppress_logging = True)
                apim_apis = output.getJson('apiOutputs', 'APIs', suppress_logging = True)

                print('\n๐Ÿ“‹ Infrastructure Details:')
                print(f'   Resource Group   : {rg_name}')
                print(f'   Location         : {rg_location}')
                print(f'   APIM SKU         : {apim_sku.value}')
                print(f'   Gateway URL      : {apim_gateway_url}')
                print(f'   APIs Created     : {len(apim_apis)}')

                _verify_infrastructure(rg_name, apim_gateway_url)
        else:
            print('โŒ Infrastructure creation failed!')

        return output

    finally:
        # Always restore the caller's working directory, even on failure.
        os.chdir(original_cwd)
        print(f'๐Ÿ“ Restored working directory to: {original_cwd}')


def _verify_infrastructure(rg_name: str, apim_gateway_url: str) -> bool:
    """
    Verify that the infrastructure was created successfully.

    Args:
        rg_name (str): Resource group name.
        apim_gateway_url (str): API Management gateway URL (informational).

    Returns:
        bool: True if verification passed, False otherwise.
    """

    print('\n๐Ÿ” Verifying infrastructure...')

    try:
        # Check that the resource group exists.
        if not utils.does_resource_group_exist(rg_name):
            print('โŒ Resource group does not exist!')
            return False

        print('โœ… Resource group verified')

        # Locate the APIM service in the resource group.
        output = utils.run(f'az apim list -g {rg_name} --query "[0]" -o json', print_command_to_run = False, print_errors = False)

        if not (output.success and output.json_data):
            print('\nโŒ APIM service not found!')
            return False

        apim_name = output.json_data.get('name')
        print(f'โœ… APIM Service verified: {apim_name}')

        # Count the APIs that were created on the APIM service.
        api_output = utils.run(f'az apim api list --service-name {apim_name} -g {rg_name} --query "length(@)"',
                               print_command_to_run = False, print_errors = False)

        if api_output.success:
            api_count = int(api_output.text.strip())
            print(f'โœ… APIs verified: {api_count} API(s) created')

            # Optional: confirm a subscription key exists for later API testing.
            if api_count > 0:
                try:
                    sub_output = utils.run(f'az apim subscription list --service-name {apim_name} -g {rg_name} --query "[0].primaryKey" -o tsv',
                                           print_command_to_run = False, print_errors = False)

                    if sub_output.success and sub_output.text.strip():
                        print('โœ… Subscription key available for API testing')
                except Exception:
                    # Best-effort check only; its failure must not fail verification.
                    pass

        print('\n๐ŸŽ‰ Infrastructure verification completed successfully!')
        return True

    except Exception as e:
        print(f'\nโš ๏ธ Verification failed with error: {e}')
        return False


def main() -> None:
    """Command-line entry point: parse arguments and create the infrastructure."""

    parser = argparse.ArgumentParser(description = 'Create Simple APIM infrastructure')
    parser.add_argument('--location', default = 'eastus2', help = 'Azure region (default: eastus2)')
    parser.add_argument('--index', type = int, help = 'Infrastructure index')
    parser.add_argument('--sku', choices = ['Basicv2', 'Standardv2', 'Premiumv2'], default = 'Basicv2', help = 'APIM SKU (default: Basicv2)')

    args = parser.parse_args()

    # Map the CLI SKU string onto the APIM_SKU enum.
    sku_map = {
        'Basicv2'    : APIM_SKU.BASICV2,
        'Standardv2' : APIM_SKU.STANDARDV2,
        'Premiumv2'  : APIM_SKU.PREMIUMV2
    }

    try:
        result = _create_simple_apim_infrastructure(rg_location = args.location, index = args.index, apim_sku = sku_map[args.sku])
    except Exception as e:
        print(f'\n๐Ÿ’ฅ Error: {e}')
        sys.exit(1)

    if result.success:
        print('\n๐ŸŽ‰ Infrastructure creation completed successfully!')
        sys.exit(0)

    print('\n๐Ÿ’ฅ Infrastructure creation failed!')
    sys.exit(1)


if __name__ == '__main__':
    main()
loadTextContent('../../shared/apim-policies/all-apis-reveal-backend.xml') : loadTextContent('../../shared/apim-policies/all-apis.xml') + globalPolicyXml: loadTextContent('../../shared/apim-policies/all-apis.xml') } } diff --git a/samples/_TEMPLATE/create.ipynb b/samples/_TEMPLATE/create.ipynb index 82281a7..e1e742c 100644 --- a/samples/_TEMPLATE/create.ipynb +++ b/samples/_TEMPLATE/create.ipynb @@ -29,12 +29,7 @@ "tags = ['tag1', 'tag2', '...'] # ENTER DESCRIPTIVE TAG(S)\n", "api_prefix = '' # OPTIONAL: ENTER A PREFIX FOR THE APIS TO REDUCE COLLISION POTENTIAL WITH OTHER SAMPLES\n", "\n", - "# 2) Service-defined parameters (please do not change these)\n", - "rg_name = utils.get_infra_rg_name(deployment, index)\n", - "sample_folder = \"_TEMPLATE\"\n", - "nb_helper = utils.NotebookHelper(sample_folder, rg_name, rg_location, deployment, [INFRASTRUCTURE.SIMPLE_APIM])\n", - "\n", - "# 3) Define the APIs and their operations and policies\n", + "# 2) Define the APIs and their operations and policies\n", "\n", "# API 1\n", "# api1_get = GET_APIOperation('This is a GET for API 1')\n", @@ -66,23 +61,26 @@ "metadata": {}, "outputs": [], "source": [ - "import utils\n", - "\n", - "# 1) Define the Bicep parameters with serialized APIs\n", + "# Build the bicep parameters\n", "bicep_parameters = {\n", " 'apis': {'value': [api.to_dict() for api in apis]}\n", "}\n", "\n", - "# 2) Deploy the bicep template\n", - "output = nb_helper.deploy_bicep(bicep_parameters)\n", + "# Create the deployment helper and deploy the sample\n", + "nb_helper = utils.NotebookHelper('_TEMPLATE', utils.get_infra_rg_name(deployment, index), rg_location, deployment, [INFRASTRUCTURE.SIMPLE_APIM], index = index)\n", + "output = nb_helper.deploy_sample(bicep_parameters)\n", "\n", - "if output.json_data:\n", + "if output.success:\n", + " # Extract deployment outputs for testing\n", " afd_endpoint_url = output.get('fdeSecureUrl', 'Front Door Endpoint URL') # may be deleted if Front Door is not part of a supported 
infrastructure\n", " apim_name = output.get('apimServiceName', 'APIM Service Name')\n", " apim_gateway_url = output.get('apimResourceGatewayURL', 'APIM API Gateway URL')\n", " apim_apis = output.getJson('apiOutputs', 'APIs')\n", "\n", - "utils.print_ok('Deployment completed')" + " utils.print_ok('Deployment completed')\n", + "else:\n", + " print(\"โŒ Deployment failed!\")\n", + " raise SystemExit(1)" ] }, { @@ -102,36 +100,31 @@ "metadata": {}, "outputs": [], "source": [ - "import utils\n", "from apimrequests import ApimRequests\n", "from apimtesting import ApimTesting\n", "\n", - "tests = ApimTesting(\"Simple APIM Tests\", deployment, deployment)\n", + "# Initialize testing framework\n", + "tests = ApimTesting(\"Template Sample Tests\", '_TEMPLATE', nb_helper.deployment)\n", "\n", - "# 1) Issue a direct request to API Management\n", + "# Example API testing (uncomment and customize as needed)\n", + "# api_subscription_key = apim_apis[0]['subscriptionPrimaryKey']\n", "\n", - "api_subscription_key = apim_apis[0]['subscriptionPrimaryKey']\n", - "reqsApim = ApimRequests(apim_gateway_url, api_subscription_key)\n", - "reqsAfd = ApimRequests(afd_endpoint_url, api_subscription_key) # may be deleted if Front Door is not part of a supported infrastructure\n", - "\n", - "# reqsApim.singleGet('/', msg = 'Calling Hello World (Root) API via API Management Gateway URL. 
Response codes 200 and 403 are both valid depending on the infrastructure used.')\n", - "\n", - "# # 2) Issue requests against Front Door.\n", - "# # Check if the infrastructure architecture deployment uses Azure Front Door.\n", - "# utils.print_message('Checking if the infrastructure architecture deployment uses Azure Front Door.', blank_above = True)\n", - "# afd_endpoint_url = utils.get_frontdoor_url(deployment, rg_name)\n", + "# Check if the infrastructure uses Azure Front Door\n", + "# utils.print_message('Checking infrastructure endpoint...', blank_above = True)\n", + "# afd_endpoint_url = utils.get_frontdoor_url(nb_helper.deployment, nb_helper.rg_name)\n", "\n", "# if afd_endpoint_url:\n", - "# reqsAfd = ApimRequests(afd_endpoint_url)\n", - "# reqsAfd.singleGet('/', msg = 'Calling Hello World (Root) API via via Azure Front Door. Expect 200.')\n", - "\n", - "# # 3) Unsuccessful call to Front Door without API subscription key (should fail with 401 Unauthorized)\n", - "# reqsNoApiSubscription = ApimRequests(afd_endpoint_url)\n", - "# output = reqsNoApiSubscription.singleGet('/', msg = 'Calling Hello World (Root) API without API subscription key. Expect 401.')\n", - "# outputJson = utils.get_json(output)\n", - "# tests.verify(outputJson['statusCode'], 401)\n", - "# tests.verify(outputJson['message'], 'Access denied due to missing subscription key. Make sure to include subscription key when making requests to an API.')\n", - "\n", + "# # Test via Azure Front Door\n", + "# reqsAfd = ApimRequests(afd_endpoint_url, api_subscription_key)\n", + "# output = reqsAfd.singleGet('/', msg = 'Calling API via Azure Front Door. 
Expect 200.')\n", + "# tests.verify('expected_value' in output, True)\n", + "# else:\n", + "# # Test via API Management directly\n", + "# reqsApim = ApimRequests(apim_gateway_url, api_subscription_key)\n", + "# output = reqsApim.singleGet('/', msg = 'Calling API via API Management Gateway.')\n", + "# tests.verify('expected_value' in output, True)\n", + "\n", + "# tests.print_summary()\n", "utils.print_ok('All done!')" ] } diff --git a/samples/authX-pro/create.ipynb b/samples/authX-pro/create.ipynb index eee2f25..360b918 100644 --- a/samples/authX-pro/create.ipynb +++ b/samples/authX-pro/create.ipynb @@ -4,11 +4,11 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "### ๐Ÿ› ๏ธ 1. Initialize notebook variables\n", + "### Initialize notebook variables\n", "\n", "Configures everything that's needed for deployment. \n", "\n", - "๐Ÿ‘‰ **Modify entries under _1) User-defined parameters_ and _3) Define the APIs and their operations and policies_**." + "**Modify entries under _1) User-defined parameters_ and _2) Define the APIs and their operations and policies_**." 
] }, { @@ -25,14 +25,14 @@ "index = 1\n", "deployment = INFRASTRUCTURE.SIMPLE_APIM\n", "tags = ['authX-pro', 'jwt', 'policy-fragment'] # ENTER DESCRIPTIVE TAG(S)\n", - "api_prefix = 'authX-pro-' # OPTIONAL: ENTER A PREFIX FOR THE APIS TO REDUCE COLLISION POTENTIAL WITH OTHER SAMPLES\n", + "api_prefix = 'authX-pro-' # OPTIONAL: ENTER A PREFIX FOR THE APIS TO REDUCE COLLISION POTENTIAL WITH OTHER SAMPLES\n", "\n", - "# 2) Service-defined parameters (please do not change these)\n", - "rg_name = utils.get_infra_rg_name(deployment, index)\n", - "sample_folder = \"authX-pro\"\n", - "nb_helper = utils.NotebookHelper(sample_folder, rg_name, rg_location, deployment, [INFRASTRUCTURE.AFD_APIM_PE, INFRASTRUCTURE.APIM_ACA, INFRASTRUCTURE.SIMPLE_APIM], True)\n", + "# 2) Define the APIs and their operations and policies\n", "\n", - "# 3) Set up the named values\n", + "# Create the notebook helper with JWT support\n", + "nb_helper = utils.NotebookHelper('authX-pro', utils.get_infra_rg_name(deployment, index), rg_location, deployment, [INFRASTRUCTURE.AFD_APIM_PE, INFRASTRUCTURE.APIM_ACA, INFRASTRUCTURE.SIMPLE_APIM], use_jwt = True, index = index)\n", + "\n", + "# Set up the named values\n", "nvs: List[NamedValue] = [\n", " NamedValue(nb_helper.jwt_key_name, nb_helper.jwt_key_value_bytes_b64, True),\n", " NamedValue('HRMemberRoleId', Role.HR_MEMBER),\n", @@ -40,23 +40,21 @@ " NamedValue('HRAdministratorRoleId', Role.HR_ADMINISTRATOR)\n", "]\n", "\n", - "# 4) Set up the policy fragments\n", + "# Set up the policy fragments\n", "pf_authx_hr_member_xml = utils.read_policy_xml('pf-authx-hr-member.xml', {\n", " 'jwt_signing_key': nb_helper.jwt_key_name,\n", " 'hr_member_role_id': 'HRMemberRoleId'\n", - "}, sample_folder)\n", + "}, 'authX-pro')\n", "\n", "pfs: List[PolicyFragment] = [\n", " PolicyFragment('AuthX-HR-Member', pf_authx_hr_member_xml, 'Authenticates and authorizes HR members.')\n", "]\n", "\n", - "# 5) Define the Products\n", - "\n", - "# HR Product with authentication 
policy, including authorization via a required claim check for HR member role\n", + "# Define the Products\n", "pol_hr_product = utils.read_policy_xml('hr_product.xml', {\n", " 'jwt_signing_key': nb_helper.jwt_key_name, \n", " 'hr_member_role_id': 'HRMemberRoleId'\n", - "}, sample_folder)\n", + "}, 'authX-pro')\n", "\n", "hr_product_name = 'hr'\n", "products: List[Product] = [\n", @@ -65,21 +63,19 @@ " 'published', True, False, pol_hr_product)\n", "]\n", "\n", - "# 6) Define the APIs and their operations and policies\n", - "\n", - "# Policies\n", - "pol_hr_all_operations_pro = utils.read_policy_xml('hr_all_operations_pro.xml', sample_name = sample_folder)\n", - "pol_hr_get = utils.read_policy_xml('hr_get.xml', sample_name = sample_folder)\n", - "pol_hr_post = utils.read_policy_xml('hr_post.xml', sample_name = sample_folder)\n", + "# Define the APIs and their operations and policies\n", + "pol_hr_all_operations_pro = utils.read_policy_xml('hr_all_operations_pro.xml', sample_name = 'authX-pro')\n", + "pol_hr_get = utils.read_policy_xml('hr_get.xml', sample_name = 'authX-pro')\n", + "pol_hr_post = utils.read_policy_xml('hr_post.xml', sample_name = 'authX-pro')\n", "\n", - "# Employees (HR)\n", + "# API 1: Employees (HR)\n", "hr_employees_api_path = f'/{api_prefix}employees'\n", "hr_employees_get = GET_APIOperation('Gets the employees', pol_hr_get,)\n", "hr_employees_post = POST_APIOperation('Creates a new employee', pol_hr_post)\n", "hr_employees = API(f'{api_prefix}Employees', 'Employees Pro', hr_employees_api_path, 'This is a Human Resources API for employee information', pol_hr_all_operations_pro,\n", " operations = [hr_employees_get, hr_employees_post], tags = tags, productNames = [hr_product_name], subscriptionRequired = False)\n", "\n", - "# Benefits (HR)\n", + "# API 2: Benefits (HR)\n", "hr_benefits_api_path = f'/{api_prefix}benefits'\n", "hr_benefits_get = GET_APIOperation('Gets employee benefits', pol_hr_get)\n", "hr_benefits_post = 
POST_APIOperation('Creates employee benefits', pol_hr_post)\n", @@ -96,7 +92,7 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "### ๐Ÿš€ 2. Create deployment using Bicep\n", + "### Create deployment using Bicep\n", "\n", "Creates the bicep deployment into the previously-specified resource group. A bicep parameters file will be created prior to execution." ] @@ -107,9 +103,7 @@ "metadata": {}, "outputs": [], "source": [ - "import utils\n", - "\n", - "# 1) Define the Bicep parameters with serialized APIs\n", + "# Build the bicep parameters\n", "bicep_parameters = {\n", " 'apis': {'value': [api.to_dict() for api in apis]},\n", " 'namedValues': {'value': [nv.to_dict() for nv in nvs]},\n", @@ -117,22 +111,26 @@ " 'products': {'value': [product.to_dict() for product in products]}\n", "}\n", "\n", - "# 2) Deploy the bicep template\n", - "output = nb_helper.deploy_bicep(bicep_parameters)\n", + "# Deploy the sample\n", + "output = nb_helper.deploy_sample(bicep_parameters)\n", "\n", - "if output.json_data:\n", + "if output.success:\n", + " # Extract deployment outputs for testing\n", " apim_name = output.get('apimServiceName', 'APIM Service Name')\n", " apim_gateway_url = output.get('apimResourceGatewayURL', 'APIM API Gateway URL')\n", " apim_products = output.getJson('productOutputs', 'Products')\n", "\n", - "utils.print_ok('Deployment completed')" + " print(f\"โœ… Sample deployment completed successfully!\")\n", + "else:\n", + " print(\"โŒ Sample deployment failed!\")\n", + " raise SystemExit(1)" ] }, { "cell_type": "markdown", "metadata": {}, "source": [ - "### โœ… 3. Verify API Request Success\n", + "### Verify API Request Success\n", "\n", "Assert that the deployment was successful by making simple calls to APIM. 
\n", "\n", diff --git a/samples/authX/create.ipynb b/samples/authX/create.ipynb index 6be32fd..23fa31c 100644 --- a/samples/authX/create.ipynb +++ b/samples/authX/create.ipynb @@ -4,11 +4,11 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "### ๐Ÿ› ๏ธ 1. Initialize notebook variables\n", + "### Initialize notebook variables\n", "\n", "Configures everything that's needed for deployment. \n", "\n", - "๐Ÿ‘‰ **Modify entries under _1) User-defined parameters_ and _3) Define the APIs and their operations and policies_**." + "**Modify entries under _1) User-defined parameters_ and _2) Define the APIs and their operations and policies_**." ] }, { @@ -25,30 +25,28 @@ "index = 1\n", "deployment = INFRASTRUCTURE.SIMPLE_APIM\n", "tags = ['authX', 'jwt', 'hr'] # ENTER DESCRIPTIVE TAG(S)\n", - "api_prefix = 'authX-' # OPTIONAL: ENTER A PREFIX FOR THE APIS TO REDUCE COLLISION POTENTIAL WITH OTHER SAMPLES\n", + "api_prefix = 'authX-' # OPTIONAL: ENTER A PREFIX FOR THE APIS TO REDUCE COLLISION POTENTIAL WITH OTHER SAMPLES\n", "\n", - "# 2) Service-defined parameters (please do not change these)\n", - "rg_name = utils.get_infra_rg_name(deployment, index)\n", - "sample_folder = \"authX\"\n", - "nb_helper = utils.NotebookHelper(sample_folder, rg_name, rg_location, deployment, [INFRASTRUCTURE.AFD_APIM_PE, INFRASTRUCTURE.APIM_ACA, INFRASTRUCTURE.SIMPLE_APIM], True)\n", + "# 2) Define the APIs and their operations and policies\n", "\n", - "# 3) Define the APIs and their operations and policies\n", + "# Create the notebook helper with JWT support\n", + "nb_helper = utils.NotebookHelper('authX', utils.get_infra_rg_name(deployment, index), rg_location, deployment, [INFRASTRUCTURE.AFD_APIM_PE, INFRASTRUCTURE.APIM_ACA, INFRASTRUCTURE.SIMPLE_APIM], use_jwt = True, index = index)\n", "\n", "# Policies\n", "# Named values must be set up a bit differently as they need to have two surrounding curly braces\n", - "pol_hr_all_operations = utils.read_policy_xml('hr_all_operations.xml', 
sample_name = sample_folder).format(\n", + "pol_hr_all_operations = utils.read_policy_xml('hr_all_operations.xml', sample_name = 'authX').format(\n", " jwt_signing_key = '{{' + nb_helper.jwt_key_name + '}}', \n", " hr_member_role_id = '{{HRMemberRoleId}}'\n", ")\n", - "pol_hr_get = utils.read_policy_xml('hr_get.xml', sample_name = sample_folder).format(\n", + "pol_hr_get = utils.read_policy_xml('hr_get.xml', sample_name = 'authX').format(\n", " hr_administrator_role_id = '{{HRAdministratorRoleId}}',\n", " hr_associate_role_id = '{{HRAssociateRoleId}}'\n", ")\n", - "pol_hr_post = utils.read_policy_xml('hr_post.xml', sample_name = sample_folder).format(\n", + "pol_hr_post = utils.read_policy_xml('hr_post.xml', sample_name = 'authX').format(\n", " hr_administrator_role_id = '{{HRAdministratorRoleId}}'\n", ")\n", "\n", - "# Employees (HR)\n", + "# API 1: Employees (HR)\n", "hr_employees_get = GET_APIOperation('Gets the employees', pol_hr_get)\n", "hr_employees_post = POST_APIOperation('Creates a new employee', pol_hr_post)\n", "hr_employees = API('Employees', 'Employees', '/employees', 'This is a Human Resources API to obtain employee information', pol_hr_all_operations, operations = [hr_employees_get, hr_employees_post], tags = tags, subscriptionRequired = True)\n", @@ -56,7 +54,7 @@ "# APIs Array\n", "apis: List[API] = [hr_employees]\n", "\n", - "# 4) Set up the named values\n", + "# Set up the named values\n", "nvs: List[NamedValue] = [\n", " NamedValue(nb_helper.jwt_key_name, nb_helper.jwt_key_value_bytes_b64, True),\n", " NamedValue('HRMemberRoleId', Role.HR_MEMBER),\n", @@ -71,7 +69,7 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "### ๐Ÿš€ 2. Create deployment using Bicep\n", + "### Create deployment using Bicep\n", "\n", "Creates the bicep deployment into the previously-specified resource group. A bicep parameters file will be created prior to execution." 
] @@ -82,30 +80,32 @@ "metadata": {}, "outputs": [], "source": [ - "import utils\n", - "\n", - "# 1) Define the Bicep parameters with serialized APIs\n", + "# Build the bicep parameters\n", "bicep_parameters = {\n", " 'apis': {'value': [api.to_dict() for api in apis]},\n", " 'namedValues': {'value': [nv.to_dict() for nv in nvs]}\n", "}\n", "\n", - "# 2) Deploy the bicep template\n", - "output = nb_helper.deploy_bicep(bicep_parameters)\n", + "# Deploy the sample\n", + "output = nb_helper.deploy_sample(bicep_parameters)\n", "\n", - "if output.json_data:\n", + "if output.success:\n", + " # Extract deployment outputs for testing\n", " apim_name = output.get('apimServiceName', 'APIM Service Name')\n", " apim_gateway_url = output.get('apimResourceGatewayURL', 'APIM API Gateway URL')\n", " apim_apis = output.getJson('apiOutputs', 'APIs')\n", "\n", - "utils.print_ok('Deployment completed')" + " print(f\"โœ… Sample deployment completed successfully!\")\n", + "else:\n", + " print(\"โŒ Sample deployment failed!\")\n", + " raise SystemExit(1)" ] }, { "cell_type": "markdown", "metadata": {}, "source": [ - "### โœ… 3. Verify API Request Success\n", + "### Verify API Request Success\n", "\n", "Assert that the deployment was successful by making simple calls to APIM. \n", "\n", @@ -118,77 +118,57 @@ "metadata": {}, "outputs": [], "source": [ - "import utils\n", "from apimrequests import ApimRequests\n", "from apimtesting import ApimTesting\n", "from apimtypes import Role\n", "from users import UserHelper\n", "from authfactory import AuthFactory\n", "\n", - "tests = ApimTesting(\"AuthX Sample Tests\", sample_folder, deployment)\n", - "\n", + "# Initialize testing framework\n", + "tests = ApimTesting(\"AuthX Sample Tests\", 'authX', nb_helper.deployment)\n", "hr_api_apim_subscription_key = apim_apis[0]['subscriptionPrimaryKey']\n", "\n", - "# Preflight: Check if the infrastructure architecture deployment uses Azure Front Door. 
If so, assume that APIM is not directly accessible and use the Front Door URL instead.\n", - "endpoint_url = utils.test_url_preflight_check(deployment, rg_name, apim_gateway_url)\n", + "# Check infrastructure endpoint\n", + "utils.print_message('Checking infrastructure endpoint...', blank_above = True)\n", + "afd_endpoint_url = utils.get_frontdoor_url(nb_helper.deployment, nb_helper.rg_name)\n", + "endpoint_url = afd_endpoint_url if afd_endpoint_url else apim_gateway_url\n", "\n", - "# 1) HR Administrator\n", - "# Create a JSON Web Token with a payload and sign it with the symmetric key from above.\n", + "# 1) HR Administrator - Full access\n", "encoded_jwt_token_hr_admin = AuthFactory.create_symmetric_jwt_token_for_user(UserHelper.get_user_by_role(Role.HR_ADMINISTRATOR), nb_helper.jwt_key_value)\n", - "print(f'\\nJWT token for HR Admin:\\n{encoded_jwt_token_hr_admin}') # this value is used to call the APIs via APIM\n", + "print(f'\\nJWT token for HR Admin:\\n{encoded_jwt_token_hr_admin}')\n", "\n", - "# Set up an APIM requests object with the JWT token\n", "reqsApimAdmin = ApimRequests(endpoint_url, hr_api_apim_subscription_key)\n", "reqsApimAdmin.headers['Authorization'] = f'Bearer {encoded_jwt_token_hr_admin}'\n", "\n", - "# Call APIM\n", - "output = reqsApimAdmin.singleGet('/employees', msg = 'Calling GET Employees API via API Management Gateway URL. Expect 200.')\n", + "output = reqsApimAdmin.singleGet('/employees', msg = 'Calling GET Employees API as HR Admin. Expect 200.')\n", "tests.verify(output, 'Returning a mock employee')\n", "\n", - "output = reqsApimAdmin.singlePost('/employees', msg = 'Calling POST Employees API via API Management Gateway URL. Expect 200.')\n", + "output = reqsApimAdmin.singlePost('/employees', msg = 'Calling POST Employees API as HR Admin. 
Expect 200.')\n", "tests.verify(output, 'A mock employee has been created.')\n", "\n", - "# 2) HR Associate\n", - "# Create a JSON Web Token with a payload and sign it with the symmetric key from above.\n", + "# 2) HR Associate - Read-only access\n", "encoded_jwt_token_hr_associate = AuthFactory.create_symmetric_jwt_token_for_user(UserHelper.get_user_by_role(Role.HR_ASSOCIATE), nb_helper.jwt_key_value)\n", - "print(f'\\nJWT token for HR Associate:\\n{encoded_jwt_token_hr_associate}') # this value is used to call the APIs via APIM\n", + "print(f'\\nJWT token for HR Associate:\\n{encoded_jwt_token_hr_associate}')\n", "\n", - "# Set up an APIM requests object with the JWT token\n", "reqsApimAssociate = ApimRequests(endpoint_url, hr_api_apim_subscription_key)\n", "reqsApimAssociate.headers['Authorization'] = f'Bearer {encoded_jwt_token_hr_associate}'\n", "\n", - "# Call APIM\n", - "output = reqsApimAssociate.singleGet('/employees', msg = 'Calling GET Employees API via API Management Gateway URL. Expect 200.')\n", + "output = reqsApimAssociate.singleGet('/employees', msg = 'Calling GET Employees API as HR Associate. Expect 200.')\n", "tests.verify(output, 'Returning a mock employee')\n", "\n", - "output = reqsApimAssociate.singlePost('/employees', msg = 'Calling POST Employees API via API Management Gateway URL. Expect 403.')\n", - "# The return value is not good enough, but checking for an empty string is the best we can do for now.\n", + "output = reqsApimAssociate.singlePost('/employees', msg = 'Calling POST Employees API as HR Associate. 
Expect 403.')\n", "tests.verify(output, '')\n", "\n", - "# 3) HR Administrator but no API subscription key (api-key)\n", - "# Set up an APIM requests object with the JWT token\n", - "reqsApimAdminNoApiSubscription = ApimRequests(endpoint_url)\n", - "reqsApimAdminNoApiSubscription.headers['Authorization'] = f'Bearer {encoded_jwt_token_hr_admin}'\n", - "\n", - "# Call APIM\n", - "output = reqsApimAdminNoApiSubscription.singleGet('/employees', msg = 'Calling GET Employees API via API Management Gateway URL without API subscription key. Expect 401.')\n", - "outputJson = utils.get_json(output)\n", - "tests.verify(outputJson['statusCode'], 401)\n", - "tests.verify(outputJson['message'], 'Access denied due to missing subscription key. Make sure to include subscription key when making requests to an API.')\n", + "# 3) Missing API subscription key\n", + "reqsNoApiSubscription = ApimRequests(endpoint_url)\n", + "reqsNoApiSubscription.headers['Authorization'] = f'Bearer {encoded_jwt_token_hr_admin}'\n", "\n", - "# 4) HR Associate but no API subscription key (api-key)\n", - "# Set up an APIM requests object with the JWT token\n", - "reqsApimAssociateNoHrProduct = ApimRequests(endpoint_url)\n", - "reqsApimAssociateNoHrProduct.headers['Authorization'] = f'Bearer {encoded_jwt_token_hr_associate}'\n", - "\n", - "# Call APIM\n", - "output = reqsApimAssociateNoHrProduct.singleGet('/employees', msg = 'Calling GET Employees API via API Management Gateway URL without API subscription key. Expect 401.')\n", + "output = reqsNoApiSubscription.singleGet('/employees', msg = 'Calling GET Employees API without API subscription key. Expect 401.')\n", "outputJson = utils.get_json(output)\n", "tests.verify(outputJson['statusCode'], 401)\n", "tests.verify(outputJson['message'], 'Access denied due to missing subscription key. 
Make sure to include subscription key when making requests to an API.')\n", "\n", "tests.print_summary()\n", - "\n", "utils.print_ok('All done!')" ] } diff --git a/samples/azure-maps/create.ipynb b/samples/azure-maps/create.ipynb index 332f1c4..35db64c 100644 --- a/samples/azure-maps/create.ipynb +++ b/samples/azure-maps/create.ipynb @@ -4,11 +4,13 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "### ๐Ÿ› ๏ธ 1. Initialize notebook variables\n", + "## ๐Ÿ› ๏ธ Initialize Sample Environment\n", "\n", - "Configures everything that's needed for deployment. \n", + "This Azure Maps APIM sample demonstrates backend integration and authentication patterns with Azure Maps APIs.\n", "\n", - "**Modify entries under _1) User-defined parameters_ and _3) Define the APIs and their operations and policies_**." + "**Configuration required:**\n", + "- Review and modify the parameters in the initialization cell below\n", + "- Ensure you have appropriate Azure Maps resources available for testing" ] }, { @@ -17,54 +19,72 @@ "metadata": {}, "outputs": [], "source": [ + "# Initialize notebook helper and configuration\n", "import utils\n", "from apimtypes import *\n", "\n", - "# 1) User-defined parameters (change these as needed)\n", - "rg_location = 'eastus2'\n", - "index = 1\n", - "deployment = INFRASTRUCTURE.SIMPLE_APIM\n", - "tags = ['azure-maps'] # ENTER DESCRIPTIVE TAG(S)\n", - "api_prefix = 'am-' # OPTIONAL: ENTER A PREFIX FOR THE APIS TO REDUCE COLLISION POTENTIAL WITH OTHER SAMPLES\n", - "azure_maps_url = 'https://atlas.microsoft.com' # OPTIONAL: ENTER THE AZURE MAPS URL IF DIFFERENT FROM DEFAULT\n", - "\n", - "# 2) Service-defined parameters (please do not change these)\n", - "rg_name = utils.get_infra_rg_name(deployment, index)\n", - "sample_folder = \"azure-maps\"\n", - "nb_helper = utils.NotebookHelper(sample_folder, rg_name, rg_location, deployment, [INFRASTRUCTURE.AFD_APIM_PE, INFRASTRUCTURE.APIM_ACA, INFRASTRUCTURE.SIMPLE_APIM])\n", + "# 
------------------------------\n", + "# USER CONFIGURATION\n", + "# ------------------------------\n", "\n", - "# 3) Define the APIs and their operations and policies\n", - "\n", - "# Policies\n", - "# Named values must be set up a bit differently as they need to have two surrounding curly braces\n", + "# Infrastructure settings\n", + "rg_location = 'eastus2'\n", + "index = 1\n", + "deployment = INFRASTRUCTURE.SIMPLE_APIM\n", + "tags = ['azure-maps']\n", + "api_prefix = 'am-'\n", + "azure_maps_url = 'https://atlas.microsoft.com'\n", + "\n", + "# ------------------------------\n", + "# SAMPLE SETUP\n", + "# ------------------------------\n", + "\n", + "sample_folder = 'azure-maps'\n", + "rg_name = utils.get_infra_rg_name(deployment, index)\n", + "nb_helper = utils.NotebookHelper(\n", + " sample_folder, \n", + " rg_name, \n", + " rg_location, \n", + " deployment,\n", + " [INFRASTRUCTURE.AFD_APIM_PE, INFRASTRUCTURE.APIM_ACA, INFRASTRUCTURE.SIMPLE_APIM]\n", + ")\n", + "\n", + "# ------------------------------\n", + "# API CONFIGURATION\n", + "# ------------------------------\n", + "\n", + "# Load policy definitions\n", "pol_map_async_geocode_batch_v1_keyauth_post = utils.read_policy_xml('map_async_geocode_batch_v1_keyauth_post.xml', sample_name=sample_folder)\n", - "pol_map_default_route_v2_aad_get = utils.read_policy_xml('map_default_route_v2_aad_get.xml', sample_name=sample_folder)\n", - "pol_map_geocode_v2_aad_get = utils.read_policy_xml('map_geocode_v2_aad_get.xml', sample_name=sample_folder)\n", + "pol_map_default_route_v2_aad_get = utils.read_policy_xml('map_default_route_v2_aad_get.xml', sample_name=sample_folder)\n", + "pol_map_geocode_v2_aad_get = utils.read_policy_xml('map_geocode_v2_aad_get.xml', sample_name=sample_folder)\n", "\n", - "# Map API \n", - "mapApi_v2_default_get = GET_APIOperation2('get-default-route','Get default route','/default/*','This is the default route that will allow all requests to go through to the backend 
api',pol_map_default_route_v2_aad_get)\n", - "mapApi_v1_async_post = APIOperation('async-geocode-batch','Async Geocode Batch','/geocode/batch/async',HTTP_VERB.POST, 'Post geocode batch async endpoint',pol_map_async_geocode_batch_v1_keyauth_post)\n", - "mapApi_v2_geocode_get = GET_APIOperation2('get-geocode','Get Geocode','/geocode','Get geocode endpoint',pol_map_geocode_v2_aad_get)\n", - "api1 = API('map-api', 'Map API', '/map', 'This is the proxy for Azure Maps', operations=[mapApi_v2_default_get, mapApi_v1_async_post,mapApi_v2_geocode_get], tags = tags, serviceUrl=azure_maps_url)\n", + "# Define API operations\n", + "mapApi_v2_default_get = GET_APIOperation2('get-default-route', 'Get default route', '/default/*', 'This is the default route that will allow all requests to go through to the backend api', pol_map_default_route_v2_aad_get)\n", + "mapApi_v1_async_post = APIOperation('async-geocode-batch', 'Async Geocode Batch', '/geocode/batch/async', HTTP_VERB.POST, 'Post geocode batch async endpoint', pol_map_async_geocode_batch_v1_keyauth_post)\n", + "mapApi_v2_geocode_get = GET_APIOperation2('get-geocode', 'Get Geocode', '/geocode', 'Get geocode endpoint', pol_map_geocode_v2_aad_get)\n", + "\n", + "# API 1: Maps\n", + "api1 = API('map-api', 'Map API', '/map', 'This is the proxy for Azure Maps', \n", + " operations=[mapApi_v2_default_get, mapApi_v1_async_post, mapApi_v2_geocode_get], \n", + " tags=tags, serviceUrl=azure_maps_url)\n", "\n", - "# APIs Array\n", "apis: List[API] = [api1]\n", "\n", - "# 4) Set up the named values, for this specific sample, we are using some of the named values in the API policies defined above that can't be known at this point in the process. 
For those named values, we are setting them in the main.bicep file.\n", + "# Define named values\n", "nvs: List[NamedValue] = [\n", - " NamedValue('azure-maps-arm-api-version','2023-06-01')\n", + " NamedValue('azure-maps-arm-api-version', '2023-06-01')\n", "]\n", "\n", - "utils.print_ok('Notebook initialized')" + "utils.print_ok('โœ… Azure Maps sample initialized')" ] }, { "cell_type": "markdown", "metadata": {}, "source": [ - "### ๐Ÿš€ 2. Create deployment using Bicep\n", + "## ๐Ÿš€ Deploy Infrastructure and APIs\n", "\n", - "Creates the bicep deployment into the previously-specified resource group. A bicep parameters file will be created prior to execution." + "Deploy the sample configuration to Azure using Bicep templates." ] }, { @@ -73,34 +93,29 @@ "metadata": {}, "outputs": [], "source": [ - "import utils\n", - "\n", - "# 1) Define the Bicep parameters with serialized APIs\n", + "# Deploy the infrastructure and APIs\n", "bicep_parameters = {\n", " 'apis': {'value': [api.to_dict() for api in apis]},\n", " 'namedValues': {'value': [nv.to_dict() for nv in nvs]}\n", "}\n", "\n", - "# 2) Deploy the bicep template\n", "output = nb_helper.deploy_bicep(bicep_parameters)\n", "\n", "if output.json_data:\n", - " apim_name = output.get('apimServiceName', 'APIM Service Name')\n", + " apim_name = output.get('apimServiceName', 'APIM Service Name')\n", " apim_gateway_url = output.get('apimResourceGatewayURL', 'APIM API Gateway URL')\n", - " apim_apis = output.getJson('apiOutputs', 'APIs')\n", + " apim_apis = output.getJson('apiOutputs', 'APIs')\n", "\n", - "utils.print_ok('Deployment completed')" + "utils.print_ok('โœ… Deployment completed successfully')" ] }, { "cell_type": "markdown", "metadata": {}, "source": [ - "### โœ… 3. Verify API Request Success\n", + "## โœ… Verify and Test APIs\n", "\n", - "Assert that the deployment was successful by making simple calls to APIM. \n", - "\n", - "โ—๏ธ If the infrastructure shields APIM and requires a different ingress (e.g. 
Azure Front Door), the request to the APIM gateway URl will fail by design. Obtain the Front Door endpoint hostname and try that instead." + "Test the deployed APIs to confirm successful configuration and backend connectivity." ] }, { @@ -109,25 +124,31 @@ "metadata": {}, "outputs": [], "source": [ - "import utils\n", + "# Test and verify the deployed APIs\n", + "import json\n", "from apimtesting import ApimTesting\n", "from apimrequests import ApimRequests\n", - "import json\n", "\n", "tests = ApimTesting(\"Azure Maps Sample Tests\", sample_folder, deployment)\n", "\n", - "# Preflight: Check if the infrastructure architecture deployment uses Azure Front Door. If so, assume that APIM is not directly accessible and use the Front Door URL instead.\n", + "# Get the appropriate endpoint URL for testing\n", "endpoint_url = utils.test_url_preflight_check(deployment, rg_name, apim_gateway_url)\n", - "\n", "reqs = ApimRequests(endpoint_url, apim_apis[0]['subscriptionPrimaryKey'])\n", "\n", - "# 1) Issue requests to API Management with Azure Maps APIs\n", - "output = reqs.singleGet('/map/default/geocode?query=15127%20NE%2024th%20Street%20Redmond%20WA', msg = 'Calling Default Route API with SAS Token Auth. Expect 200.')\n", + "# Test Azure Maps API endpoints\n", + "utils.print_info(\"Testing Azure Maps API operations...\")\n", + "\n", + "# Test default route with SAS token auth\n", + "output = reqs.singleGet('/map/default/geocode?query=15127%20NE%2024th%20Street%20Redmond%20WA', \n", + " msg='Calling Default Route API with SAS Token Auth. Expect 200.')\n", "tests.verify('address' in output, True)\n", "\n", - "output = reqs.singleGet('/map/geocode?query=15127%20NE%2024th%20Street%20Redmond%20WA', msg = 'Calling Geocode v2 API with AAD Auth. Expect 200.')\n", + "# Test geocode v2 with AAD auth\n", + "output = reqs.singleGet('/map/geocode?query=15127%20NE%2024th%20Street%20Redmond%20WA', \n", + " msg='Calling Geocode v2 API with AAD Auth. 
Expect 200.')\n", "tests.verify('address' in output, True)\n", "\n", + "# Test async geocode batch with shared key auth\n", "output = reqs.singlePostAsync('/map/geocode/batch/async', data={\n", " \"batchItems\": [\n", " {\"query\": \"?query=400 Broad St, Seattle, WA 98109&limit=3\"},\n", @@ -136,21 +157,23 @@ " {\"query\": \"?query=Pike Pl, Seattle, WA 98101&lat=47.610970&lon=-122.342469&radius=1000\"},\n", " {\"query\": \"?query=Champ de Mars, 5 Avenue Anatole France, 75007 Paris, France&limit=1\"}\n", " ]\n", - "}, msg = 'Calling Async Geocode Batch v1 API with Share Key Auth. Expect initial 202, then a 200 on the polling response', timeout=120, poll_interval=3)\n", + "}, msg='Calling Async Geocode Batch v1 API with Share Key Auth. Expect initial 202, then a 200 on the polling response', \n", + "timeout=120, poll_interval=3)\n", "\n", - "# confirm the response contains \"summary\": { \"successfulRequests\": 5, \"totalRequests\": 5}\n", - "tests.verify('summary' in output and 'successfulRequests' in output and json.loads(output)['summary']['successfulRequests'] == 5, True)\n", + "# Verify batch response contains successful requests\n", + "tests.verify('summary' in output and 'successfulRequests' in output and \n", + " json.loads(output)['summary']['successfulRequests'] == 5, True)\n", "\n", - "# 2) Unsuccessful call without API subscription key (should fail with 401 Unauthorized)\n", + "# Test unauthorized access (should fail with 401)\n", "reqsNoApiSubscription = ApimRequests(endpoint_url)\n", - "output = reqsNoApiSubscription.singleGet('/map/geocode?query=15127%20NE%2024th%20Street%20Redmond%20WA', msg = 'Calling Geocode v2 API without API subscription key. Expect 401.')\n", + "output = reqsNoApiSubscription.singleGet('/map/geocode?query=15127%20NE%2024th%20Street%20Redmond%20WA', \n", + " msg='Calling Geocode v2 API without API subscription key. 
Expect 401.')\n", "outputJson = utils.get_json(output)\n", "tests.verify(outputJson['statusCode'], 401)\n", "tests.verify(outputJson['message'], 'Access denied due to missing subscription key. Make sure to include subscription key when making requests to an API.')\n", "\n", "tests.print_summary()\n", - "\n", - "utils.print_ok('All done!')" + "utils.print_ok('โœ… All tests completed successfully!')" ] } ], diff --git a/samples/general/create.ipynb b/samples/general/create.ipynb index a4887c0..5323508 100644 --- a/samples/general/create.ipynb +++ b/samples/general/create.ipynb @@ -6,7 +6,9 @@ "source": [ "### Initialize notebook variables\n", "\n", - "Configures everything that's needed for deployment. **Modify entries under _1) User-defined parameters_ and _3) Define the APIs and their operations and policies_**." + "Configures everything that's needed for deployment. \n", + "\n", + "**Modify entries under _1) User-defined parameters_ and _2) Define the APIs and their operations and policies_**." 
] }, { @@ -24,23 +26,18 @@ "deployment = INFRASTRUCTURE.SIMPLE_APIM\n", "tags = ['general']\n", "\n", - "# 2) Service-defined parameters (please do not change these)\n", - "rg_name = utils.get_infra_rg_name(deployment, index)\n", - "sample_folder = \"general\"\n", - "nb_helper = utils.NotebookHelper(sample_folder, rg_name, rg_location, deployment, [INFRASTRUCTURE.AFD_APIM_PE, INFRASTRUCTURE.APIM_ACA, INFRASTRUCTURE.SIMPLE_APIM])\n", - "\n", - "# 3) Define the APIs and their operations and policies\n", + "# 2) Define the APIs and their operations and policies\n", "\n", "# API 1\n", - "api1_get = GET_APIOperation('This is a GET for API 1')\n", + "api1_get = GET_APIOperation('This is a GET for API 1')\n", "api1_post = POST_APIOperation('This is a POST for API 1')\n", - "api1 = API('API1', 'API 1', '/api1', 'This is API 1', operations = [api1_get, api1_post], tags = tags)\n", + "api1 = API('API1', 'API 1', '/api1', 'This is API 1', operations = [api1_get, api1_post], tags = tags)\n", "\n", "# API 2\n", "api2_post = POST_APIOperation('This is a POST for API 2')\n", - "api2 = API('API2', 'API 2', '/api2', 'This is API 2', operations = [api2_post], tags = tags)\n", + "api2 = API('API2', 'API 2', '/api2', 'This is API 2', operations = [api2_post], tags = tags)\n", "\n", - "# Request Headers\n", + "# API 3: Request Headers\n", "pol_request_headers_get = utils.read_policy_xml(REQUEST_HEADERS_XML_POLICY_PATH)\n", "request_headers_get = GET_APIOperation('Gets the request headers for the current request and returns them. 
Great for troubleshooting.', pol_request_headers_get)\n", "request_headers = API('requestheaders', 'Request Headers', '/request-headers', 'API for request headers', operations = [request_headers_get], tags = tags)\n", @@ -66,18 +63,24 @@ "metadata": {}, "outputs": [], "source": [ - "# 1) Define the Bicep parameters with serialized APIs\n", + "# Build the bicep parameters\n", "bicep_parameters = {\n", - " 'apis' : { 'value': [api.to_dict() for api in apis] }\n", + " 'apis': {'value': [api.to_dict() for api in apis]}\n", "}\n", "\n", - "# 2) Deploy the bicep template\n", - "output = nb_helper.deploy_bicep(bicep_parameters)\n", + "# Create the deployment helper and deploy the sample\n", + "nb_helper = utils.NotebookHelper('general', utils.get_infra_rg_name(deployment, index), rg_location, deployment, [INFRASTRUCTURE.AFD_APIM_PE, INFRASTRUCTURE.APIM_ACA, INFRASTRUCTURE.SIMPLE_APIM], index = index)\n", + "output = nb_helper.deploy_sample(bicep_parameters)\n", "\n", - "if output.json_data:\n", - " apim_name = output.get('apimServiceName', 'APIM Service Name')\n", - " apim_gateway_url = output.get('apimResourceGatewayURL', 'APIM API Gateway URL')\n", - " apim_apis = output.getJson('apiOutputs', 'APIs')" + "if output.success:\n", + " # Extract deployment outputs for testing\n", + " apim_gateway_url = output.get('apimResourceGatewayURL', 'APIM Gateway URL')\n", + " apim_apis = output.getJson('apiOutputs', 'APIs')\n", + "\n", + " print(f\"โœ… Sample deployment completed successfully!\")\n", + "else:\n", + " print(\"โŒ Sample deployment failed!\")\n", + " raise SystemExit(1)" ] }, { @@ -97,39 +100,38 @@ "metadata": {}, "outputs": [], "source": [ - "import utils\n", "from apimrequests import ApimRequests\n", "from apimtesting import ApimTesting\n", "\n", - "tests = ApimTesting(\"General Sample Tests\", sample_folder, deployment)\n", - "\n", + "# Initialize testing framework\n", + "tests = ApimTesting(\"General Sample Tests\", 'general', nb_helper.deployment)\n", 
"api_subscription_key = apim_apis[2]['subscriptionPrimaryKey']\n", "\n", - "# Check if the infrastructure architecture deployment uses Azure Front Door.\n", - "utils.print_message('Checking if the infrastructure architecture deployment uses Azure Front Door.', blank_above = True)\n", - "afd_endpoint_url = utils.get_frontdoor_url(deployment, rg_name)\n", + "# Check if the infrastructure uses Azure Front Door\n", + "utils.print_message('Checking infrastructure endpoint...', blank_above = True)\n", + "afd_endpoint_url = utils.get_frontdoor_url(nb_helper.deployment, nb_helper.rg_name)\n", "\n", "if afd_endpoint_url:\n", + " # Test via Azure Front Door\n", " reqsAfd = ApimRequests(afd_endpoint_url, api_subscription_key)\n", - " output = reqsAfd.singleGet('/request-headers', msg = 'Calling Request Headers API via via Azure Front Door. Expect 200.')\n", + " output = reqsAfd.singleGet('/request-headers', msg = 'Calling Request Headers API via Azure Front Door. Expect 200.')\n", " tests.verify('Host:' in output, True)\n", "else:\n", - " # Issue a direct request to API Management\n", + " # Test via API Management directly\n", " reqsApim = ApimRequests(apim_gateway_url, api_subscription_key)\n", - " output = reqsApim.singleGet('/request-headers', msg = 'Calling Request Headers API via API Management Gateway URL. Response codes 200 and 403 are both valid depending on the infrastructure used.')\n", + " output = reqsApim.singleGet('/request-headers', msg = 'Calling Request Headers API via API Management Gateway. 
Response codes 200 and 403 are both valid.')\n", " tests.verify('Host:' in output, True)\n", "\n", "tests.print_summary()\n", - "\n", "utils.print_ok('All done!')" ] } ], "metadata": { "kernelspec": { - "display_name": "APIM Samples Python 3.12", + "display_name": ".venv (3.12.10)", "language": "python", - "name": "apim-samples" + "name": "python3" }, "language_info": { "codemirror_mode": { diff --git a/samples/load-balancing/create.ipynb b/samples/load-balancing/create.ipynb index 296cfde..074afe7 100644 --- a/samples/load-balancing/create.ipynb +++ b/samples/load-balancing/create.ipynb @@ -4,11 +4,13 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "### ๐Ÿ› ๏ธ 1. Initialize notebook variables\n", + "## ๐Ÿ› ๏ธ Initialize Sample Environment\n", "\n", - "Configures everything that's needed for deployment. \n", + "This load balancing APIM sample demonstrates backend pool configuration with prioritized and weighted distribution patterns.\n", "\n", - "**Modify entries under _1) User-defined parameters_ and _3) Define the APIs and their operations and policies_**." 
+ "**Configuration required:**\n", + "- Review and modify the parameters in the initialization cell below\n", + "- This sample uses Azure Container Apps infrastructure with multiple backend pools" ] }, { @@ -24,44 +26,49 @@ "rg_location = 'eastus2'\n", "index = 1\n", "deployment = INFRASTRUCTURE.APIM_ACA\n", - "tags = ['load-balancing'] # [ENTER DESCRIPTIVE TAG(S)]\n", - "api_prefix = 'lb-' # OPTIONAL: ENTER A PREFIX FOR THE APIS TO REDUCE COLLISION POTENTIAL WITH OTHER SAMPLES\n", + "tags = ['load-balancing'] # ENTER DESCRIPTIVE TAG(S)\n", + "api_prefix = 'lb-' # OPTIONAL: ENTER A PREFIX FOR THE APIS TO REDUCE COLLISION POTENTIAL WITH OTHER SAMPLES\n", "\n", - "# 2) Service-defined parameters (please do not change these)\n", - "rg_name = utils.get_infra_rg_name(deployment, index)\n", - "sample_folder = \"load-balancing\"\n", - "nb_helper = utils.NotebookHelper(sample_folder, rg_name, rg_location, deployment, [INFRASTRUCTURE.AFD_APIM_PE, INFRASTRUCTURE.APIM_ACA])\n", + "# 2) Define the APIs and their operations and policies\n", + "sample_folder = 'load-balancing'\n", + "nb_helper = utils.NotebookHelper(sample_folder, utils.get_infra_rg_name(deployment, index), rg_location, deployment, [INFRASTRUCTURE.AFD_APIM_PE, INFRASTRUCTURE.APIM_ACA])\n", "\n", - "# 3) Define the APIs and their operations and policies\n", + "# Load and configure backend pool policies\n", + "pol_aca_backend_pool_load_balancing = utils.read_policy_xml('aca-backend-pool-load-balancing.xml', sample_name = sample_folder)\n", + "pol_aca_backend_pool_prioritized = pol_aca_backend_pool_load_balancing.format(retry_count = 1, backend_id = 'aca-backend-pool-web-api-429-prioritized')\n", + "pol_aca_backend_pool_prioritized_and_weighted = pol_aca_backend_pool_load_balancing.format(retry_count = 2, backend_id = 'aca-backend-pool-web-api-429-prioritized-and-weighted')\n", + "pol_aca_backend_pool_weighted_equal = pol_aca_backend_pool_load_balancing.format(retry_count = 1, backend_id = 
'aca-backend-pool-web-api-429-weighted-50-50')\n", + "pol_aca_backend_pool_weighted_unequal = pol_aca_backend_pool_load_balancing.format(retry_count = 1, backend_id = 'aca-backend-pool-web-api-429-weighted-80-20')\n", "\n", - "# Policies - read the base policy file and format with different parameters\n", - "pol_aca_backend_pool_load_balancing = utils.read_policy_xml('aca-backend-pool-load-balancing.xml', sample_name = sample_folder)\n", - "pol_aca_backend_pool_prioritized = pol_aca_backend_pool_load_balancing.format(retry_count = 1, backend_id = 'aca-backend-pool-web-api-429-prioritized')\n", - "pol_aca_backend_pool_prioritized_and_weighted = pol_aca_backend_pool_load_balancing.format(retry_count = 2, backend_id = 'aca-backend-pool-web-api-429-prioritized-and-weighted')\n", - "pol_aca_backend_pool_weighted_equal = pol_aca_backend_pool_load_balancing.format(retry_count = 1, backend_id = 'aca-backend-pool-web-api-429-weighted-50-50')\n", - "pol_aca_backend_pool_weighted_unequal = pol_aca_backend_pool_load_balancing.format(retry_count = 1, backend_id = 'aca-backend-pool-web-api-429-weighted-80-20')\n", - "\n", - "# Standard GET Operation\n", + "# Standard GET operation\n", "get = GET_APIOperation('This is a standard GET')\n", "\n", - "# ACA Backend Pools\n", + "# Define APIs with different load balancing strategies\n", "apis: List[API] = [\n", - " API(f'{api_prefix}prioritized-aca-pool', 'Prioritized backend pool', f'/{api_prefix}prioritized', 'This is the API for the prioritized backend pool.', policyXml = pol_aca_backend_pool_prioritized, operations = [get], tags = tags),\n", - " API(f'{api_prefix}prioritized-weighted-aca-pool', 'Prioritized & weighted backend pool', f'/{api_prefix}prioritized-weighted', 'This is the API for the prioritized & weighted backend pool.', policyXml = pol_aca_backend_pool_prioritized_and_weighted, operations = [get], tags = tags),\n", - " API(f'{api_prefix}weighted-equal-aca-pool', 'Weighted backend pool (equal)', 
f'/{api_prefix}weighted-equal', 'This is the API for the weighted (equal) backend pool.', policyXml = pol_aca_backend_pool_weighted_equal, operations = [get], tags = tags),\n", - " API(f'{api_prefix}weighted-unequal-aca-pool', 'Weighted backend pool (unequal)', f'/{api_prefix}weighted-unequal', 'This is the API for the weighted (unequal) backend pool.', policyXml = pol_aca_backend_pool_weighted_unequal, operations = [get], tags = tags)\n", + " # API 1: Prioritized backend pool\n", + " API(f'{api_prefix}prioritized-aca-pool', 'Prioritized backend pool', f'/{api_prefix}prioritized', \n", + " 'This is the API for the prioritized backend pool.', pol_aca_backend_pool_prioritized, [get], tags),\n", + " # API 2: Prioritized & weighted backend pool\n", + " API(f'{api_prefix}prioritized-weighted-aca-pool', 'Prioritized & weighted backend pool', f'/{api_prefix}prioritized-weighted', \n", + " 'This is the API for the prioritized & weighted backend pool.', pol_aca_backend_pool_prioritized_and_weighted, [get], tags),\n", + " # API 3: Weighted backend pool (equal distribution)\n", + " API(f'{api_prefix}weighted-equal-aca-pool', 'Weighted backend pool (equal)', f'/{api_prefix}weighted-equal', \n", + " 'This is the API for the weighted (equal) backend pool.', pol_aca_backend_pool_weighted_equal, [get], tags),\n", + " # API 4: Weighted backend pool (unequal distribution)\n", + " API(f'{api_prefix}weighted-unequal-aca-pool', 'Weighted backend pool (unequal)', f'/{api_prefix}weighted-unequal', \n", + " 'This is the API for the weighted (unequal) backend pool.', pol_aca_backend_pool_weighted_unequal, [get], tags)\n", "]\n", "\n", - "utils.print_ok('Notebook initialized')" + "utils.print_ok('โœ… Load balancing sample initialized')" ] }, { "cell_type": "markdown", "metadata": {}, "source": [ - "### ๐Ÿš€ 2. Create deployment using Bicep\n", + "## ๐Ÿš€ Deploy Infrastructure and APIs\n", "\n", - "Creates the bicep deployment into the previously-specified resource group. 
A bicep parameters file will be created prior to execution." + "Deploy the sample configuration to Azure using Bicep templates." ] }, { @@ -70,32 +77,33 @@ "metadata": {}, "outputs": [], "source": [ - "import utils\n", - "\n", - "# 1) Define the Bicep parameters with serialized APIs\n", + "# Deploy the infrastructure and APIs\n", "bicep_parameters = {\n", " 'apis': {'value': [api.to_dict() for api in apis]}\n", "}\n", "\n", - "# 2) Deploy the bicep template\n", "output = nb_helper.deploy_bicep(bicep_parameters)\n", "\n", - "if output.json_data:\n", + "if output.success:\n", + " # Extract deployment outputs for testing\n", " apim_name = output.get('apimServiceName', 'APIM Service Name')\n", " apim_gateway_url = output.get('apimResourceGatewayURL', 'APIM API Gateway URL')\n", " app_insights_name = output.get('applicationInsightsName', 'Application Insights Name')\n", " apim_apis = output.getJson('apiOutputs', 'APIs')\n", "\n", - "utils.print_ok('Deployment completed')" + " print(f\"โœ… Sample deployment completed successfully!\")\n", + "else:\n", + " print(\"โŒ Sample deployment failed!\")\n", + " raise SystemExit(1)" ] }, { "cell_type": "markdown", "metadata": {}, "source": [ - "### โœ… 3. Verify API Request Success\n", + "## โœ… Verify and Test Load Balancing\n", "\n", - "Assert that the deployment was successful by making simple calls to Azure Front Door or API Management." + "Test the deployed APIs to verify load balancing behavior across different backend pool configurations." 
] }, { @@ -104,66 +112,67 @@ "metadata": {}, "outputs": [], "source": [ + "# Test and verify load balancing behavior\n", "import json\n", "import time\n", - "import utils\n", "from apimrequests import ApimRequests\n", "from apimtesting import ApimTesting\n", "\n", "def zzzs():\n", " sleep_in_s = 5\n", - " utils.print_message(f'Waiting for {sleep_in_s} seconds for the backend timeouts to reset before starting the next set of calls', blank_above = True)\n", - " time.sleep(sleep_in_s) # Wait a bit before the next set of calls to allow for the backend timeouts to reset\n", + " utils.print_message(f'Waiting for {sleep_in_s} seconds for the backend timeouts to reset before starting the next set of calls', blank_above=True)\n", + " time.sleep(sleep_in_s)\n", "\n", "tests = ApimTesting(\"Load Balancing Sample Tests\", sample_folder, deployment)\n", "\n", - "# Preflight: Check if the infrastructure architecture deployment uses Azure Front Door. If so, assume that APIM is not directly accessible and use the Front Door URL instead.\n", + "# Get the appropriate endpoint URL for testing\n", "endpoint_url = utils.test_url_preflight_check(deployment, rg_name, apim_gateway_url)\n", "\n", "# Quick test to verify load balancing API is accessible\n", "reqs = ApimRequests(apim_gateway_url, apim_apis[0]['subscriptionPrimaryKey'])\n", - "output = reqs.singleGet('/lb-prioritized', msg = 'Quick test of load balancing API')\n", - "# We expect to see a priority 1 backend (at index 0) with a count of 1 as this is the first request.\n", + "output = reqs.singleGet('/lb-prioritized', msg='Quick test of load balancing API')\n", + "\n", + "# Verify initial response from priority 1 backend\n", "tests.verify(json.loads(output)['index'], 0)\n", "tests.verify(json.loads(output)['count'], 1)\n", "\n", - "# The following test assertions are rather basic. 
The real verification comes in the charts in the subsequent cell.\n", + "# Test different load balancing strategies\n", + "utils.print_info(\"Testing load balancing strategies...\")\n", "\n", - "# 1) Prioritized API calls\n", - "utils.print_message('1/5: Starting API calls for prioritized distribution (50/50)')\n", - "api_results_prioritized = reqs.multiGet('/lb-prioritized', runs = 15, msg = 'Calling prioritized APIs')\n", + "# 1) Prioritized distribution\n", + "utils.print_message('1/5: Starting API calls for prioritized distribution')\n", + "api_results_prioritized = reqs.multiGet('/lb-prioritized', runs=15, msg='Calling prioritized APIs')\n", "tests.verify(len(api_results_prioritized), 15)\n", "\n", - "# # 2) Weighted API calls\n", + "# 2) Weighted equal distribution\n", "zzzs()\n", - "utils.print_message('2/5: Starting API calls for weighted distribution (50/50)', blank_above = True)\n", + "utils.print_message('2/5: Starting API calls for weighted distribution (50/50)', blank_above=True)\n", "reqs = ApimRequests(apim_gateway_url, apim_apis[2]['subscriptionPrimaryKey'])\n", - "api_results_weighted_equal = reqs.multiGet('/lb-weighted-equal', runs = 15, msg = 'Calling weighted (equal) APIs')\n", + "api_results_weighted_equal = reqs.multiGet('/lb-weighted-equal', runs=15, msg='Calling weighted (equal) APIs')\n", "tests.verify(len(api_results_weighted_equal), 15)\n", "\n", - "# # 3) Weighted API calls\n", + "# 3) Weighted unequal distribution\n", "zzzs()\n", - "utils.print_message('3/5: Starting API calls for weighted distribution (80/20)', blank_above = True)\n", + "utils.print_message('3/5: Starting API calls for weighted distribution (80/20)', blank_above=True)\n", "reqs = ApimRequests(apim_gateway_url, apim_apis[3]['subscriptionPrimaryKey'])\n", - "api_results_weighted_unequal = reqs.multiGet('/lb-weighted-unequal', runs = 15, msg = 'Calling weighted (unequal) APIs')\n", + "api_results_weighted_unequal = reqs.multiGet('/lb-weighted-unequal', runs=15, 
msg='Calling weighted (unequal) APIs')\n", "tests.verify(len(api_results_weighted_unequal), 15)\n", "\n", - "# 4) Prioritized & weighted API calls\n", + "# 4) Prioritized and weighted distribution\n", "zzzs()\n", - "utils.print_message('4/5: Starting API calls for prioritized & weighted distribution', blank_above = True)\n", + "utils.print_message('4/5: Starting API calls for prioritized & weighted distribution', blank_above=True)\n", "reqs = ApimRequests(apim_gateway_url, apim_apis[1]['subscriptionPrimaryKey'])\n", - "api_results_prioritized_and_weighted = reqs.multiGet('/lb-prioritized-weighted', runs = 20, msg = 'Calling prioritized & weighted APIs')\n", + "api_results_prioritized_and_weighted = reqs.multiGet('/lb-prioritized-weighted', runs=20, msg='Calling prioritized & weighted APIs')\n", "tests.verify(len(api_results_prioritized_and_weighted), 20)\n", "\n", - "# 5) Prioritized & weighted API calls (500ms sleep)\n", + "# 5) Prioritized and weighted with recovery time\n", "zzzs()\n", - "utils.print_message('5/5: Starting API calls for prioritized & weighted distribution (500ms sleep)', blank_above = True)\n", - "api_results_prioritized_and_weighted_sleep = reqs.multiGet('/lb-prioritized-weighted', runs = 20, msg = 'Calling prioritized & weighted APIs', sleepMs = 500)\n", + "utils.print_message('5/5: Starting API calls for prioritized & weighted distribution (500ms sleep)', blank_above=True)\n", + "api_results_prioritized_and_weighted_sleep = reqs.multiGet('/lb-prioritized-weighted', runs=20, msg='Calling prioritized & weighted APIs', sleepMs=500)\n", "tests.verify(len(api_results_prioritized_and_weighted_sleep), 20)\n", "\n", "tests.print_summary()\n", - "\n", - "utils.print_ok('All done!')" + "utils.print_ok('โœ… All load balancing tests completed successfully!')" ] }, { diff --git a/samples/oauth-3rd-party/create.ipynb b/samples/oauth-3rd-party/create.ipynb index 60137dc..4017a5d 100644 --- a/samples/oauth-3rd-party/create.ipynb +++ 
b/samples/oauth-3rd-party/create.ipynb @@ -4,13 +4,14 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "### ๐Ÿ› ๏ธ 1. Initialize notebook variables\n", + "## ๐Ÿ› ๏ธ Initialize Sample Environment\n", "\n", - "โ—๏ธ **Run cells 1 & 2 MANUALLY (not via _Run All_)!**\n", + "This OAuth 3rd party APIM sample demonstrates integration with Spotify API using OAuth 2.0 authentication and JWT validation.\n", "\n", - "Configures everything that's needed for deployment. \n", - "\n", - "๐Ÿ‘‰ **Modify entries under _1) User-defined parameters_ and _3) Define the APIs and their operations and policies_**." + "**Prerequisites required:**\n", + "- Spotify Developer account and registered application\n", + "- Environment variables: `SPOTIFY_CLIENT_ID` and `SPOTIFY_CLIENT_SECRET`\n", + "- Manual OAuth connection setup (detailed in deployment steps)" ] }, { @@ -24,42 +25,41 @@ "import os\n", "\n", "# 1) User-defined parameters (change these as needed)\n", - "rg_location = 'eastus2'\n", - "index = 1\n", - "deployment = INFRASTRUCTURE.SIMPLE_APIM\n", - "tags = ['oauth-3rd-party', 'jwt', 'credential-manager', 'policy-fragment'] # ENTER DESCRIPTIVE TAG(S)\n", - "api_prefix = 'oauth-3rd-party-' # OPTIONAL: ENTER A PREFIX FOR THE APIS TO REDUCE COLLISION POTENTIAL WITH OTHER SAMPLES\n", - "# OAuth\n", - "client_id = os.getenv('SPOTIFY_CLIENT_ID') # ENTER THE OAUTH CLIENT ID FOR THE BACKEND API\n", - "client_secret = os.getenv('SPOTIFY_CLIENT_SECRET') # ENTER THE OAUTH CLIENT SECRET FOR THE BACKEND API\n", - "\n", - "# 2) Service-defined parameters (please do not change these)\n", - "rg_name = utils.get_infra_rg_name(deployment, index)\n", - "sample_folder = \"oauth-3rd-party\"\n", - "nb_helper = utils.NotebookHelper(sample_folder, rg_name, rg_location, deployment, [INFRASTRUCTURE.AFD_APIM_PE, INFRASTRUCTURE.APIM_ACA, INFRASTRUCTURE.SIMPLE_APIM], True)\n", - "\n", - "if len(client_id) == 0 or len(client_secret) == 0:\n", + "\n", + "# Infrastructure settings\n", + "rg_location = 
'eastus2'\n", + "index = 1\n", + "deployment = INFRASTRUCTURE.SIMPLE_APIM\n", + "tags = ['oauth-3rd-party', 'jwt', 'credential-manager', 'policy-fragment']\n", + "api_prefix = 'oauth-3rd-party-'\n", + "\n", + "# OAuth credentials (required environment variables)\n", + "client_id = os.getenv('SPOTIFY_CLIENT_ID')\n", + "client_secret = os.getenv('SPOTIFY_CLIENT_SECRET')\n", + "\n", + "# 2) Define the APIs and their operations and policies\n", + "sample_folder = 'oauth-3rd-party'\n", + "nb_helper = utils.NotebookHelper(sample_folder, utils.get_infra_rg_name(deployment, index), rg_location, deployment, [INFRASTRUCTURE.AFD_APIM_PE, INFRASTRUCTURE.APIM_ACA, INFRASTRUCTURE.SIMPLE_APIM], True)\n", + "\n", + "# Validate OAuth credentials\n", + "if not client_id or not client_secret:\n", " utils.print_error('Please set the SPOTIFY_CLIENT_ID and SPOTIFY_CLIENT_SECRET environment variables in the root .env file before running this notebook.')\n", " raise ValueError('Missing Spotify OAuth credentials')\n", "\n", - "# 3) Set up the named values\n", + "# Define named values\n", "nvs: List[NamedValue] = [\n", " NamedValue(nb_helper.jwt_key_name, nb_helper.jwt_key_value_bytes_b64, True),\n", " NamedValue('MarketingMemberRoleId', Role.MARKETING_MEMBER)\n", "]\n", "\n", - "# 4) Define the APIs and their operations and policies\n", - "\n", - "# Policies\n", - "pol_artist_get_xml = utils.read_policy_xml('artist_get.xml', sample_name = sample_folder)\n", - "\n", - "# Read the policy XML without modifications - it already uses correct APIM named value format\n", + "# Load policy definitions\n", + "pol_artist_get_xml = utils.read_policy_xml('artist_get.xml', sample_name=sample_folder)\n", "pol_spotify_api_xml = utils.read_and_modify_policy_xml('spotify_api.xml', {\n", " 'jwt_signing_key': '{{' + nb_helper.jwt_key_name + '}}', \n", " 'marketing_member_role_id': '{{MarketingMemberRoleId}}'\n", - "}, sample_folder) \n", + "}, sample_folder)\n", "\n", - "# Define template parameters for the 
artists\n", + "# Define template parameters for artist ID\n", "blob_template_parameters = [\n", " {\n", " \"name\": \"id\",\n", @@ -69,24 +69,27 @@ " }\n", "]\n", "\n", - "# Spotify\n", - "spotify_artist_get = GET_APIOperation2('artists-get', 'Artists', '/artists/{id}', 'Gets the artist by their ID', pol_artist_get_xml, templateParameters = blob_template_parameters)\n", + "# Define API operations\n", + "spotify_artist_get = GET_APIOperation2('artists-get', 'Artists', '/artists/{id}', 'Gets the artist by their ID', \n", + " pol_artist_get_xml, templateParameters = blob_template_parameters)\n", "\n", - "# APIs Array\n", + "# Define APIs\n", "apis: List[API] = [\n", - " API(f'{api_prefix}spotify', 'Spotify', f'/{api_prefix}spotify', 'This is the API for interactions with the Spotify REST API', policyXml = pol_spotify_api_xml, operations = [spotify_artist_get], tags = tags),\n", + " # API 1: Spotify API\n", + " API(f'{api_prefix}spotify', 'Spotify', f'/{api_prefix}spotify', \n", + " 'This is the API for interactions with the Spotify REST API', pol_spotify_api_xml, [spotify_artist_get], tags)\n", "]\n", "\n", - "utils.print_ok('Notebook initialized')" + "utils.print_ok('โœ… OAuth 3rd party sample initialized')" ] }, { "cell_type": "markdown", "metadata": {}, "source": [ - "### ๐Ÿš€ 2. Create deployment using Bicep\n", + "## ๐Ÿš€ Deploy Infrastructure and APIs\n", "\n", - "Creates the bicep deployment into the previously-specified resource group. A bicep parameters file will be created prior to execution." + "Deploy the sample configuration to Azure using Bicep templates." 
] }, { @@ -95,9 +98,7 @@ "metadata": {}, "outputs": [], "source": [ - "import utils\n", - "\n", - "# 1) Define the Bicep parameters with serialized APIs\n", + "# Deploy the infrastructure and APIs\n", "bicep_parameters = {\n", " 'apis': {'value': [api.to_dict() for api in apis]},\n", " 'namedValues': {'value': [nv.to_dict() for nv in nvs]},\n", @@ -105,56 +106,51 @@ " 'clientSecret': {'value': client_secret}\n", "}\n", "\n", - "# 2) Deploy the bicep template\n", "output = nb_helper.deploy_bicep(bicep_parameters)\n", "\n", - "if output.json_data:\n", + "if output.success:\n", + " # Extract deployment outputs for testing\n", " apim_name = output.get('apimServiceName', 'APIM Service Name')\n", " apim_gateway_url = output.get('apimResourceGatewayURL', 'APIM API Gateway URL')\n", " apim_apis = output.getJson('apiOutputs', 'APIs')\n", " spotify_oauth_redirect_url = output.get('spotifyOAuthRedirectUrl', 'OAuth Redirect URL')\n", "\n", - "utils.print_ok('Deployment completed')" + " print(f\"โœ… Sample deployment completed successfully!\")\n", + "else:\n", + " print(\"โŒ Sample deployment failed!\")\n", + " raise SystemExit(1)" ] }, { "cell_type": "markdown", "metadata": {}, "source": [ - "### ๐Ÿ—’๏ธ 3. Authenticate API Management with Spotify\n", - "\n", - "โ—๏ธ **The following steps are all manual and cannot presently be automated.**\n", - "\n", - "We have previously created the _APIM_ application in Spotify and have also set Spotify up in Credential Manager via the just-completed bicep. \n", - "\n", - "#### 3.1 Set Redirect URL in Spotify\n", - "\n", - "Now that the API Management instance has been created, we need to update the redirect URI for the _APIM_ application in Spotify.\n", - "\n", - "1. Open the [Spotify Developer Dashboard](https://developer.spotify.com/dashboard), then click on the _APIM_ application.\n", - "1. Press _Edit_ and remove the temporary _localhost_ Redirect URI.\n", - "1. Add the `OAuth Redirect URL` (see output above), then press 'Save`. 
\n", - "\n", - "#### 3.2 Log API Management into Spotify\n", - "\n", - "We now need to log the _APIM_ application into Spotify via OAuth 2.0.\n", - "\n", - "1. Open the [Azure Portal](https://portal.azure.com) and navigate to your API Management instance.\n", - "1. Expand the _APIs_ blade and click on _Credential manager_. You should see the `spotify` credential provider name. Click on it.\n", - "1. Press _Connections_. You should see `spotify-auth` with an `Error` status (\"This connection is not authenticated.\").\n", - "1. Click on the ellipsis (...) on the right and select _Login_. This should open a dialog with Spotify, asking you to agree for Spotify and APIM to connect. Press _Agree_.\n", - "1. Back in the Azure Portal, press _Refresh_ to see the `Connected` status.\n" + "## \udd27 Configure OAuth Connection (Manual Steps)\n", + "\n", + "**The following steps are manual and required before testing:**\n", + "\n", + "### 3.1 Update Spotify Redirect URL\n", + "1. Open [Spotify Developer Dashboard](https://developer.spotify.com/dashboard)\n", + "2. Click on your APIM application\n", + "3. Press **Edit** and remove the temporary localhost Redirect URI\n", + "4. Add the OAuth Redirect URL from deployment output above\n", + "5. Press **Save**\n", + "\n", + "### 3.2 Authenticate APIM with Spotify\n", + "1. Open [Azure Portal](https://portal.azure.com) and navigate to your API Management instance\n", + "2. Go to **APIs** > **Credential manager** > **spotify**\n", + "3. Click **Connections** > **spotify-auth** (should show \"Error\" status)\n", + "4. Click ellipsis (...) > **Login** and authorize the connection\n", + "5. Press **Refresh** to verify \"Connected\" status" ] }, { "cell_type": "markdown", "metadata": {}, "source": [ - "### โœ… 4. Verify API Request Success\n", + "## โœ… Verify and Test OAuth Integration\n", "\n", - "Assert that the deployment was successful by making simple calls to APIM. 
\n", - "\n", - "โ—๏ธ If the infrastructure shields APIM and requires a different ingress (e.g. Azure Front Door), the request to the APIM gateway URl will fail by design. Obtain the Front Door endpoint hostname and try that instead." + "Test the deployed APIs to confirm successful OAuth integration with Spotify." ] }, { @@ -163,7 +159,7 @@ "metadata": {}, "outputs": [], "source": [ - "import utils\n", + "# Test and verify OAuth integration with Spotify\n", "import json\n", "from apimrequests import ApimRequests\n", "from apimtesting import ApimTesting\n", @@ -172,32 +168,39 @@ "\n", "tests = ApimTesting(\"OAuth 3rd Party (Spotify) Sample Tests\", sample_folder, deployment)\n", "\n", - "# 1) Marketing Member Role\n", - "# Create a JSON Web Token with a payload and sign it with the symmetric key from above.\n", - "encoded_jwt_token_marketing_member = AuthFactory.create_symmetric_jwt_token_for_user(UserHelper.get_user_by_role(Role.MARKETING_MEMBER), nb_helper.jwt_key_value)\n", - "print(f'\\nJWT token for Marketing Member:\\n{encoded_jwt_token_marketing_member}') # this value is used to call the APIs via APIM\n", + "# Create JWT token for Marketing Member role\n", + "encoded_jwt_token_marketing_member = AuthFactory.create_symmetric_jwt_token_for_user(\n", + " UserHelper.get_user_by_role(Role.MARKETING_MEMBER), \n", + " nb_helper.jwt_key_value\n", + ")\n", + "utils.print_info(f'JWT token for Marketing Member:\\n{encoded_jwt_token_marketing_member}')\n", "\n", - "# Preflight: Check if the infrastructure architecture deployment uses Azure Front Door. 
If so, assume that APIM is not directly accessible and use the Front Door URL instead.\n", + "# Get the appropriate endpoint URL for testing\n", "endpoint_url = utils.test_url_preflight_check(deployment, rg_name, apim_gateway_url)\n", "\n", + "# Test Spotify API integration\n", "reqs = ApimRequests(endpoint_url, apim_apis[0]['subscriptionPrimaryKey'])\n", "reqs.headers['Authorization'] = f'Bearer {encoded_jwt_token_marketing_member}'\n", - "artist_id = '06HL4z0CvFAxyc27GXpf02' # Taylor Swift's Spotify Artist ID\n", - "output = reqs.singleGet(f'/oauth-3rd-party-spotify/artists/{artist_id}', msg = 'Calling the Spotify Artist API via API Management Gateway URL.')\n", + "\n", + "# Test artist lookup (Taylor Swift's Spotify Artist ID)\n", + "artist_id = '06HL4z0CvFAxyc27GXpf02'\n", + "output = reqs.singleGet(f'/oauth-3rd-party-spotify/artists/{artist_id}', \n", + " msg='Calling the Spotify Artist API via API Management Gateway URL.')\n", + "\n", "artist = json.loads(output)\n", "tests.verify(artist['name'], 'Taylor Swift')\n", "utils.print_info(f'{artist[\"name\"]} has a popularity rating of {artist[\"popularity\"]} with {artist[\"followers\"][\"total\"]:,} followers on Spotify.')\n", "\n", - "# 2) Unsuccessful call without API subscription key (should fail with 401 Unauthorized)\n", + "# Test unauthorized access (should fail with 401)\n", "reqsNoApiSubscription = ApimRequests(endpoint_url)\n", - "output = reqsNoApiSubscription.singleGet(f'/oauth-3rd-party-spotify/artists/{artist_id}', msg = 'Calling the Spotify Artist API via API Management Gateway URL without API subscription key. Expect 401.')\n", + "output = reqsNoApiSubscription.singleGet(f'/oauth-3rd-party-spotify/artists/{artist_id}', \n", + " msg='Calling the Spotify Artist API without API subscription key. Expect 401.')\n", "outputJson = utils.get_json(output)\n", "tests.verify(outputJson['statusCode'], 401)\n", "tests.verify(outputJson['message'], 'Access denied due to missing subscription key. 
Make sure to include subscription key when making requests to an API.')\n", "\n", "tests.print_summary()\n", - "\n", - "utils.print_ok('All done!')" + "utils.print_ok('โœ… All OAuth integration tests completed successfully!')" ] } ], diff --git a/samples/secure-blob-access/create.ipynb b/samples/secure-blob-access/create.ipynb index bbcf220..5e60df6 100644 --- a/samples/secure-blob-access/create.ipynb +++ b/samples/secure-blob-access/create.ipynb @@ -5,11 +5,13 @@ "id": "6d879658", "metadata": {}, "source": [ - "### ๐Ÿ› ๏ธ 1. Initialize notebook variables\n", + "## ๐Ÿ› ๏ธ Initialize Sample Environment\n", "\n", - "Configures everything that's needed for deployment.\n", + "This secure blob access APIM sample demonstrates the valet key pattern for secure blob storage access with JWT authentication and authorization.\n", "\n", - "**Modify entries under _1) User-defined parameters_**. The APIs are pre-configured for the valet key pattern implementation." + "**Configuration required:**\n", + "- Review and modify the parameters in the initialization cell below\n", + "- Sample uses managed identity for secure blob access" ] }, { @@ -19,54 +21,75 @@ "metadata": {}, "outputs": [], "source": [ + "# Initialize notebook helper and configuration\n", "import utils\n", "from apimtypes import *\n", "\n", - "# 1) User-defined parameters (change these as needed)\n", + "# ------------------------------\n", + "# USER CONFIGURATION\n", + "# ------------------------------\n", + "\n", + "# Infrastructure settings\n", "rg_location = 'eastus2'\n", - "index = 1\n", - "deployment = INFRASTRUCTURE.SIMPLE_APIM # This sample works with all infrastructures\n", - "tags = ['secure-blob-access', 'valet-key', 'storage', 'jwt', 'authz']\n", - "api_prefix = 'blob-' # OPTIONAL: ENTER A PREFIX FOR THE APIS TO REDUCE COLLISION POTENTIAL WITH OTHER SAMPLES\n", - "\n", - "# 2) Service-defined parameters (please do not change these)\n", - "rg_name = utils.get_infra_rg_name(deployment, index)\n", - 
"sample_folder = \"secure-blob-access\"\n", - "nb_helper = utils.NotebookHelper(sample_folder, rg_name, rg_location, deployment, [INFRASTRUCTURE.AFD_APIM_PE, INFRASTRUCTURE.APIM_ACA, INFRASTRUCTURE.SIMPLE_APIM], True)\n", - "jwt_key_name = nb_helper.jwt_key_name\n", + "index = 1\n", + "deployment = INFRASTRUCTURE.SIMPLE_APIM\n", + "tags = ['secure-blob-access', 'valet-key', 'storage', 'jwt', 'authz']\n", + "api_prefix = 'blob-'\n", + "\n", + "# Blob storage configuration\n", + "container_name = 'hr-assets'\n", + "file_name = 'hr.txt'\n", + "\n", + "# ------------------------------\n", + "# SAMPLE SETUP\n", + "# ------------------------------\n", + "\n", + "sample_folder = 'secure-blob-access'\n", + "rg_name = utils.get_infra_rg_name(deployment, index)\n", + "nb_helper = utils.NotebookHelper(\n", + " sample_folder, \n", + " rg_name, \n", + " rg_location, \n", + " deployment,\n", + " [INFRASTRUCTURE.AFD_APIM_PE, INFRASTRUCTURE.APIM_ACA, INFRASTRUCTURE.SIMPLE_APIM], \n", + " True\n", + ")\n", + "\n", + "jwt_key_name = nb_helper.jwt_key_name\n", "jwt_key_value_bytes_b64 = nb_helper.jwt_key_value_bytes_b64\n", "\n", - "# 3) Set up the named values\n", + "# ------------------------------\n", + "# API CONFIGURATION\n", + "# ------------------------------\n", + "\n", + "# Define named values\n", "nvs: List[NamedValue] = [\n", " NamedValue(jwt_key_name, jwt_key_value_bytes_b64, True),\n", " NamedValue('HRMemberRoleId', Role.HR_MEMBER)\n", "]\n", "\n", - "# 4) Set up the policy fragments\n", + "# Load policy fragment definitions\n", "pf_authx_hr_member_xml = utils.read_policy_xml('pf-authx-hr-member.xml', {\n", " 'jwt_signing_key': jwt_key_name,\n", " 'hr_member_role_id': 'HRMemberRoleId'\n", "}, sample_folder)\n", "\n", - "pf_create_sas_token_xml = utils.read_policy_xml('pf-create-sas-token.xml', sample_name = sample_folder)\n", - "pf_check_blob_existence_via_mi = utils.read_policy_xml('pf-check-blob-existence-via-managed-identity.xml', sample_name = sample_folder)\n", + 
"pf_create_sas_token_xml = utils.read_policy_xml('pf-create-sas-token.xml', sample_name=sample_folder)\n", + "pf_check_blob_existence_via_mi = utils.read_policy_xml('pf-check-blob-existence-via-managed-identity.xml', sample_name=sample_folder)\n", "\n", + "# Define policy fragments\n", "pfs: List[PolicyFragment] = [\n", " PolicyFragment('AuthX-HR-Member', pf_authx_hr_member_xml, 'Authenticates and authorizes users with HR Member role.'),\n", " PolicyFragment('Create-Sas-Token', pf_create_sas_token_xml, 'Creates a SAS token to use with access to a blob.'),\n", " PolicyFragment('Check-Blob-Existence-via-Managed-Identity', pf_check_blob_existence_via_mi, 'Checks whether the specified blob exists at the blobUrl. A boolean value for blobExists will be available afterwards.')\n", "]\n", "\n", - "# 5) Define blob storage parameters\n", - "container_name = 'hr-assets'\n", - "file_name = 'hr.txt'\n", - "\n", - "# Read the policy XML without modifications - it already uses correct APIM named value format\n", + "# Load API policy\n", "pol_blob_get = utils.read_and_modify_policy_xml('blob-get-operation.xml', {\n", " 'container_name': container_name\n", - "}, sample_folder) \n", + "}, sample_folder)\n", "\n", - "# Define template parameters for the blob name\n", + "# Define template parameters for blob name\n", "blob_template_parameters = [\n", " {\n", " \"name\": \"blob-name\",\n", @@ -76,18 +99,16 @@ " }\n", "]\n", "\n", - "# 6) Define the APIs and their operations and policies\n", + "# Define API operations\n", + "blob_get = GET_APIOperation2('GET', 'GET', '/{blob-name}', 'Gets the blob access valet key info', pol_blob_get, templateParameters=blob_template_parameters)\n", "\n", - "# Secure Blob Access API\n", - "blob_get = GET_APIOperation2('GET', 'GET', '/{blob-name}', 'Gets the blob access valet key info', pol_blob_get, templateParameters = blob_template_parameters)\n", - " \n", - "secure_blob_api = API(name = 'secure-blob-access', displayName = 'Secure Blob Access API', 
path = f'/{api_prefix}secure-files', \n", - " description = 'API for secure access to blob storage using the valet key pattern', operations = [blob_get], tags = tags)\n", + "# API 1: Secure Blob Access API\n", + "secure_blob_api = API(name='secure-blob-access', displayName='Secure Blob Access API', path=f'/{api_prefix}secure-files', \n", + " description='API for secure access to blob storage using the valet key pattern', operations=[blob_get], tags=tags)\n", "\n", - "# APIs Array\n", "apis: List[API] = [secure_blob_api]\n", "\n", - "utils.print_ok('Notebook initialized')" + "utils.print_ok('โœ… Secure blob access sample initialized')" ] }, { @@ -95,9 +116,9 @@ "id": "9f7453b4", "metadata": {}, "source": [ - "### ๐Ÿš€ 2. Create deployment using Bicep\n", + "## ๐Ÿš€ Deploy Infrastructure and APIs\n", "\n", - "Creates the bicep deployment into the previously-specified resource group. A bicep parameters file will be created prior to execution." + "Deploy the sample configuration to Azure using Bicep templates." 
] }, { @@ -107,9 +128,7 @@ "metadata": {}, "outputs": [], "source": [ - "import utils\n", - "\n", - "# 1) Define the Bicep parameters with serialized APIs\n", + "# Deploy the infrastructure and APIs\n", "bicep_parameters = {\n", " 'apis': {'value': [api.to_dict() for api in apis]},\n", " 'namedValues': {'value': [nv.to_dict() for nv in nvs]},\n", @@ -118,18 +137,17 @@ " 'blobName': {'value': file_name}\n", "}\n", "\n", - "# 2) Deploy the bicep template\n", "output = nb_helper.deploy_bicep(bicep_parameters)\n", "\n", "if output.json_data:\n", - " apim_name = output.get('apimServiceName', 'APIM Service Name')\n", - " apim_gateway_url = output.get('apimResourceGatewayURL', 'APIM API Gateway URL')\n", + " apim_name = output.get('apimServiceName', 'APIM Service Name')\n", + " apim_gateway_url = output.get('apimResourceGatewayURL', 'APIM API Gateway URL')\n", " storage_account_name = output.get('storageAccountName', 'Storage Account Name')\n", - " storage_endpoint = output.get('storageAccountEndpoint', 'Storage Endpoint')\n", - " container_name = output.get('blobContainerName', 'Blob Container Name')\n", - " apim_apis = output.getJson('apiOutputs', 'APIs')\n", + " storage_endpoint = output.get('storageAccountEndpoint', 'Storage Endpoint')\n", + " container_name = output.get('blobContainerName', 'Blob Container Name')\n", + " apim_apis = output.getJson('apiOutputs', 'APIs')\n", "\n", - "utils.print_ok('Deployment completed')" + "utils.print_ok('โœ… Deployment completed successfully')" ] }, { @@ -137,9 +155,9 @@ "id": "7e96a588", "metadata": {}, "source": [ - "### โœ… 3. Verify APIM Managed Identity Permissions\n", + "## ๐Ÿ” Verify Managed Identity Permissions\n", "\n", - "Before testing secure blob access, we need to ensure that APIM's managed identity has the correct permissions to read from the storage account. This check helps avoid confusion if role assignment propagation is still in progress. 
The deployment script attempts to assign the **Storage Blob Data Reader** role to APIM's managed identity, but Azure role assignments can take several minutes to propagate fully across all services." + "Ensure APIM's managed identity has proper Storage Blob Data Reader permissions before testing." ] }, { @@ -149,24 +167,22 @@ "metadata": {}, "outputs": [], "source": [ - "import utils\n", - "\n", - "# Use the improved permission check utility function\n", - "utils.print_message('Verifying APIM Managed Identity Permissions', blank_above = True)\n", + "# Verify APIM managed identity permissions for blob access\n", + "utils.print_info('Verifying APIM Managed Identity Permissions...')\n", "\n", - "# Run the permission check with automatic retry and clear user feedback\n", + "# Check permissions with automatic retry (role assignments can take time to propagate)\n", "permissions_ready = utils.wait_for_apim_blob_permissions(\n", - " apim_name = apim_name,\n", - " storage_account_name = storage_account_name,\n", - " resource_group_name = rg_name,\n", - " max_wait_minutes = 5 # Allow up to 5 minutes for role propagation\n", + " apim_name=apim_name,\n", + " storage_account_name=storage_account_name,\n", + " resource_group_name=rg_name,\n", + " max_wait_minutes=5\n", ")\n", "\n", "if permissions_ready:\n", - " utils.print_ok('APIM permissions verified successfully')\n", + " utils.print_ok('โœ… APIM permissions verified successfully')\n", "else:\n", - " utils.print_warning('Permission verification incomplete - you may encounter 503/403 errors during testing')\n", - " print(\"๐Ÿ’ก If you see 503 errors in the next step, wait a few minutes and try again.\")" + " utils.print_warning('โš ๏ธ Permission verification incomplete - you may encounter 503/403 errors during testing')\n", + " utils.print_info('๐Ÿ’ก If you see 503 errors in the next step, wait a few minutes and try again.')" ] }, { @@ -174,19 +190,9 @@ "id": "db8190ee", "metadata": {}, "source": [ - "### โœ… 4. 
Test the Secure Blob Access with Authentication\n", + "## โœ… Verify and Test Secure Blob Access\n", "\n", - "Test the secure blob access API to verify both the authentication/authorization and valet key pattern implementation. We'll:\n", - "1. Create JWT tokens for authorized and unauthorized users\n", - "2. Test API access with valid authentication\n", - "3. Test access denial for unauthorized users \n", - "4. Verify direct blob access using the valet key pattern\n", - "\n", - "The sample file was automatically created during the infrastructure deployment using a Bicep deployment script.\n", - "\n", - "โ—๏ธ If the infrastructure shields APIM and requires a different ingress (e.g. Azure Front Door), the request to the APIM gateway URL will fail by design. Obtain the Front Door endpoint hostname and try that instead.\n", - "\n", - "๐Ÿ” **Note about 503/403 errors**: If you see Service Unavailable (503) or Forbidden (403) errors when accessing blobs through APIM, this is likely due to role assignment propagation delays. The permission check above helps identify this scenario." + "Test the deployed APIs to confirm secure blob access using the valet key pattern with JWT authentication." 
] }, { @@ -196,15 +202,16 @@ "metadata": {}, "outputs": [], "source": [ - "import utils\n", + "# Test and verify secure blob access using valet key pattern\n", + "import json\n", + "import requests\n", "from apimrequests import ApimRequests\n", "from apimtesting import ApimTesting\n", - "import requests\n", - "import json\n", "from users import UserHelper\n", "from authfactory import AuthFactory\n", "\n", "def handleResponse(response):\n", + " \"\"\"Handle blob access response and test direct blob access.\"\"\"\n", " if isinstance(response, str):\n", " try:\n", " access_info = json.loads(response)\n", @@ -213,21 +220,21 @@ " if sas_url == 'N/A':\n", " return response\n", "\n", - " print(f\"\\nSecure Blob URL: {sas_url}\")\n", - " print(f\"Expires At: {access_info.get('expire_at', 'N/A')}\") \n", + " utils.print_info(f\"Secure Blob URL: {sas_url}\")\n", + " utils.print_info(f\"Expires At: {access_info.get('expire_at', 'N/A')}\")\n", "\n", - " # Test direct access to the blob using the provided credentials\n", - " utils.print_message(\"๐Ÿงช Testing direct blob access...\", blank_above = True)\n", + " # Test direct blob access using the valet key (SAS URL)\n", + " utils.print_info(\"๐Ÿงช Testing direct blob access...\")\n", " \n", " try:\n", " blob_response = requests.get(access_info['sas_url'])\n", " if blob_response.status_code == 200:\n", - " utils.print_info(\"Direct blob access successful!\")\n", + " utils.print_info(\"โœ… Direct blob access successful!\")\n", " content_preview = blob_response.text[:200] + \"...\" if len(blob_response.text) > 200 else blob_response.text\n", - " utils.print_val(f\"Content preview:\", content_preview.strip(), True)\n", + " utils.print_val(\"Content preview:\", content_preview.strip(), True)\n", " return content_preview.strip()\n", " else:\n", - " utils.print_error(f\"Direct blob access failed: {blob_response.status_code}\")\n", + " utils.print_error(f\"โŒ Direct blob access failed: {blob_response.status_code}\")\n", " return 
blob_response.status_code\n", " except Exception as e:\n", " utils.print_error(f\"Error accessing blob directly: {str(e)}\")\n", @@ -237,47 +244,52 @@ "\n", "tests = ApimTesting(\"Secure Blob Access Sample Tests\", sample_folder, deployment)\n", "\n", - "api_subscription_key = apim_apis[0]['subscriptionPrimaryKey']\n", - "\n", - "# Preflight: Check if the infrastructure architecture deployment uses Azure Front Door\n", + "# Get the appropriate endpoint URL for testing\n", "endpoint_url = utils.test_url_preflight_check(deployment, rg_name, apim_gateway_url)\n", + "api_subscription_key = apim_apis[0]['subscriptionPrimaryKey']\n", "\n", - "# 1) Test with authorized user (has blob access role)\n", - "utils.print_message('1) Testing with Authorized User', blank_above = True)\n", + "# Test 1: Authorized user with HR Member role\n", + "utils.print_info(\"1๏ธโƒฃ Testing with Authorized User (HR Member role)\")\n", "\n", - "# Create a JWT with the HR Member role (blob access)\n", - "encoded_jwt_token_hr_member = AuthFactory.create_symmetric_jwt_token_for_user(UserHelper.get_user_by_role(Role.HR_MEMBER), nb_helper.jwt_key_value)\n", - "print(f'\\nJWT token for HR Member:\\n{encoded_jwt_token_hr_member}') # this value is used to call the APIs via APIM\n", + "# Create JWT token for HR Member role\n", + "encoded_jwt_token_hr_member = AuthFactory.create_symmetric_jwt_token_for_user(\n", + " UserHelper.get_user_by_role(Role.HR_MEMBER), \n", + " nb_helper.jwt_key_value\n", + ")\n", + "utils.print_info(f'JWT token for HR Member:\\n{encoded_jwt_token_hr_member}')\n", "\n", - "# Set up APIM requests with authorized JWT\n", + "# Test secure blob access with authorization\n", "reqsApimAuthorized = ApimRequests(endpoint_url, api_subscription_key)\n", "reqsApimAuthorized.headers['Authorization'] = f'Bearer {encoded_jwt_token_hr_member}'\n", "\n", - "# Test sample file access\n", - "print(f\"\\n๐Ÿ”’ Getting secure access for {file_name} with authorized user...\")\n", - "response = 
reqsApimAuthorized.singleGet(f'/{api_prefix}secure-files/{file_name}', msg = f'Requesting secure access for {file_name} (authorized)')\n", + "utils.print_info(f\"๐Ÿ”’ Getting secure access for {file_name} with authorized user...\")\n", + "response = reqsApimAuthorized.singleGet(f'/{api_prefix}secure-files/{file_name}', \n", + " msg=f'Requesting secure access for {file_name} (authorized)')\n", "output = handleResponse(response)\n", "tests.verify(output, 'This is an HR document.')\n", "\n", - "# 2) Test with unauthorized user (has blob access role)\n", - "utils.print_message('2) Testing with Unauthorized User', blank_above = True)\n", + "# Test 2: Unauthorized user without required role\n", + "utils.print_info(\"2๏ธโƒฃ Testing with Unauthorized User (no role)\")\n", "\n", - "encoded_jwt_token_no_role = AuthFactory.create_symmetric_jwt_token_for_user(UserHelper.get_user_by_role(Role.NONE), nb_helper.jwt_key_value)\n", - "print(f'\\nJWT token for user with no role:\\n{encoded_jwt_token_no_role}') # this value is used to call the APIs via APIM\n", + "# Create JWT token for user with no role\n", + "encoded_jwt_token_no_role = AuthFactory.create_symmetric_jwt_token_for_user(\n", + " UserHelper.get_user_by_role(Role.NONE), \n", + " nb_helper.jwt_key_value\n", + ")\n", + "utils.print_info(f'JWT token for user with no role:\\n{encoded_jwt_token_no_role}')\n", "\n", - "# Set up APIM requests with unauthorized JWT\n", - "reqsApimAuthorized = ApimRequests(endpoint_url, api_subscription_key)\n", - "reqsApimAuthorized.headers['Authorization'] = f'Bearer {encoded_jwt_token_no_role}'\n", + "# Test access denial for unauthorized user\n", + "reqsApimUnauthorized = ApimRequests(endpoint_url, api_subscription_key)\n", + "reqsApimUnauthorized.headers['Authorization'] = f'Bearer {encoded_jwt_token_no_role}'\n", "\n", - "# Test sample file access\n", - "print(f\"\\n๐Ÿ”’ Attempting to obtain secure access for {file_name} with unauthorized user (expect 401/403)...\")\n", - "response = 
reqsApimAuthorized.singleGet(f'/{api_prefix}secure-files/{file_name}', msg = f'Requesting secure access for {file_name} (authorized)')\n", + "utils.print_info(f\"๐Ÿ”’ Attempting to obtain secure access for {file_name} with unauthorized user (expect 401/403)...\")\n", + "response = reqsApimUnauthorized.singleGet(f'/{api_prefix}secure-files/{file_name}', \n", + " msg=f'Requesting secure access for {file_name} (unauthorized)')\n", "output = handleResponse(response)\n", "tests.verify(json.loads(output)['statusCode'], 401)\n", "\n", "tests.print_summary()\n", - "\n", - "utils.print_ok('All done!')\n" + "utils.print_ok('โœ… All secure blob access tests completed successfully!')" ] } ], diff --git a/shared/python/utils.py b/shared/python/utils.py index d7e89b2..7a36a01 100644 --- a/shared/python/utils.py +++ b/shared/python/utils.py @@ -27,11 +27,11 @@ # Define ANSI escape code constants for clarity in the print commands below -RESET = "\x1b[0m" -BOLD_B = "\x1b[1;34m" # blue -BOLD_R = "\x1b[1;31m" # red -BOLD_G = "\x1b[1;32m" # green -BOLD_Y = "\x1b[1;33m" # yellow +BOLD_B = "\x1b[1;34m" # blue +BOLD_G = "\x1b[1;32m" # green +BOLD_R = "\x1b[1;31m" # red +BOLD_Y = "\x1b[1;33m" # yellow +RESET = "\x1b[0m" CONSOLE_WIDTH = 175 @@ -102,7 +102,7 @@ def __init__(self, success: bool, text: str): self.is_json = self.json_data is not None - def get(self, key: str, label: str = '', secure: bool = False) -> str | None: + def get(self, key: str, label: str = '', secure: bool = False, suppress_logging: bool = False) -> str | None: """ Retrieve a deployment output property by key, with optional label and secure masking. 
@@ -136,7 +136,7 @@ def get(self, key: str, label: str = '', secure: bool = False) -> str | None: elif key in self.json_data: deployment_output = self.json_data[key]['value'] - if label: + if not suppress_logging and label: if secure and isinstance(deployment_output, str) and len(deployment_output) >= 4: print_val(label, f"****{deployment_output[-4:]}") else: @@ -153,7 +153,7 @@ def get(self, key: str, label: str = '', secure: bool = False) -> str | None: return None - def getJson(self, key: str, label: str = '', secure: bool = False) -> Any: + def getJson(self, key: str, label: str = '', secure: bool = False, suppress_logging: bool = False) -> Any: """ Retrieve a deployment output property by key and return it as a JSON object. This method is independent from get() and retrieves the raw deployment output value. @@ -188,7 +188,7 @@ def getJson(self, key: str, label: str = '', secure: bool = False) -> Any: elif key in self.json_data: deployment_output = self.json_data[key]['value'] - if label: + if not suppress_logging and label: if secure and isinstance(deployment_output, str) and len(deployment_output) >= 4: print_val(label, f"****{deployment_output[-4:]}") else: @@ -221,16 +221,110 @@ def getJson(self, key: str, label: str = '', secure: bool = False) -> Any: return None +class InfrastructureNotebookHelper: + """ + Helper class for managing infrastructure notebooks. + Provides methods to execute infrastructure creation notebooks and handle outputs. + """ + + # ------------------------------ + # CONSTRUCTOR + # ------------------------------ + + def __init__(self, rg_location: str, deployment: INFRASTRUCTURE, index: int, apim_sku: APIM_SKU): + """ + Initialize the InfrastructureNotebookHelper. + + Args: + rg_location (str): Azure region for deployment. + deployment (INFRASTRUCTURE): Infrastructure type to deploy. + index (int): Index for multi-instance deployments. + apim_sku (APIM_SKU): SKU for API Management service. 
+ """ + + self.rg_location = rg_location + self.deployment = deployment + self.index = index + self.apim_sku = apim_sku + + # ------------------------------ + # PUBLIC METHODS + # ------------------------------ + + def create_infrastructure(self, bypass_infrastructure_check: bool = False) -> bool: + """ + Create infrastructure by executing the appropriate creation script. + + Args: + bypass_infrastructure_check (bool): Skip infrastructure existence check. Defaults to False. + + Returns: + bool: True if infrastructure creation succeeded, False otherwise. + """ + + import sys + if bypass_infrastructure_check or not does_infrastructure_exist(self.deployment, self.index): + # Map infrastructure types to their folder names + infra_folder_map = { + INFRASTRUCTURE.SIMPLE_APIM: 'simple-apim', + INFRASTRUCTURE.AFD_APIM_PE: 'afd-apim-pe', + INFRASTRUCTURE.APIM_ACA: 'apim-aca' + } + + infra_folder = infra_folder_map.get(self.deployment) + if not infra_folder: + print(f"โŒ Unsupported infrastructure type: {self.deployment.value}") + return False + + # Build the command to call the infrastructure creation script + cmd_args = [ + sys.executable, + os.path.join(find_project_root(), 'infrastructure', infra_folder, 'create_infrastructure.py'), + '--location', self.rg_location, + '--sku', str(self.apim_sku.value), + '--index', str(self.index) + ] + + # Execute the infrastructure creation script with real-time output streaming and UTF-8 encoding to handle Unicode characters properly + process = subprocess.Popen(cmd_args, stdout = subprocess.PIPE, stderr = subprocess.STDOUT, text = True, + bufsize = 1, universal_newlines = True, encoding = 'utf-8', errors = 'replace') + + try: + # Stream output in real-time + for line in process.stdout: + print(line.rstrip()) + except Exception as e: + print(f"Error reading subprocess output: {e}") + + # Wait for process to complete + process.wait() + + return process.returncode == 0 + + return True + class NotebookHelper: - def __init__(self, 
sample_folder: str, rg_name: str, rg_location: str, deployment: INFRASTRUCTURE, supported_infrastructures = list[INFRASTRUCTURE], use_jwt: bool = False): + """ + Helper class for managing sample notebook deployments and infrastructure interaction. + """ + + # ------------------------------ + # CONSTRUCTOR + # ------------------------------ + + def __init__(self, sample_folder: str, rg_name: str, rg_location: str, deployment: INFRASTRUCTURE, supported_infrastructures = list[INFRASTRUCTURE], use_jwt: bool = False, index: int = 1): """ - Initialize the NotebookHelper with a name and resource group. + Initialize the NotebookHelper with sample configuration and infrastructure details. Args: sample_folder (str): The name of the sample folder. rg_name (str): The name of the resource group associated with the notebook. rg_location (str): The Azure region for deployment. + deployment (INFRASTRUCTURE): The infrastructure type to use. + supported_infrastructures (list[INFRASTRUCTURE]): List of supported infrastructure types. + use_jwt (bool): Whether to generate JWT tokens. Defaults to False. + index (int): Index for multi-instance deployments. Defaults to 1. 
""" self.sample_folder = sample_folder @@ -239,39 +333,313 @@ def __init__(self, sample_folder: str, rg_name: str, rg_location: str, deploymen self.deployment = deployment self.supported_infrastructures = supported_infrastructures self.use_jwt = use_jwt + self.index = index validate_infrastructure(deployment, supported_infrastructures) if use_jwt: self._create_jwt() + # ------------------------------ + # PRIVATE METHODS + # ------------------------------ + def _create_jwt(self) -> None: + """Create JWT signing key and values for the sample.""" + # Set up the signing key for the JWT policy self.jwt_key_name = f'JwtSigningKey-{self.sample_folder}-{int(time.time())}' self.jwt_key_value, self.jwt_key_value_bytes_b64 = generate_signing_key() print_val('JWT key value', self.jwt_key_value) # this value is used to create the signed JWT token for requests to APIM print_val('JWT key value (base64)', self.jwt_key_value_bytes_b64) # this value is used in the APIM validate-jwt policy's issuer-signing-key attribute + def _get_current_index(self) -> int | None: + """ + Extract the index from the current resource group name. + + Returns: + int | None: The index if it exists, None otherwise. + """ + + prefix = f'apim-infra-{self.deployment.value}' + + if self.rg_name == prefix: + return None + elif self.rg_name.startswith(f'{prefix}-'): + try: + index_str = self.rg_name[len(f'{prefix}-'):] + return int(index_str) + except ValueError: + return None + + return None + def _clean_up_jwt(self, apim_name: str) -> None: - # 5) Clean up old JWT signing keys after successful deployment + """Clean up old JWT signing keys after successful deployment.""" + + # Clean up old JWT signing keys after successful deployment if not cleanup_old_jwt_signing_keys(apim_name, self.rg_name, self.jwt_key_name): print_warning('JWT key cleanup failed, but deployment was successful. 
Old keys may need manual cleanup.') + def _query_and_select_infrastructure(self) -> tuple[INFRASTRUCTURE | None, int | None]: + """ + Query for available infrastructures and allow user to select one or create new infrastructure. + + Returns: + tuple: (selected_infrastructure, selected_index) or (None, None) if no valid option + """ + + print_info('Querying for available infrastructures...', blank_above = True) + + # Get all resource groups that match the infrastructure pattern + available_options = [] + + for infra in self.supported_infrastructures: + infra_options = self._find_infrastructure_instances(infra) + available_options.extend(infra_options) + + # Check if the desired infrastructure/index combination exists + desired_rg_name = get_infra_rg_name(self.deployment, self._get_current_index()) + desired_exists = any( + get_infra_rg_name(infra, idx) == desired_rg_name + for infra, idx in available_options + ) + + if desired_exists: + # Scenario 1: Desired infrastructure exists, use it directly + print_success(f'Found desired infrastructure: {self.deployment.value} with resource group {desired_rg_name}') + return self.deployment, self._get_current_index() + + # Sort available options by infrastructure type, then by index + available_options.sort(key = lambda x: (x[0].value, x[1] if x[1] is not None else 0)) + + # Prepare display options + display_options = [] + option_counter = 1 + + # Add existing infrastructure options + if available_options: + print_info(f'Found {len(available_options)} existing infrastructure(s). You can select an existing or create a new one.') + print(f'\n Select an EXISTING infrastructure:') + + for infra, index in available_options: + index_str = f' (index: {index})' if index is not None else '' + rg_name = get_infra_rg_name(infra, index) + print(f' {option_counter}. 
{infra.value}{index_str} - Resource Group: {rg_name}') + display_options.append(('existing', infra, index)) + option_counter += 1 + else: + print_warning('No existing supported infrastructures found.') + + # Add option to create the desired infrastructure + desired_index_str = f' (index: {self._get_current_index()})' if self._get_current_index() is not None else '' + print(f'\n Create a NEW infrastructure:') + print(f' {option_counter}. {self.deployment.value}{desired_index_str} - Resource Group: {desired_rg_name}') + display_options.append(('create_new', self.deployment, self._get_current_index())) + + print('') + + # Get user selection + while True: + try: + if available_options: + choice = input(f'Select infrastructure (1-{len(display_options)}) or press Enter to exit: ').strip() + else: + choice = input(f'Create new infrastructure ({len(display_options)}) or press Enter to exit: ').strip() + + if not choice: + print_warning('No infrastructure selected. Exiting.') + return None, None + + choice_idx = int(choice) - 1 + if 0 <= choice_idx < len(display_options): + option_type, selected_infra, selected_index = display_options[choice_idx] + + if option_type == 'existing': + print_success(f'Selected existing: {selected_infra.value}{" (index: " + str(selected_index) + ")" if selected_index is not None else ""}') + return selected_infra, selected_index + elif option_type == 'create_new': + print_info(f'Creating new infrastructure: {selected_infra.value}{" (index: " + str(selected_index) + ")" if selected_index is not None else ""}') + + # Execute the infrastructure creation + inb_helper = InfrastructureNotebookHelper(self.rg_location, self.deployment, selected_index, APIM_SKU.BASICV2) + success = inb_helper.create_infrastructure(True) # Bypass infrastructure check to force creation + + if success: + print_success(f'Successfully created infrastructure: {selected_infra.value}{" (index: " + str(selected_index) + ")" if selected_index is not None else ""}') + return 
selected_infra, selected_index + else: + print_error('Failed to create infrastructure.') + return None, None + else: + print_error(f'Invalid choice. Please enter a number between 1 and {len(display_options)}.') + + except ValueError: + print_error('Invalid input. Please enter a number.') + except KeyboardInterrupt: + print_warning('\nOperation cancelled by user.') + return None, None + + def _find_infrastructure_instances(self, infrastructure: INFRASTRUCTURE) -> list[tuple[INFRASTRUCTURE, int | None]]: + """ + Find all instances of a specific infrastructure type by querying Azure resource groups. + + Args: + infrastructure (INFRASTRUCTURE): The infrastructure type to search for. + + Returns: + list: List of tuples (infrastructure, index) for found instances. + """ + + instances = [] + + # Query Azure for resource groups with the infrastructure tag + query_cmd = f'az group list --tag infrastructure={infrastructure.value} --query "[].name" -o tsv' + output = run(query_cmd, print_command_to_run = False, print_errors = False) + + if output.success and output.text.strip(): + rg_names = [name.strip() for name in output.text.strip().split('\n') if name.strip()] + + for rg_name in rg_names: + # Parse the resource group name to extract the index + # Expected format: apim-infra-{infrastructure}-{index} or apim-infra-{infrastructure} + prefix = f'apim-infra-{infrastructure.value}' + + if rg_name == prefix: + # No index + instances.append((infrastructure, None)) + elif rg_name.startswith(prefix + '-'): + # Has index + try: + index_str = rg_name[len(prefix + '-'):] + index = int(index_str) + instances.append((infrastructure, index)) + except ValueError: + # Invalid index format, skip + continue + + return instances + + # ------------------------------ + # PUBLIC METHODS + # ------------------------------ + + def deploy_sample(self, bicep_parameters: dict) -> Output: + """ + Deploy a sample with infrastructure auto-detection and selection. 
+ + Args: + bicep_parameters (dict): Parameters for the Bicep template deployment. + + Returns: + Output: The deployment result. + """ + + # Check infrastructure availability and let user select or create + print("Checking infrastructure availability...") + print(f"Desired infrastructure : {self.deployment.value}") + print(f"Desired index : {self.index}") + print(f"Desired resource group : {self.rg_name}") + + # Call the resource group existence check only once + rg_exists = does_resource_group_exist(self.rg_name) + print(f"Resource group exists: {rg_exists}") + + # If the desired infrastructure doesn't exist, use the interactive selection process + if not rg_exists: + print(f"\nDesired infrastructure does not exist. Querying for available options...") + + # Check if we've already done infrastructure selection (prevent double execution) + if 'infrastructure_selection_completed' not in globals(): + # Use the NotebookHelper's infrastructure selection process + selected_deployment, selected_index = self._query_and_select_infrastructure() + + if selected_deployment is None: + raise SystemExit(1) + + # Update the notebook helper with the selected infrastructure + self.deployment = selected_deployment + self.index = selected_index + self.rg_name = get_infra_rg_name(self.deployment, self.index) + + print(f"โœ… Using infrastructure : {self.deployment.value}") + print(f"๐Ÿ“ฆ Resource group : {self.rg_name}") + + # Verify the updates were applied correctly + print(f"๐Ÿ“ Updated variables : deployment = {self.deployment.value}, index = {self.index}, rg_name = {self.rg_name}") + else: + print("โœ… Infrastructure selection already completed in this session") + else: + print("\nโœ… Desired infrastructure already exists, proceeding with sample deployment") + + # Deploy the sample APIs to the selected infrastructure + print(f"\n๐Ÿš€ Deploying sample APIs to infrastructure: {self.deployment.value}") + print(f"๐Ÿ“ฆ Resource group: {self.rg_name}") + + # Get the current sample 
directory + sample_dir = Path.cwd() if Path.cwd().name == self.sample_folder else Path(find_project_root()) / 'samples' / self.sample_folder + original_cwd = os.getcwd() + + try: + # Change to sample directory + os.chdir(sample_dir) + print(f"๐Ÿ“ Changed working directory to: {sample_dir}") + + # Prepare deployment parameters + bicep_parameters_format = { + "$schema": "https://schema.management.azure.com/schemas/2019-04-01/deploymentParameters.json#", + "contentVersion": "1.0.0.0", + "parameters": bicep_parameters + } + + # Write the parameters file + params_file_path = sample_dir / 'params.json' + with open(params_file_path, 'w') as file: + file.write(json.dumps(bicep_parameters_format)) + + print(f"๐Ÿ“ Updated the bicep parameters file 'params.json'") + + # Run the deployment directly + main_bicep_path = sample_dir / 'main.bicep' + output = run( + f'az deployment group create --name {self.sample_folder} --resource-group {self.rg_name} --template-file "{main_bicep_path}" --parameters "{params_file_path}" --query "properties.outputs"', + f"Sample deployment '{self.sample_folder}' succeeded", + f"Sample deployment '{self.sample_folder}' failed." + ) + + return output + finally: + # Always restore the original working directory + os.chdir(original_cwd) + print(f"๐Ÿ“ Restored working directory to: {original_cwd}") + def deploy_bicep(self, bicep_parameters: dict) -> Output: """ - Deploy a Bicep template for the sample. + Deploy a Bicep template for the sample with infrastructure auto-detection. Args: bicep_parameters (dict): Parameters for the Bicep template. Returns: - Object: The deployment's output object + Output: The deployment's output object. """ # Infrastructure must be in place before samples can be layered on top if not does_resource_group_exist(self.rg_name): - print_error(f'The specified infrastructure resource group and its resources must exist first. 
Please check that the user-defined parameters above are correctly referencing an existing infrastructure. If it does not yet exist, run the desired infrastructure in the /infra/ folder first.') - raise SystemExit(1) + print_error(f'The specified infrastructure resource group and its resources must exist first.') + + # Query for available infrastructures and let user select + selected_deployment, selected_index = self._query_and_select_infrastructure() + + if selected_deployment is None: + print_error('No suitable infrastructure found. Please create the required infrastructure first.') + raise SystemExit(1) + + # Update the helper with the selected infrastructure and index + self.deployment = selected_deployment + self.rg_name = get_infra_rg_name(selected_deployment, selected_index) + print_success(f'Updated to use infrastructure: {selected_deployment.value} (index: {selected_index})') + print_val('New resource group name', self.rg_name) # Execute the deployment using the utility function that handles working directory management output = create_bicep_deployment_group_for_sample(self.sample_folder, self.rg_name, self.rg_location, bicep_parameters) @@ -667,6 +1035,32 @@ def create_resource_group(rg_name: str, resource_group_location: str | None = No f"Failed to create the resource group '{rg_name}'", False, True, False, False) +def does_infrastructure_exist(infrastructure: INFRASTRUCTURE, index: int) -> bool: + """ + Check if a specific infrastructure exists by querying the resource group. + + Args: + infrastructure (INFRASTRUCTURE): The infrastructure type to check. + index (int): index for multi-instance infrastructures. + + Returns: + bool: True if the infrastructure exists, False otherwise. + """ + + print(f"๐Ÿ” Checking if infrastructure already exists...") + + rg_name = get_infra_rg_name(infrastructure, index) + + if does_resource_group_exist(rg_name): + print(f"โœ… Infrastructure already exists!\n") + print("โ„น๏ธ To redeploy, either:") + print(" 1. 
Use a different index number, or") + print(" 2. Delete the existing resource group first using the clean-up notebook") + return True + else: + print(" Infrastructure does not yet exist.") + return False + def does_resource_group_exist(rg_name: str) -> bool: """ Check if a resource group exists in Azure. @@ -693,7 +1087,7 @@ def read_and_modify_policy_xml(policy_xml_filepath: str, replacements: dict[str, """ policy_xml_filepath = determine_policy_path(policy_xml_filepath, sample_name) - print(f"๐Ÿ“„ Reading policy XML from : {policy_xml_filepath}") + # print(f"๐Ÿ“„ Reading policy XML from : {policy_xml_filepath}") # debug # Read the specified policy XML file with open(policy_xml_filepath, 'r', encoding='utf-8') as policy_xml_file: @@ -793,7 +1187,7 @@ def read_policy_xml(policy_xml_filepath_or_filename: str, named_values: dict[str """ policy_xml_filepath = determine_policy_path(policy_xml_filepath_or_filename, sample_name) - print(f"๐Ÿ“„ Reading policy XML from : {policy_xml_filepath}") + # print(f"๐Ÿ“„ Reading policy XML from : {policy_xml_filepath}") # debug # Read the specified policy XML file with open(policy_xml_filepath, 'r', encoding='utf-8') as policy_xml_file: @@ -1009,7 +1403,7 @@ def get_infra_rg_name(deployment_name: INFRASTRUCTURE, index: int | None = None) Args: deployment_name (INFRASTRUCTURE): The infrastructure deployment enum value. - index (int, optional): An optional index to append to the name. + index (int | None): An optional index to append to the name. Defaults to None. Returns: str: The generated resource group name. 
@@ -1018,9 +1412,7 @@ def get_infra_rg_name(deployment_name: INFRASTRUCTURE, index: int | None = None) rg_name = f"apim-infra-{deployment_name.value}" if index is not None: - rg_name = f"{rg_name}-{str(index)}" - - print_val("Resource group name", rg_name) + rg_name = f"{rg_name}-{index}" return rg_name @@ -1117,18 +1509,19 @@ def run(command: str, ok_message: str = '', error_message: str = '', print_outpu def validate_infrastructure(infra: INFRASTRUCTURE, supported_infras: list[INFRASTRUCTURE]) -> None: """ - Validate that the provided infrastructure is a supported infrastructure. + Validate that the provided infrastructure is supported. Args: infra (INFRASTRUCTURE): The infrastructure deployment enum value. - supported_infras (list[INFRASTRUCTURE]): List of supported infrastructures. + supported_infras (list[INFRASTRUCTURE]): List of supported infrastructure types. Raises: ValueError: If the infrastructure is not supported. """ if infra not in supported_infras: - raise ValueError(f"Unsupported infrastructure: {infra}. Supported infrastructures are: {', '.join([i.value for i in supported_infras])}") + supported_names = ', '.join([i.value for i in supported_infras]) + raise ValueError(f"Unsupported infrastructure: {infra}. 
Supported infrastructures are: {supported_names}") def generate_signing_key() -> tuple[str, str]: """ diff --git a/tests/python/test_utils.py b/tests/python/test_utils.py index c715a9a..3e8ae25 100644 --- a/tests/python/test_utils.py +++ b/tests/python/test_utils.py @@ -952,3 +952,434 @@ def mock_run(command, ok_message='', error_message='', print_output=False, print import json + + +# ------------------------------ +# INFRASTRUCTURE SELECTION TESTS +# ------------------------------ + +def test_find_infrastructure_instances_success(monkeypatch): + """Test _find_infrastructure_instances with successful Azure query.""" + # Create a mock NotebookHelper instance + nb_helper = utils.NotebookHelper( + 'test-sample', 'test-rg', 'eastus', + INFRASTRUCTURE.SIMPLE_APIM, [INFRASTRUCTURE.SIMPLE_APIM] + ) + + # Mock successful Azure CLI response + mock_output = utils.Output(success=True, text='apim-infra-simple-apim-1\napim-infra-simple-apim-2\napim-infra-simple-apim') + monkeypatch.setattr(utils, 'run', lambda *args, **kwargs: mock_output) + + result = nb_helper._find_infrastructure_instances(INFRASTRUCTURE.SIMPLE_APIM) + + expected = [ + (INFRASTRUCTURE.SIMPLE_APIM, None), + (INFRASTRUCTURE.SIMPLE_APIM, 1), + (INFRASTRUCTURE.SIMPLE_APIM, 2) + ] + # Check that we have the expected results regardless of order + assert len(result) == len(expected) + assert set(result) == set(expected) + +def test_find_infrastructure_instances_no_results(monkeypatch): + """Test _find_infrastructure_instances with no matching resource groups.""" + nb_helper = utils.NotebookHelper( + 'test-sample', 'test-rg', 'eastus', + INFRASTRUCTURE.SIMPLE_APIM, [INFRASTRUCTURE.SIMPLE_APIM] + ) + + # Mock empty Azure CLI response + mock_output = utils.Output(success=True, text='') + monkeypatch.setattr(utils, 'run', lambda *args, **kwargs: mock_output) + + result = nb_helper._find_infrastructure_instances(INFRASTRUCTURE.SIMPLE_APIM) + assert result == [] + +def 
test_find_infrastructure_instances_failure(monkeypatch): + """Test _find_infrastructure_instances when Azure CLI fails.""" + nb_helper = utils.NotebookHelper( + 'test-sample', 'test-rg', 'eastus', + INFRASTRUCTURE.SIMPLE_APIM, [INFRASTRUCTURE.SIMPLE_APIM] + ) + + # Mock failed Azure CLI response + mock_output = utils.Output(success=False, text='Error: Authentication failed') + monkeypatch.setattr(utils, 'run', lambda *args, **kwargs: mock_output) + + result = nb_helper._find_infrastructure_instances(INFRASTRUCTURE.SIMPLE_APIM) + assert result == [] + +def test_find_infrastructure_instances_invalid_names(monkeypatch): + """Test _find_infrastructure_instances with invalid resource group names.""" + nb_helper = utils.NotebookHelper( + 'test-sample', 'test-rg', 'eastus', + INFRASTRUCTURE.SIMPLE_APIM, [INFRASTRUCTURE.SIMPLE_APIM] + ) + + # Mock Azure CLI response with valid and invalid names + mock_output = utils.Output( + success=True, + text='apim-infra-simple-apim-1\napim-infra-simple-apim-invalid\napim-infra-simple-apim-2\napim-infra-different' + ) + monkeypatch.setattr(utils, 'run', lambda *args, **kwargs: mock_output) + + result = nb_helper._find_infrastructure_instances(INFRASTRUCTURE.SIMPLE_APIM) + + # Should only include valid names and skip invalid ones + expected = [ + (INFRASTRUCTURE.SIMPLE_APIM, 1), + (INFRASTRUCTURE.SIMPLE_APIM, 2) + ] + # Check that we have the expected results regardless of order + assert len(result) == len(expected) + assert set(result) == set(expected) + +def test_find_infrastructure_instances_mixed_formats(monkeypatch): + """Test _find_infrastructure_instances with mixed indexed and non-indexed names.""" + nb_helper = utils.NotebookHelper( + 'test-sample', 'test-rg', 'eastus', + INFRASTRUCTURE.APIM_ACA, [INFRASTRUCTURE.APIM_ACA] + ) + + # Mock Azure CLI response with mixed formats + mock_output = utils.Output( + success=True, + text='apim-infra-apim-aca\napim-infra-apim-aca-1\napim-infra-apim-aca-5' + ) + monkeypatch.setattr(utils, 
'run', lambda *args, **kwargs: mock_output) + + result = nb_helper._find_infrastructure_instances(INFRASTRUCTURE.APIM_ACA) + + expected = [ + (INFRASTRUCTURE.APIM_ACA, None), + (INFRASTRUCTURE.APIM_ACA, 1), + (INFRASTRUCTURE.APIM_ACA, 5) + ] + # Check that we have the expected results regardless of order + assert len(result) == len(expected) + assert set(result) == set(expected) + +def test_query_and_select_infrastructure_no_options(monkeypatch): + """Test _query_and_select_infrastructure when no infrastructures are available.""" + nb_helper = utils.NotebookHelper( + 'test-sample', 'test-rg', 'eastus', + INFRASTRUCTURE.SIMPLE_APIM, [INFRASTRUCTURE.SIMPLE_APIM, INFRASTRUCTURE.APIM_ACA] + ) + + # Mock empty results for all infrastructure types + monkeypatch.setattr(nb_helper, '_find_infrastructure_instances', lambda x: []) + monkeypatch.setattr(utils, 'print_info', lambda *args, **kwargs: None) + monkeypatch.setattr(utils, 'print_warning', lambda *args, **kwargs: None) + # Mock input to return empty string (simulating user pressing Enter to exit) + monkeypatch.setattr('builtins.input', lambda prompt: '') + + result = nb_helper._query_and_select_infrastructure() + assert result == (None, None) + +def test_query_and_select_infrastructure_single_option(monkeypatch): + """Test _query_and_select_infrastructure with a single available option.""" + nb_helper = utils.NotebookHelper( + 'test-sample', 'test-rg', 'eastus', + INFRASTRUCTURE.SIMPLE_APIM, [INFRASTRUCTURE.SIMPLE_APIM, INFRASTRUCTURE.APIM_ACA] + ) + + # Mock single result + def mock_find_instances(infra): + if infra == INFRASTRUCTURE.SIMPLE_APIM: + return [(INFRASTRUCTURE.SIMPLE_APIM, 1)] + return [] + + monkeypatch.setattr(nb_helper, '_find_infrastructure_instances', mock_find_instances) + monkeypatch.setattr(utils, 'print_info', lambda *args, **kwargs: None) + monkeypatch.setattr(utils, 'print_success', lambda *args, **kwargs: None) + monkeypatch.setattr(utils, 'get_infra_rg_name', lambda infra, idx: 
f'apim-infra-{infra.value}-{idx}') + monkeypatch.setattr('builtins.print', lambda *args, **kwargs: None) + + # Mock user input to select option 1 + monkeypatch.setattr('builtins.input', lambda prompt: '1') + + result = nb_helper._query_and_select_infrastructure() + assert result == (INFRASTRUCTURE.SIMPLE_APIM, 1) + +def test_query_and_select_infrastructure_multiple_options(monkeypatch): + """Test _query_and_select_infrastructure with multiple available options.""" + nb_helper = utils.NotebookHelper( + 'test-sample', 'test-rg', 'eastus', + INFRASTRUCTURE.SIMPLE_APIM, [INFRASTRUCTURE.SIMPLE_APIM, INFRASTRUCTURE.APIM_ACA] + ) + + # Mock multiple results + def mock_find_instances(infra): + if infra == INFRASTRUCTURE.SIMPLE_APIM: + return [(INFRASTRUCTURE.SIMPLE_APIM, 1), (INFRASTRUCTURE.SIMPLE_APIM, 2)] + elif infra == INFRASTRUCTURE.APIM_ACA: + return [(INFRASTRUCTURE.APIM_ACA, None)] + return [] + + monkeypatch.setattr(nb_helper, '_find_infrastructure_instances', mock_find_instances) + monkeypatch.setattr(utils, 'print_info', lambda *args, **kwargs: None) + monkeypatch.setattr(utils, 'print_success', lambda *args, **kwargs: None) + monkeypatch.setattr(utils, 'get_infra_rg_name', lambda infra, idx: f'apim-infra-{infra.value}-{idx or ""}') + monkeypatch.setattr('builtins.print', lambda *args, **kwargs: None) + + # Mock user input to select option 1 (APIM_ACA with no index, sorted first alphabetically) + monkeypatch.setattr('builtins.input', lambda prompt: '1') + + result = nb_helper._query_and_select_infrastructure() + assert result == (INFRASTRUCTURE.APIM_ACA, None) + +def test_query_and_select_infrastructure_user_cancellation(monkeypatch): + """Test _query_and_select_infrastructure when user cancels selection.""" + nb_helper = utils.NotebookHelper( + 'test-sample', 'test-rg', 'eastus', + INFRASTRUCTURE.SIMPLE_APIM, [INFRASTRUCTURE.SIMPLE_APIM] + ) + + # Mock single result + def mock_find_instances(infra): + return [(INFRASTRUCTURE.SIMPLE_APIM, 1)] + + 
monkeypatch.setattr(nb_helper, '_find_infrastructure_instances', mock_find_instances) + monkeypatch.setattr(utils, 'print_info', lambda *args, **kwargs: None) + monkeypatch.setattr(utils, 'print_warning', lambda *args, **kwargs: None) + monkeypatch.setattr(utils, 'get_infra_rg_name', lambda infra, idx: f'apim-infra-{infra.value}-{idx}') + monkeypatch.setattr('builtins.print', lambda *args, **kwargs: None) + + # Mock user input to press Enter (cancel) + monkeypatch.setattr('builtins.input', lambda prompt: '') + + result = nb_helper._query_and_select_infrastructure() + assert result == (None, None) + +def test_query_and_select_infrastructure_invalid_input_then_valid(monkeypatch): + """Test _query_and_select_infrastructure with invalid input followed by valid input.""" + nb_helper = utils.NotebookHelper( + 'test-sample', 'test-rg', 'eastus', + INFRASTRUCTURE.SIMPLE_APIM, [INFRASTRUCTURE.SIMPLE_APIM] + ) + + # Mock single result + def mock_find_instances(infra): + return [(INFRASTRUCTURE.SIMPLE_APIM, 1)] + + monkeypatch.setattr(nb_helper, '_find_infrastructure_instances', mock_find_instances) + monkeypatch.setattr(utils, 'print_info', lambda *args, **kwargs: None) + monkeypatch.setattr(utils, 'print_error', lambda *args, **kwargs: None) + monkeypatch.setattr(utils, 'print_success', lambda *args, **kwargs: None) + monkeypatch.setattr(utils, 'get_infra_rg_name', lambda infra, idx: f'apim-infra-{infra.value}-{idx}') + monkeypatch.setattr('builtins.print', lambda *args, **kwargs: None) + + # Mock user input sequence: invalid number, invalid text, then valid choice + inputs = iter(['99', 'abc', '1']) + monkeypatch.setattr('builtins.input', lambda prompt: next(inputs)) + + result = nb_helper._query_and_select_infrastructure() + assert result == (INFRASTRUCTURE.SIMPLE_APIM, 1) + +def test_query_and_select_infrastructure_keyboard_interrupt(monkeypatch): + """Test _query_and_select_infrastructure when user presses Ctrl+C.""" + nb_helper = utils.NotebookHelper( + 'test-sample', 
'test-rg', 'eastus', + INFRASTRUCTURE.SIMPLE_APIM, [INFRASTRUCTURE.SIMPLE_APIM] + ) + + # Mock single result + def mock_find_instances(infra): + return [(INFRASTRUCTURE.SIMPLE_APIM, 1)] + + monkeypatch.setattr(nb_helper, '_find_infrastructure_instances', mock_find_instances) + monkeypatch.setattr(utils, 'print_info', lambda *args, **kwargs: None) + monkeypatch.setattr(utils, 'print_warning', lambda *args, **kwargs: None) + monkeypatch.setattr(utils, 'get_infra_rg_name', lambda infra, idx: f'apim-infra-{infra.value}-{idx}') + monkeypatch.setattr('builtins.print', lambda *args, **kwargs: None) + + # Mock user input to raise KeyboardInterrupt + def mock_input(prompt): + raise KeyboardInterrupt() + monkeypatch.setattr('builtins.input', mock_input) + + result = nb_helper._query_and_select_infrastructure() + assert result == (None, None) + +def test_deploy_bicep_with_infrastructure_selection(monkeypatch): + """Test deploy_bicep method with infrastructure selection when original doesn't exist.""" + nb_helper = utils.NotebookHelper( + 'test-sample', 'test-rg', 'eastus', + INFRASTRUCTURE.SIMPLE_APIM, [INFRASTRUCTURE.SIMPLE_APIM, INFRASTRUCTURE.APIM_ACA] + ) + + # Mock does_resource_group_exist to return False for original, triggering selection + monkeypatch.setattr(utils, 'does_resource_group_exist', lambda rg: False) + + # Mock infrastructure selection to return a valid infrastructure + selected_infra = INFRASTRUCTURE.APIM_ACA + selected_index = 2 + monkeypatch.setattr(nb_helper, '_query_and_select_infrastructure', + lambda: (selected_infra, selected_index)) + + # Mock successful deployment + mock_output = utils.Output(success=True, text='{"outputs": {"test": "value"}}') + monkeypatch.setattr(utils, 'create_bicep_deployment_group_for_sample', + lambda *args, **kwargs: mock_output) + + # Mock utility functions + monkeypatch.setattr(utils, 'get_infra_rg_name', + lambda infra, idx: f'apim-infra-{infra.value}-{idx}') + monkeypatch.setattr(utils, 'print_error', lambda *args, 
**kwargs: None) + monkeypatch.setattr(utils, 'print_success', lambda *args, **kwargs: None) + monkeypatch.setattr(utils, 'print_val', lambda *args, **kwargs: None) + + # Test the deployment + result = nb_helper.deploy_bicep({'test': {'value': 'param'}}) + + # Verify the helper was updated with selected infrastructure + assert nb_helper.deployment == selected_infra + assert nb_helper.rg_name == 'apim-infra-apim-aca-2' + assert result.success is True + +def test_deploy_bicep_no_infrastructure_found(monkeypatch): + """Test deploy_bicep method when no suitable infrastructure is found.""" + nb_helper = utils.NotebookHelper( + 'test-sample', 'test-rg', 'eastus', + INFRASTRUCTURE.SIMPLE_APIM, [INFRASTRUCTURE.SIMPLE_APIM] + ) + + # Mock does_resource_group_exist to return False for original + monkeypatch.setattr(utils, 'does_resource_group_exist', lambda rg: False) + + # Mock infrastructure selection to return None (no infrastructure found) + monkeypatch.setattr(nb_helper, '_query_and_select_infrastructure', + lambda: (None, None)) + + # Mock utility functions + monkeypatch.setattr(utils, 'print_error', lambda *args, **kwargs: None) + + # Test should raise SystemExit + with pytest.raises(SystemExit): + nb_helper.deploy_bicep({'test': {'value': 'param'}}) + +def test_deploy_bicep_existing_infrastructure(monkeypatch): + """Test deploy_bicep method when the specified infrastructure already exists.""" + nb_helper = utils.NotebookHelper( + 'test-sample', 'test-rg', 'eastus', + INFRASTRUCTURE.SIMPLE_APIM, [INFRASTRUCTURE.SIMPLE_APIM] + ) + + # Mock does_resource_group_exist to return True (infrastructure exists) + monkeypatch.setattr(utils, 'does_resource_group_exist', lambda rg: True) + + # Mock successful deployment + mock_output = utils.Output(success=True, text='{"outputs": {"test": "value"}}') + monkeypatch.setattr(utils, 'create_bicep_deployment_group_for_sample', + lambda *args, **kwargs: mock_output) + + # Mock utility functions + monkeypatch.setattr(utils, 
'print_success', lambda *args, **kwargs: None) + + # Test the deployment - should not call infrastructure selection + result = nb_helper.deploy_bicep({'test': {'value': 'param'}}) + + # Verify the helper was not modified (still has original values) + assert nb_helper.deployment == INFRASTRUCTURE.SIMPLE_APIM + assert nb_helper.rg_name == 'test-rg' + assert result.success is True + +def test_deploy_bicep_deployment_failure(monkeypatch): + """Test deploy_bicep method when Bicep deployment fails.""" + nb_helper = utils.NotebookHelper( + 'test-sample', 'test-rg', 'eastus', + INFRASTRUCTURE.SIMPLE_APIM, [INFRASTRUCTURE.SIMPLE_APIM] + ) + + # Mock does_resource_group_exist to return True + monkeypatch.setattr(utils, 'does_resource_group_exist', lambda rg: True) + + # Mock failed deployment + mock_output = utils.Output(success=False, text='Deployment failed') + monkeypatch.setattr(utils, 'create_bicep_deployment_group_for_sample', + lambda *args, **kwargs: mock_output) + + # Test should raise SystemExit + with pytest.raises(SystemExit): + nb_helper.deploy_bicep({'test': {'value': 'param'}}) + +def test_notebookhelper_initialization_with_supported_infrastructures(): + """Test NotebookHelper initialization with supported infrastructures list.""" + supported_infras = [INFRASTRUCTURE.SIMPLE_APIM, INFRASTRUCTURE.APIM_ACA] + + nb_helper = utils.NotebookHelper( + 'test-sample', 'test-rg', 'eastus', + INFRASTRUCTURE.SIMPLE_APIM, supported_infras + ) + + assert nb_helper.deployment == INFRASTRUCTURE.SIMPLE_APIM + assert nb_helper.supported_infrastructures == supported_infras + assert nb_helper.sample_folder == 'test-sample' + assert nb_helper.rg_name == 'test-rg' + assert nb_helper.rg_location == 'eastus' + assert nb_helper.use_jwt is False + +def test_notebookhelper_initialization_with_jwt(monkeypatch): + """Test NotebookHelper initialization with JWT enabled.""" + # Mock JWT-related functions + monkeypatch.setattr(utils, 'generate_signing_key', lambda: ('test-key', 
'test-key-b64')) + monkeypatch.setattr(utils, 'print_val', lambda *args, **kwargs: None) + monkeypatch.setattr('time.time', lambda: 1234567890) + + nb_helper = utils.NotebookHelper( + 'test-sample', 'test-rg', 'eastus', + INFRASTRUCTURE.SIMPLE_APIM, [INFRASTRUCTURE.SIMPLE_APIM], use_jwt=True + ) + + assert nb_helper.use_jwt is True + assert nb_helper.jwt_key_name == 'JwtSigningKey-test-sample-1234567890' + assert nb_helper.jwt_key_value == 'test-key' + assert nb_helper.jwt_key_value_bytes_b64 == 'test-key-b64' + +def test_infrastructure_sorting_in_query_and_select(monkeypatch): + """Test that infrastructure options are sorted correctly by type then index.""" + nb_helper = utils.NotebookHelper( + 'test-sample', 'test-rg', 'eastus', + INFRASTRUCTURE.SIMPLE_APIM, [INFRASTRUCTURE.SIMPLE_APIM, INFRASTRUCTURE.APIM_ACA, INFRASTRUCTURE.AFD_APIM_PE] + ) + + # Mock mixed results in unsorted order + def mock_find_instances(infra): + if infra == INFRASTRUCTURE.SIMPLE_APIM: + return [(INFRASTRUCTURE.SIMPLE_APIM, 3), (INFRASTRUCTURE.SIMPLE_APIM, 1)] + elif infra == INFRASTRUCTURE.APIM_ACA: + return [(INFRASTRUCTURE.APIM_ACA, None), (INFRASTRUCTURE.APIM_ACA, 2)] + elif infra == INFRASTRUCTURE.AFD_APIM_PE: + return [(INFRASTRUCTURE.AFD_APIM_PE, 1)] + return [] + + monkeypatch.setattr(nb_helper, '_find_infrastructure_instances', mock_find_instances) + monkeypatch.setattr(utils, 'print_info', lambda *args, **kwargs: None) + monkeypatch.setattr(utils, 'print_success', lambda *args, **kwargs: None) + monkeypatch.setattr(utils, 'get_infra_rg_name', lambda infra, idx: f'apim-infra-{infra.value}-{idx or ""}') + + # Capture the printed options to verify sorting + printed_options = [] + def mock_print(*args, **kwargs): + if args and isinstance(args[0], str) and args[0].strip().startswith(('1.', '2.', '3.', '4.', '5.')): + printed_options.append(args[0].strip()) + + monkeypatch.setattr('builtins.print', mock_print) + + # Mock user input to select first option + 
monkeypatch.setattr('builtins.input', lambda prompt: '1') + + nb_helper._query_and_select_infrastructure() + + # Verify sorting: AFD_APIM_PE (alphabetically first), then APIM_ACA, then SIMPLE_APIM + # Within each type, sorted by index (None treated as 0) + expected_order = [ + '1. afd-apim-pe (index: 1)', + '2. apim-aca - Resource Group:', # No index + '3. apim-aca (index: 2)', + '4. simple-apim (index: 1)', + '5. simple-apim (index: 3)' + ] + + for i, expected in enumerate(expected_order): + assert expected in printed_options[i], f"Expected '{expected}' in '{printed_options[i]}'"