diff --git a/.github/workflows/deploy-waf.yml b/.github/workflows/deploy-waf.yml
index 8ec0a5e19..0427638d0 100644
--- a/.github/workflows/deploy-waf.yml
+++ b/.github/workflows/deploy-waf.yml
@@ -109,13 +109,15 @@ jobs:
            --resource-group ${{ env.RESOURCE_GROUP_NAME }} \
            --template-file infra/main.bicep \
            --parameters \
-              environmentName=${{ env.SOLUTION_PREFIX }} \
-              useWafAlignedArchitecture=true \
-              aiDeploymentsLocation='${{ env.AZURE_LOCATION }}' \
+              solutionName=${{ env.SOLUTION_PREFIX }} \
+              location="${{ env.AZURE_LOCATION }}" \
+              azureAiServiceLocation='${{ env.AZURE_LOCATION }}' \
              gptModelCapacity=5 \
-              virtualMachineConfiguration='{"adminUsername": "adminuser", "adminPassword": "P@ssw0rd1234"}' \
-              logAnalyticsWorkspaceConfiguration='{"existingWorkspaceResourceId": ""}'
-
+              enableTelemetry=true \
+              enableMonitoring=true \
+              enablePrivateNetworking=true \
+              enableScalability=true \
+

      - name: Send Notification on Failure
        if: failure()
diff --git a/.github/workflows/deploy.yml b/.github/workflows/deploy.yml
index 747d8de53..82f0941b1 100644
--- a/.github/workflows/deploy.yml
+++ b/.github/workflows/deploy.yml
@@ -129,19 +129,14 @@ jobs:
            --resource-group ${{ env.RESOURCE_GROUP_NAME }} \
            --template-file infra/main.bicep \
            --parameters \
-              environmentName=${{ env.SOLUTION_PREFIX }} \
-              solutionLocation="${{ env.AZURE_LOCATION }}" \
-              modelDeploymentType="GlobalStandard" \
+              solutionName=${{ env.SOLUTION_PREFIX }} \
+              location="${{ env.AZURE_LOCATION }}" \
+              gptModelDeploymentType="GlobalStandard" \
              gptModelName="gpt-4o" \
              gptModelVersion="2024-08-06" \
              imageTag="${IMAGE_TAG}" \
-              useWafAlignedArchitecture=false \
-              aiDeploymentsLocation='${{ env.AZURE_LOCATION }}' \
+              azureAiServiceLocation='${{ env.AZURE_LOCATION }}' \
              gptModelCapacity=150 \
-              logAnalyticsWorkspaceConfiguration='{"dataRetentionInDays": 30, "existingWorkspaceResourceId": ""}' \
-              applicationInsightsConfiguration='{"retentionInDays": 30}' \
-              virtualNetworkConfiguration='{"enabled": false}' \
-              webServerFarmConfiguration='{"skuCapacity": 1, "skuName": "B2"}' \
            --output json

      - name: Extract Web App and API App URLs
diff --git a/docs/DeploymentGuide.md b/docs/DeploymentGuide.md
index 362c64c5a..18442dfc7 100644
--- a/docs/DeploymentGuide.md
+++ b/docs/DeploymentGuide.md
@@ -153,7 +153,8 @@ When you start the deployment, most parameters will have **default values**, but
 | **GPT Model Capacity** | Sets the GPT model capacity. | 150 |
 | **Image Tag** | Docker image tag used for container deployments. | latest |
 | **Enable Telemetry** | Enables telemetry for monitoring and diagnostics. | true |
-
+| **Existing Log Analytics Workspace** | Resource ID of an existing Log Analytics Workspace to reuse instead of creating a new one. | *(none)* |
+| **Existing Azure AI Foundry Project** | Resource ID of an existing Azure AI Foundry Project to reuse instead of creating a new one. | *(none)* |
@@ -176,6 +177,14 @@
 To adjust quota settings, follow these [steps](./AzureGPTQuotaSettings.md).

+
+<details>
+  <summary><b>Reusing an Existing Azure AI Foundry Project</b></summary>
+
+  Guide to get your [Existing Project ID](/docs/re-use-foundry-project.md)
+
+</details>
+
 ### Deploying with AZD

 Once you've opened the project in [Codespaces](#github-codespaces), [Dev Containers](#vs-code-dev-containers), or [locally](#local-environment), you can deploy it to Azure by following these steps:
@@ -206,43 +215,9 @@ Once you've opened the project in [Codespaces](#github-codespaces), [Dev Contain
 5. Once the deployment has completed successfully, open the [Azure Portal](https://portal.azure.com/), go to the deployed resource group, find the App Service, and get the app URL from `Default domain`.

-6. If you are done trying out the application, you can delete the resources by running `azd down`.
-
-### Publishing Local Build Container to Azure Container Registry
-
-If you need to rebuild the source code and push the updated container to the deployed Azure Container Registry, follow these steps:
-
-1. Set the environment variable `USE_LOCAL_BUILD` to `True`:
-
-   - **Linux/macOS**:
-
-     ```bash
-     export USE_LOCAL_BUILD=True
-     ```
-
-   - **Windows (PowerShell)**:
-     ```powershell
-     $env:USE_LOCAL_BUILD = $true
-     ```
-
-2. Run the `az login` command
-
-   ```bash
-   az login
-   ```
-
-3. Run the `azd up` command again to rebuild and push the updated container:
-   ```bash
-   azd up
-   ```
-
-This will rebuild the source code, package it into a container, and push it to the Azure Container Registry associated with your deployment.
-
-This guide provides step-by-step instructions for deploying your application using Azure Container Registry (ACR) and Azure Container Apps.
-
-There are several ways to deploy the solution. You can deploy to run in Azure in one click, or manually, or you can deploy locally.
+6. When deployment is complete, follow the steps in [Set Up Authentication in Azure App Service](../docs/azure_app_service_auth_setup.md) to add app authentication to your web app running on Azure App Service.

-When Deployment is complete, follow steps in [Set Up Authentication in Azure App Service](../docs/azure_app_service_auth_setup.md) to add app authentication to your web app running on Azure App Service
+7. If you are done trying out the application, you can delete the resources by running `azd down`.

 # Local setup
diff --git a/docs/images/re_use_foundry_project/azure_ai_foundry_list.png b/docs/images/re_use_foundry_project/azure_ai_foundry_list.png
new file mode 100644
index 000000000..784bc85c7
Binary files /dev/null and b/docs/images/re_use_foundry_project/azure_ai_foundry_list.png differ
diff --git a/docs/images/re_use_foundry_project/navigate_to_projects.png b/docs/images/re_use_foundry_project/navigate_to_projects.png
new file mode 100644
index 000000000..11082c15c
Binary files /dev/null and b/docs/images/re_use_foundry_project/navigate_to_projects.png differ
diff --git a/docs/images/re_use_foundry_project/project_resource_id.png b/docs/images/re_use_foundry_project/project_resource_id.png
new file mode 100644
index 000000000..7835ea9d3
Binary files /dev/null and b/docs/images/re_use_foundry_project/project_resource_id.png differ
diff --git a/docs/re-use-foundry-project.md b/docs/re-use-foundry-project.md
new file mode 100644
index 000000000..c29ac5d8a
--- /dev/null
+++ b/docs/re-use-foundry-project.md
@@ -0,0 +1,44 @@
+[← Back to *DEPLOYMENT* guide](/docs/DeploymentGuide.md#deployment-steps)
+
+# Reusing an Existing Azure AI Foundry Project
+To configure your environment to use an existing Azure AI Foundry Project, follow these steps:
+---
+### 1. Go to Azure Portal
+Go to https://portal.azure.com
+
+### 2. Search for Azure AI Foundry
+In the search bar at the top, type "Azure AI Foundry" and click on it. Then select the Foundry service instance where your project exists.
+
+![Azure AI Foundry services list](../docs/images/re_use_foundry_project/azure_ai_foundry_list.png)
+
+### 3. Navigate to Projects under Resource Management
+On the left sidebar of the Foundry service blade:
+
+- Expand the Resource Management section
+- Click on Projects (this refers to the active Foundry project tied to the service)
+
+### 4. Click on the Project
+From the Projects view, click on the project name to open its details.
+
+    Note: You will see only one project listed here, as each Foundry service maps to a single project in this accelerator.
+
+![Navigate to Projects](../docs/images/re_use_foundry_project/navigate_to_projects.png)
+
+### 5. Copy the Resource ID
+In the left-hand menu of the project blade:
+
+- Click on Properties under Resource Management
+- Locate the Resource ID field
+- Click on the copy icon next to the Resource ID value
+
+![Project Resource ID](../docs/images/re_use_foundry_project/project_resource_id.png)
+
+### 6. Set the Foundry Project Resource ID in Your Environment
+Run the following command in your terminal:
+```bash
+azd env set AZURE_ENV_FOUNDRY_PROJECT_ID '<Foundry Project Resource Id>'
+```
+Replace `<Foundry Project Resource Id>` with the value obtained from Step 5.
+
+### 7. Continue Deployment
+Proceed with the next steps in the [deployment guide](/docs/DeploymentGuide.md#deployment-steps).
diff --git a/docs/re-use-log-analytics.md b/docs/re-use-log-analytics.md
index 9d48b0f92..1fa7a35df 100644
--- a/docs/re-use-log-analytics.md
+++ b/docs/re-use-log-analytics.md
@@ -1,4 +1,4 @@
-[← Back to *DEPLOYMENT* guide](/docs/DeploymentGuide.md#deployment-options--steps)
+[← Back to *DEPLOYMENT* guide](/docs/DeploymentGuide.md#deployment-steps)

 # Reusing an Existing Log Analytics Workspace
 To configure your environment to use an existing Log Analytics Workspace, follow these steps:
@@ -28,4 +28,4 @@ azd env set AZURE_ENV_LOG_ANALYTICS_WORKSPACE_ID '<Existing Workspace Id>'
 Replace `<Existing Workspace Id>` with the value obtained from Step 3.

 ### 5. Continue Deployment
-Proceed with the next steps in the [deployment guide](/docs/DeploymentGuide.md#deployment-options--steps).
+Proceed with the next steps in the [deployment guide](/docs/DeploymentGuide.md#deployment-steps).
diff --git a/infra/main.bicep b/infra/main.bicep
index fc3cb1689..94556f60f 100644
--- a/infra/main.bicep
+++ b/infra/main.bicep
@@ -17,7 +17,7 @@ param solutionName string = 'macae'
 param solutionUniqueText string = take(uniqueString(subscription().id, resourceGroup().name, solutionName), 5)

 @metadata({ azd: { type: 'location' } })
-@description('Optional. Azure region for all services. Regions are restricted to guarantee compatibility with paired regions and replica locations for data redundancy and failover scenarios based on articles [Azure regions list](https://learn.microsoft.com/azure/reliability/regions-list) and [Azure Database for MySQL Flexible Server - Azure Regions](https://learn.microsoft.com/azure/mysql/flexible-server/overview#azure-regions).')
+@description('Required. Azure region for all services. Regions are restricted to guarantee compatibility with paired regions and replica locations for data redundancy and failover scenarios based on articles [Azure regions list](https://learn.microsoft.com/azure/reliability/regions-list) and [Azure Database for MySQL Flexible Server - Azure Regions](https://learn.microsoft.com/azure/mysql/flexible-server/overview#azure-regions).')
 @allowed([
   'australiaeast'
   'centralus'
@@ -30,13 +30,38 @@ param solutionUniqueText string = take(uniqueString(subscription().id, resourceG
   'westeurope'
   'uksouth'
 ])
-param location string = 'australiaeast'
+param location string

 // Restricting deployment to only supported Azure OpenAI regions validated with GPT-4o model
 @allowed(['australiaeast', 'eastus2', 'francecentral', 'japaneast', 'norwayeast', 'swedencentral', 'uksouth', 'westus'])
-@metadata({ azd: { type: 'location' } })
-@description('Optional. Location for all AI service resources. This should be one of the supported Azure AI Service locations.')
-param azureAiServiceLocation string = 'australiaeast'
+@metadata({
+  azd : {
+    type: 'location'
+    usageName : [
+      'OpenAI.GlobalStandard.gpt-4o, 150'
+    ]
+  }
+})
+@description('Required. Location for all AI service resources. This should be one of the supported Azure AI Service locations.')
+param azureAiServiceLocation string
+
+@minLength(1)
+@description('Optional. Name of the GPT model to deploy. Defaults to gpt-4o.')
+param gptModelName string = 'gpt-4o'
+
+@description('Optional. Version of the GPT model to deploy. Defaults to 2024-08-06.')
+param gptModelVersion string = '2024-08-06'
+
+@minLength(1)
+@allowed([
+  'Standard'
+  'GlobalStandard'
+])
+@description('Optional. GPT model deployment type. Defaults to GlobalStandard.')
+param gptModelDeploymentType string = 'GlobalStandard'
+
+@description('Optional. AI model deployment token capacity. Defaults to 150 for optimal performance.')
+param gptModelCapacity int = 150

 @description('Optional. The tags to apply to all deployed Azure resources.')
 param tags resourceInput<'Microsoft.Resources/resourceGroups@2025-04-01'>.tags = {}
@@ -86,7 +111,15 @@ param enableTelemetry bool = true
 // Variables      //
 // ============== //

-var solutionSuffix = '${solutionName}${solutionUniqueText}'
+var solutionSuffix = toLower(trim(replace(
+  replace(
+    replace(replace(replace(replace('${solutionName}${solutionUniqueText}', '-', ''), '_', ''), '.', ''), '/', ''),
+    ' ',
+    ''
+  ),
+  '*',
+  ''
+)))

 // Region pairs list based on article in [Azure regions list](https://learn.microsoft.com/azure/reliability/regions-list) //
 var azureRegionPairs = {
@@ -895,12 +928,11 @@ var aiFoundryAiServicesAiProjectResourceName = 'proj-${solutionSuffix}'
 var aiFoundryAIservicesEnabled = true
 var aiFoundryAiServicesModelDeployment = {
   format: 'OpenAI'
-  name: 'gpt-4o'
-  version: '2024-08-06'
+  name: gptModelName
+  version: gptModelVersion
   sku: {
-    name: 'GlobalStandard'
-    //Currently the capacity is set to 140 for optimal performance.
-    capacity: 140
+    name: gptModelDeploymentType
+    capacity: gptModelCapacity
   }
   raiPolicyName: 'Microsoft.Default'
 }
@@ -1141,7 +1173,7 @@ module containerAppEnvironment 'br/public:avm/res/app/managed-environment:0.11.2
          destination: 'log-analytics'
          logAnalyticsConfiguration: {
            customerId: logAnalyticsWorkspace!.outputs.logAnalyticsWorkspaceId
-            sharedKey: logAnalyticsWorkspace!.outputs.primarySharedKey
+            sharedKey: logAnalyticsWorkspace.outputs.primarySharedKey
          }
        }
      : null
@@ -1331,6 +1363,10 @@ module containerApp 'br/public:avm/res/app/container-app:0.18.1' = {
            name: 'AZURE_AI_AGENT_MODEL_DEPLOYMENT_NAME'
            value: aiFoundryAiServicesModelDeployment.name
          }
+          {
+            name: 'AZURE_CLIENT_ID'
+            value: userAssignedIdentity.outputs.clientId // NOTE: This is the client ID of the user-assigned managed identity, not an Entra application, and is needed for the container app to access the Cosmos DB account.
+          }
        ]
      }
    ]
diff --git a/infra/main.parameters.json b/infra/main.parameters.json
index c52f902ee..0faa4c9ae 100644
--- a/infra/main.parameters.json
+++ b/infra/main.parameters.json
@@ -11,6 +11,18 @@
    "azureAiServiceLocation": {
      "value": "${AZURE_ENV_OPENAI_LOCATION}"
    },
+    "gptModelDeploymentType": {
+      "value": "${AZURE_ENV_MODEL_DEPLOYMENT_TYPE}"
+    },
+    "gptModelName": {
+      "value": "${AZURE_ENV_MODEL_NAME}"
+    },
+    "gptModelVersion": {
+      "value": "${AZURE_ENV_MODEL_VERSION}"
+    },
+    "gptModelCapacity": {
+      "value": "${AZURE_ENV_MODEL_CAPACITY}"
+    },
    "backendContainerImageTag": {
      "value": "${AZURE_ENV_IMAGE_TAG}"
    },
diff --git a/infra/main.waf.parameters.json b/infra/main.waf.parameters.json
index c4b7d5ddc..66e52ec12 100644
--- a/infra/main.waf.parameters.json
+++ b/infra/main.waf.parameters.json
@@ -14,6 +14,18 @@
    "azureAiServiceLocation": {
      "value": "${AZURE_ENV_OPENAI_LOCATION}"
    },
+    "gptModelDeploymentType": {
+      "value": "${AZURE_ENV_MODEL_DEPLOYMENT_TYPE}"
+    },
+    "gptModelName": {
+      "value": "${AZURE_ENV_MODEL_NAME}"
+    },
+    "gptModelVersion": {
+      "value": "${AZURE_ENV_MODEL_VERSION}"
+    },
+    "gptModelCapacity": {
+      "value": "${AZURE_ENV_MODEL_CAPACITY}"
+    },
    "backendContainerImageTag": {
      "value": "${AZURE_ENV_IMAGE_TAG}"
    },
diff --git a/src/backend/app_config.py b/src/backend/app_config.py
index fe2b9f90c..0f2871967 100644
--- a/src/backend/app_config.py
+++ b/src/backend/app_config.py
@@ -115,7 +115,7 @@ def get_cosmos_database_client(self):
        try:
            if self._cosmos_client is None:
                self._cosmos_client = CosmosClient(
-                    self.COSMOSDB_ENDPOINT, credential=get_azure_credential()
+                    self.COSMOSDB_ENDPOINT, credential=get_azure_credential(self.AZURE_CLIENT_ID)
                )

            if self._cosmos_database is None:
@@ -152,7 +152,7 @@ def get_ai_project_client(self):
            return self._ai_project_client

        try:
-            credential = get_azure_credential()
+            credential = get_azure_credential(self.AZURE_CLIENT_ID)
            if credential is None:
                raise RuntimeError(
                    "Unable to acquire Azure credentials; ensure Managed Identity is configured"
diff --git a/src/backend/app_kernel.py b/src/backend/app_kernel.py
index e0e81abd1..0c0273b45 100644
--- a/src/backend/app_kernel.py
+++ b/src/backend/app_kernel.py
@@ -74,7 +74,7 @@
 # Add this near the top of your app.py, after initializing the app
 app.add_middleware(
     CORSMiddleware,
-    allow_origins=[frontend_url],
+    allow_origins=[frontend_url],  # Restrict allowed origins to the configured frontend URL
     allow_credentials=True,
     allow_methods=["*"],
     allow_headers=["*"],
@@ -269,7 +269,7 @@ async def input_task_endpoint(input_task: InputTask, request: Request):
        if "Rate limit is exceeded" in error_msg:
            match = re.search(r"Rate limit is exceeded\. Try again in (\d+) seconds?\.", error_msg)
            if match:
-                error_msg = f"Rate limit is exceeded. Try again in {match.group(1)} seconds."
+                error_msg = "Application temporarily unavailable due to quota limits. Please try again later."

        track_event_if_configured(
            "InputTaskError",
            {
@@ -279,7 +279,7 @@ async def input_task_endpoint(input_task: InputTask, request: Request):
                "error": str(e),
            },
        )
-        raise HTTPException(status_code=400, detail=f"Error creating plan: {error_msg}") from e
+        raise HTTPException(status_code=400, detail=f"{error_msg}") from e


 @app.post("/api/human_feedback")
diff --git a/src/backend/config_kernel.py b/src/backend/config_kernel.py
index 598a88dc5..3fb92c1f0 100644
--- a/src/backend/config_kernel.py
+++ b/src/backend/config_kernel.py
@@ -32,7 +32,7 @@ class Config:
    @staticmethod
    def GetAzureCredentials():
        """Get Azure credentials using the AppConfig implementation."""
-        return get_azure_credential()
+        return get_azure_credential(config.AZURE_CLIENT_ID)

    @staticmethod
    def GetCosmosDatabaseClient():
diff --git a/src/backend/context/cosmos_memory_kernel.py b/src/backend/context/cosmos_memory_kernel.py
index d547979da..e20cae00a 100644
--- a/src/backend/context/cosmos_memory_kernel.py
+++ b/src/backend/context/cosmos_memory_kernel.py
@@ -73,7 +73,7 @@ async def initialize(self):
        if not self._database:
            # Create Cosmos client
            cosmos_client = CosmosClient(
-                self._cosmos_endpoint, credential=get_azure_credential()
+                self._cosmos_endpoint, credential=get_azure_credential(config.AZURE_CLIENT_ID)
            )
            self._database = cosmos_client.get_database_client(
                self._cosmos_database
diff --git a/src/backend/pyproject.toml b/src/backend/pyproject.toml
index e02186fdb..ba41839b0 100644
--- a/src/backend/pyproject.toml
+++ b/src/backend/pyproject.toml
@@ -8,6 +8,7 @@ dependencies = [
    "azure-ai-evaluation>=1.5.0",
    "azure-ai-inference>=1.0.0b9",
    "azure-ai-projects>=1.0.0b9",
+    "azure-ai-agents>=1.2.0b1",
    "azure-cosmos>=4.9.0",
    "azure-identity>=1.21.0",
    "azure-monitor-events-extension>=0.1.0",
diff --git a/src/backend/utils_kernel.py b/src/backend/utils_kernel.py
index b6398ae2c..37753d171 100644
--- a/src/backend/utils_kernel.py
+++ b/src/backend/utils_kernel.py
@@ -172,7 +172,7 @@ async def rai_success(description: str, is_task_creation: bool) -> bool:
    """
    try:
        # Use managed identity for authentication to Azure OpenAI
-        credential = get_azure_credential()
+        credential = get_azure_credential(config.AZURE_CLIENT_ID)
        access_token = credential.get_token(
            "https://cognitiveservices.azure.com/.default"
        ).token
diff --git a/src/backend/uv.lock b/src/backend/uv.lock
index 61b0afada..2f9a6fc21 100644
--- a/src/backend/uv.lock
+++ b/src/backend/uv.lock
@@ -1,5 +1,5 @@
 version = 1
-revision = 2
+revision = 3
 requires-python = ">=3.11"
 resolution-markers = [
     "python_full_version >= '3.13'",
@@ -224,6 +224,20 @@ wheels = [
     { url = "https://files.pythonhosted.org/packages/5b/c0/44232f2e04358ecce33a1d9354f95683bb24262a788d008d8c9dafa3622d/av-14.3.0-cp313-cp313-win_amd64.whl", hash = "sha256:f930faa2e6f6a46d55bc67545b81f5b22bd52975679c1de0f871fc9f8ca95711", size = 27433259, upload-time = "2025-04-06T10:21:53.567Z" },
 ]

+[[package]]
+name = "azure-ai-agents"
+version = "1.2.0b1"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+    { name = "azure-core" },
+    { name = "isodate" },
+    { name = "typing-extensions" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/ed/70/0aa275a7eecead1691bd86474514bc28787f815c37d1d79ac78be03a7612/azure_ai_agents-1.2.0b1.tar.gz", hash = "sha256:914e08e553ea4379d41ad60dbc8ea5468311d97f0ae1a362686229b8565ab8dd", size = 339933, upload-time = "2025-08-05T22:21:07.262Z" }
+wheels = [
+    { url = "https://files.pythonhosted.org/packages/40/c2/4824f3cd3980f976c4dace59cb25ab1891b22626be5c80c4a96f0b9c0ba5/azure_ai_agents-1.2.0b1-py3-none-any.whl", hash = "sha256:c6862f2e6655072ee3f1f1489be2dc2bf6c0ad636ec4e7f33a5fca9cb5c8eadb", size = 202032, upload-time = "2025-08-05T22:21:08.668Z" },
+]
+
 [[package]]
 name = "azure-ai-evaluation"
 version = "1.5.0"
@@ -263,16 +277,18 @@ wheels = [

 [[package]]
 name = "azure-ai-projects"
-version = "1.0.0b10"
+version = "1.1.0b2"
 source = { registry = "https://pypi.org/simple" }
 dependencies = [
+    { name = "azure-ai-agents" },
     { name = "azure-core" },
+    { name = "azure-storage-blob" },
     { name = "isodate" },
     { name = "typing-extensions" },
 ]
-sdist = { url = "https://files.pythonhosted.org/packages/26/2e/e6ab1f7c1b12fcef9549a797a575e3dd5a71297ce12b083a983311cd5069/azure_ai_projects-1.0.0b10.tar.gz", hash = "sha256:cdc8055305cec762f09f7581796ea97599d2a2fb26f2c8486f34f728d5bdc98a", size = 323251, upload-time = "2025-04-23T21:56:56.832Z" }
+sdist = { url = "https://files.pythonhosted.org/packages/26/17/33664227381ff59690e16a8d3261c9edeb80d88acdb24b717733d63529bb/azure_ai_projects-1.1.0b2.tar.gz", hash = "sha256:79432e2de8b27f01aaad6d3f12e1549396f1c2a022665a859c45b179bf6ff228", size = 144848, upload-time = "2025-08-05T22:18:45.351Z" }
 wheels = [
-    { url = "https://files.pythonhosted.org/packages/96/7c/e45b98dc298a706ac639064aec316730a534d0d49d27986d00ba4e23dced/azure_ai_projects-1.0.0b10-py3-none-any.whl", hash = "sha256:77cd7fdac5affc37c437e60f1e244a706c1151b1bf682c5a471b3d233978b647", size = 200755, upload-time = "2025-04-23T21:56:58.032Z" },
+    { url = "https://files.pythonhosted.org/packages/26/2b/98f928ea41c03c78c02e1a72fc5e9c900d2e6e472cb51f9272cb0d4ba3bf/azure_ai_projects-1.1.0b2-py3-none-any.whl", hash = "sha256:3a4ecc6de6ab27a75b4c8228cd8162c9853fd1432e77746792b0ee2088c775db", size = 125301, upload-time = "2025-08-05T22:18:46.577Z" },
 ]

 [[package]]
@@ -429,6 +445,7 @@ name = "backend"
 version = "0.1.0"
 source = { virtual = "." }
 dependencies = [
+    { name = "azure-ai-agents" },
     { name = "azure-ai-evaluation" },
     { name = "azure-ai-inference" },
     { name = "azure-ai-projects" },
@@ -456,6 +473,7 @@ dependencies = [

 [package.metadata]
 requires-dist = [
+    { name = "azure-ai-agents", specifier = ">=1.2.0b1" },
     { name = "azure-ai-evaluation", specifier = ">=1.5.0" },
     { name = "azure-ai-inference", specifier = ">=1.0.0b9" },
     { name = "azure-ai-projects", specifier = ">=1.0.0b9" },
@@ -477,7 +495,7 @@ requires-dist = [
     { name = "pytest-cov", specifier = "==5.0.0" },
     { name = "python-dotenv", specifier = ">=1.1.0" },
     { name = "python-multipart", specifier = ">=0.0.20" },
-    { name = "semantic-kernel", specifier = ">=1.28.1" },
+    { name = "semantic-kernel", specifier = ">=1.32.2" },
     { name = "uvicorn", specifier = ">=0.34.2" },
 ]

@@ -2939,11 +2957,13 @@ wheels = [

 [[package]]
 name = "semantic-kernel"
-version = "1.29.0"
+version = "1.35.0"
 source = { registry = "https://pypi.org/simple" }
 dependencies = [
     { name = "aiohttp" },
     { name = "aiortc" },
+    { name = "azure-ai-agents" },
+    { name = "azure-ai-projects" },
     { name = "azure-identity" },
     { name = "cloudevents" },
     { name = "defusedxml" },
@@ -2955,15 +2975,17 @@ dependencies = [
     { name = "opentelemetry-api" },
     { name = "opentelemetry-sdk" },
     { name = "prance" },
+    { name = "protobuf" },
     { name = "pybars4" },
     { name = "pydantic" },
     { name = "pydantic-settings" },
     { name = "scipy" },
+    { name = "typing-extensions" },
     { name = "websockets" },
 ]
-sdist = { url = "https://files.pythonhosted.org/packages/51/fb/f12134e866867396d7706f9dff232900ec682240c8c646aab37f02479ef8/semantic_kernel-1.29.0.tar.gz", hash = "sha256:7a8e9da374c7ecc58f17aceda104d89aa35b8f5e21d080c2839a93c5b8c94450", size = 498588, upload-time = "2025-04-28T23:41:51.243Z" }
+sdist = { url = "https://files.pythonhosted.org/packages/bc/5c/4d761ff412c211260415f0e6683d22139b4ab990d9010c9962d1ec35d1b8/semantic_kernel-1.35.0.tar.gz", hash = "sha256:7fe49faaf7086263d3ac4cb42ec5d0b2344dcc21f0759bd6b79a92a7b4f8533f", size = 572339, upload-time = "2025-07-16T00:33:47.948Z" }
 wheels = [
-    { url = "https://files.pythonhosted.org/packages/fc/86/89e844020fbd0d37a2c60da611e2c3ee05fbf8dc0b38993cf804cc3c12d9/semantic_kernel-1.29.0-py3-none-any.whl", hash = "sha256:5157fb617ad5c069822db62906957396521d8813c24ce2057e7f652c53c88edf", size = 818108, upload-time = "2025-04-28T23:41:53.285Z" },
+    { url = "https://files.pythonhosted.org/packages/b0/14/b0ddf679dae28393cf068401e8f953602adf78d1fe17504479ddf9f7afdf/semantic_kernel-1.35.0-py3-none-any.whl", hash = "sha256:ce2b9c313d53841448059833e885f082d136c54a113e687359b14c5e358c0e66", size = 875792, upload-time = "2025-07-16T00:33:45.891Z" },
 ]

 [[package]]
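
The new Bicep parameters in this change are wired to `azd` environment variables through `infra/main.parameters.json` and `infra/main.waf.parameters.json`. A minimal sketch of configuring them before running `azd up`, assuming the default model settings from `infra/main.bicep` and optionally reusing existing resources (the two resource IDs are placeholders):

```bash
# Model settings consumed by gptModelDeploymentType / gptModelName / gptModelVersion / gptModelCapacity
azd env set AZURE_ENV_MODEL_DEPLOYMENT_TYPE 'GlobalStandard'
azd env set AZURE_ENV_MODEL_NAME 'gpt-4o'
azd env set AZURE_ENV_MODEL_VERSION '2024-08-06'
azd env set AZURE_ENV_MODEL_CAPACITY '150'

# Optional: reuse existing resources instead of creating new ones (placeholder IDs)
azd env set AZURE_ENV_LOG_ANALYTICS_WORKSPACE_ID '<Existing Workspace Id>'
azd env set AZURE_ENV_FOUNDRY_PROJECT_ID '<Foundry Project Resource Id>'

azd up
```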