Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
23 changes: 18 additions & 5 deletions infra/main.bicep
Original file line number Diff line number Diff line change
Expand Up @@ -10,7 +10,7 @@ param environmentName string
param existingLogAnalyticsWorkspaceId string = ''

@description('CosmosDB Location')
param cosmosLocation string
param cosmosLocation string = 'eastus2'

@minLength(1)
@description('GPT model deployment type:')
Expand All @@ -33,7 +33,7 @@ param azureOpenaiAPIVersion string = '2025-04-01-preview'
@description('Capacity of the GPT deployment:')
// You can increase this, but capacity is limited per model/region, so you will get errors if you go over
// https://learn.microsoft.com/en-us/azure/ai-services/openai/quotas-limits
param gptDeploymentCapacity int = 30
param gptDeploymentCapacity int = 200

@minLength(1)
@description('Name of the Text Embedding model to deploy:')
Expand All @@ -53,14 +53,27 @@ param imageTag string = 'latest'

// Restricting to these regions because the Assistants API for gpt-4o-mini is available only in these regions
@allowed(['australiaeast','eastus', 'eastus2','francecentral','japaneast','swedencentral','uksouth', 'westus', 'westus3'])
@description('Azure OpenAI Location')
param AzureOpenAILocation string = 'eastus2'
// @description('Azure OpenAI Location')
// param AzureOpenAILocation string = 'eastus2'

@metadata({
azd:{
type: 'location'
usageName: [
'OpenAI.GlobalStandard.gpt-4o-mini,200'
'OpenAI.Standard.text-embedding-ada-002,80'
]
}
})
@description('Location for AI Foundry deployment. This is the location where the AI Foundry resources will be deployed.')
param aiDeploymentsLocation string

@description('Set this if you want to deploy to a different region than the resource group. Otherwise, it will use the resource group location by default.')
param AZURE_LOCATION string=''
var solutionLocation = empty(AZURE_LOCATION) ? resourceGroup().location : AZURE_LOCATION

var uniqueId = toLower(uniqueString(environmentName, subscription().id, solutionLocation))

var solutionPrefix = 'ca${padLeft(take(uniqueId, 12), 12, '0')}'

// Load the abbreviations file required to name the Azure resources.
Expand Down Expand Up @@ -133,7 +146,7 @@ module aifoundry 'deploy_ai_foundry.bicep' = {
name: 'deploy_ai_foundry'
params: {
solutionName: solutionPrefix
solutionLocation: AzureOpenAILocation
solutionLocation: aiDeploymentsLocation
keyVaultName: keyvaultModule.outputs.keyvaultName
deploymentType: deploymentType
gptModelName: gptModelName
Expand Down
14 changes: 0 additions & 14 deletions infra/main.bicepparam

This file was deleted.

39 changes: 39 additions & 0 deletions infra/main.parameters.json
Original file line number Diff line number Diff line change
@@ -0,0 +1,39 @@
{
"$schema": "https://schema.management.azure.com/schemas/2019-04-01/deploymentParameters.json#",
"contentVersion": "1.0.0.0",
"parameters": {
"environmentName": {
"value": "${AZURE_ENV_NAME}"
},
"cosmosLocation": {
"value": "${AZURE_ENV_COSMOS_LOCATION}"
},
"deploymentType": {
"value": "${AZURE_ENV_MODEL_DEPLOYMENT_TYPE}"
},
"gptModelName": {
"value": "${AZURE_ENV_MODEL_NAME}"
},
"azureOpenaiAPIVersion": {
"value": "${AZURE_ENV_MODEL_VERSION}"
},
"gptDeploymentCapacity": {
"value": "${AZURE_ENV_MODEL_CAPACITY}"
},
"embeddingModel": {
"value": "${AZURE_ENV_EMBEDDING_MODEL_NAME}"
},
"embeddingDeploymentCapacity": {
"value": "${AZURE_ENV_EMBEDDING_MODEL_CAPACITY}"
},
"imageTag": {
"value": "${AZURE_ENV_IMAGETAG}"
},
"location": {
"value": "${AZURE_LOCATION}"
},
"existingLogAnalyticsWorkspaceId": {
"value": "${AZURE_ENV_LOG_ANALYTICS_WORKSPACE_ID}"
}
}
}
Original file line number Diff line number Diff line change
Expand Up @@ -87,13 +87,15 @@ export function ChatHistoryPanel(_props: ChatHistoryPanelProps) {

// Deletes the user's entire chat history via the backend, keeping the UI
// loader/clearing flags consistent even when the request fails or throws.
// On success: clears history from app state and closes the confirm dialog.
// On failure (non-OK response OR rejected promise): surfaces the error flag.
const onClearAllChatHistory = async () => {
  setClearing(true)
  appStateContext?.dispatch({ type: 'TOGGLE_LOADER' });
  try {
    const response = await historyDeleteAll()
    if (!response.ok) {
      setClearingError(true)
    } else {
      appStateContext?.dispatch({ type: 'DELETE_CHAT_HISTORY' })
      toggleClearAllDialog()
    }
  } catch {
    // A rejected fetch previously escaped as an unhandled rejection and
    // left the loader stuck on; treat it the same as a non-OK response.
    setClearingError(true)
  } finally {
    // Always undo the loader toggle and the clearing flag, no matter how
    // the request ended — otherwise the spinner never goes away.
    appStateContext?.dispatch({ type: 'TOGGLE_LOADER' });
    setClearing(false)
  }
}

Expand Down
Loading