diff --git a/extensions/chat-with-content/README.md b/extensions/chat-with-content/README.md index 6bc28efe..c6a0a66f 100644 --- a/extensions/chat-with-content/README.md +++ b/extensions/chat-with-content/README.md @@ -21,7 +21,7 @@ As a Posit Connect administrator, you need to configure the environment for this 1. **Publish the Extension**: Publish this application to Posit Connect. -2. **Configure Environment Variables**: In the "Vars" pane of the content settings, you need to set environment variables to configure the LLM provider. This extension uses the `chatlas` library, which supports various LLM providers like OpenAI, Google Gemini, and Anthropic on AWS Bedrock. +2. **Configure Environment Variables**: In the "Vars" pane of the content settings, you need to set environment variables to configure the LLM provider. This extension uses the `chatlas` library, which supports various LLM providers like OpenAI, Azure OpenAI, Google Gemini, Anthropic, and Anthropic on AWS Bedrock. Set `CHATLAS_CHAT_PROVIDER_MODEL` to specify the provider and model in the format `provider/model`. You also need to provide the API key for the chosen service. 
@@ -40,6 +40,13 @@ As a Posit Connect administrator, you need to configure the environment for this - `CHATLAS_CHAT_PROVIDER_MODEL`: `anthropic/claude-sonnet-4-20250514` - `ANTHROPIC_API_KEY`: `` (Set this as a secret) + **Example for Azure OpenAI:** + + - `CHATLAS_CHAT_PROVIDER_MODEL`: `azure-openai/{deployment_id}` (e.g., `azure-openai/gpt-4.1-mini`) + - `AZURE_OPENAI_ENDPOINT`: `https://{your-resource-name}.openai.azure.com` + - `AZURE_OPENAI_API_KEY`: `` (Set this as a secret) + - `CHATLAS_CHAT_ARGS`: `{"api_version": "2025-03-01-preview"}` (Optional — defaults to `2025-03-01-preview` if unset; see [Azure OpenAI API versions](https://learn.microsoft.com/en-us/azure/ai-services/openai/api-version-deprecation)) + **Example for Anthropic on AWS Bedrock:** The application uses the [botocore](https://botocore.amazonaws.com/v1/documentation/api/latest/reference/credentials.html) credential chain for AWS authentication. If the Connect server is running on an EC2 instance with an IAM role that grants access to Bedrock, credentials are automatically detected and no configuration is needed. In this case, the application uses the `us.anthropic.claude-sonnet-4-20250514-v1:0` model by default. @@ -51,7 +58,7 @@ As a Posit Connect administrator, you need to configure the environment for this - `AWS_REGION`: `` (e.g., `us-east-1`) - `AWS_SESSION_TOKEN`: `` (Optional, for temporary credentials) - For more details on supported providers and their arguments, see the [Chatlas documentation](https://posit-dev.github.io/chatlas/reference/ChatAuto.html). + For more details on supported providers and their arguments, see the [chatlas documentation](https://posit-dev.github.io/chatlas/reference/ChatAuto.html). 3. **Enable Visitor API Key Integration**: This extension requires access to the Connect API on behalf of the visiting user to list their available content. In the "Access" pane of the content settings, add a "Connect Visitor API Key" integration. 
diff --git a/extensions/chat-with-content/app.py b/extensions/chat-with-content/app.py index e7aca963..a0b232b6 100644 --- a/extensions/chat-with-content/app.py +++ b/extensions/chat-with-content/app.py @@ -2,7 +2,7 @@ from posit import connect from posit.connect.content import ContentItem from posit.connect.errors import ClientError -from chatlas import ChatAuto, ChatBedrockAnthropic, SystemTurn, UserTurn +from chatlas import ChatAuto, ChatAzureOpenAI, ChatBedrockAnthropic, SystemTurn, UserTurn import markdownify from shiny import App, Inputs, Outputs, Session, ui, reactive, render @@ -127,7 +127,7 @@ def fetch_connect_content_list(client: connect.Client): ui.HTML( "This app requires the CHATLAS_CHAT_PROVIDER_MODEL environment variable to be " "set along with an LLM API Key in the content access panel. Please set them in your environment before running the app. " - 'See the documentation for more details on which arguments can be set for each Chatlas provider.' + 'See the documentation for more details on which arguments can be set for each Chatlas provider.' ), class_="setup-description", ), @@ -137,6 +137,13 @@ def fetch_connect_content_list(client: connect.Client): OPENAI_API_KEY = "" """, class_="setup-code-block", ), + ui.div( + ui.HTML( + 'For other provider examples (Azure OpenAI, Anthropic, AWS Bedrock, etc.), see the ' + 'README.' 
+ ), + class_="setup-description", + ), ui.h2("Connect Visitor API Key", class_="setup-section-title"), ui.div( "Before you are able to use this app, you need to add a Connect Visitor API Key integration in the access panel.", @@ -197,6 +204,7 @@ def fetch_connect_content_list(client: connect.Client): CHATLAS_CHAT_PROVIDER = os.getenv("CHATLAS_CHAT_PROVIDER") CHATLAS_CHAT_ARGS = os.getenv("CHATLAS_CHAT_ARGS") CHATLAS_CHAT_PROVIDER_MODEL = os.getenv("CHATLAS_CHAT_PROVIDER_MODEL") +IS_AZURE_OPENAI = (CHATLAS_CHAT_PROVIDER_MODEL or "").startswith("azure-openai/") HAS_AWS_CREDENTIALS = check_aws_bedrock_credentials() @@ -235,7 +243,21 @@ def server(input: Inputs, output: Outputs, session: Session): """ - if (CHATLAS_CHAT_PROVIDER_MODEL or CHATLAS_CHAT_PROVIDER) and not HAS_AWS_CREDENTIALS: + if IS_AZURE_OPENAI and not HAS_AWS_CREDENTIALS: + # Azure OpenAI requires deployment id instead of model. + + import json + + deployment_id = CHATLAS_CHAT_PROVIDER_MODEL.split("/", 1)[1] + chat_args = json.loads(CHATLAS_CHAT_ARGS or "{}") + chat = ChatAzureOpenAI( + endpoint=os.environ["AZURE_OPENAI_ENDPOINT"], + deployment_id=deployment_id, + api_version=chat_args.get("api_version", "2025-03-01-preview"), + api_key=os.getenv("AZURE_OPENAI_API_KEY"), + system_prompt=system_prompt, + ) + elif (CHATLAS_CHAT_PROVIDER_MODEL or CHATLAS_CHAT_PROVIDER) and not HAS_AWS_CREDENTIALS: # This will pull its configuration from environment variables # CHATLAS_CHAT_PROVIDER_MODEL, or the deprecated CHATLAS_CHAT_PROVIDER and CHATLAS_CHAT_ARGS chat = ChatAuto(