Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
7 changes: 7 additions & 0 deletions adalflow/adalflow/components/model_client/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -65,6 +65,12 @@
OptionalPackages.OPENAI,
)

# Azure OpenAI client, registered via LazyImport against the AZURE optional
# package — same registration pattern as the other model clients in this file.
AzureAIClient = LazyImport(
    "adalflow.components.model_client.azureai_client.AzureAIClient",
    OptionalPackages.AZURE,
)

__all__ = [
"CohereAPIClient",
"TransformerReranker",
Expand All @@ -76,6 +82,7 @@
"GroqAPIClient",
"OpenAIClient",
"GoogleGenAIClient",
"AzureAIClient",
]

for name in __all__:
Expand Down
108 changes: 30 additions & 78 deletions adalflow/adalflow/components/model_client/azureai_client.py
Original file line number Diff line number Diff line change
Expand Up @@ -129,67 +129,12 @@ class AzureAIClient(ModelClient):
authentication. It is recommended to set environment variables for sensitive data like API keys.

Args:
api_key (Optional[str]): Azure OpenAI API key. Default is None.
api_version (Optional[str]): API version to use. Default is None.
azure_endpoint (Optional[str]): Azure OpenAI endpoint URL. Default is None.
credential (Optional[DefaultAzureCredential]): Azure AD credential for token-based authentication. Default is None.
chat_completion_parser (Callable[[Completion], Any]): Function to parse chat completions. Default is `get_first_message_content`.
input_type (Literal["text", "messages"]): Format for input, either "text" or "messages". Default is "text".

**Setup Instructions:**

- **Using API Key:**
Set up the following environment variables:
```bash
export AZURE_OPENAI_API_KEY="your_api_key"
export AZURE_OPENAI_ENDPOINT="your_endpoint"
export AZURE_OPENAI_VERSION="your_version"
```

- **Using Azure AD Token:**
Ensure you have configured Azure AD credentials. The `DefaultAzureCredential` will automatically use your configured credentials.

**Example Usage:**

.. code-block:: python

from azure.identity import DefaultAzureCredential
from your_module import AzureAIClient # Adjust import based on your module name

# Initialize with API key
client = AzureAIClient(
api_key="your_api_key",
api_version="2023-05-15",
azure_endpoint="https://your-endpoint.openai.azure.com/"
)

# Or initialize with Azure AD token
client = AzureAIClient(
api_version="2023-05-15",
azure_endpoint="https://your-endpoint.openai.azure.com/",
credential=DefaultAzureCredential()
)

# Example call to the chat completion API
api_kwargs = {
"model": "gpt-3.5-turbo",
"messages": [{"role": "user", "content": "What is the meaning of life?"}],
"stream": True
}
response = client.call(api_kwargs=api_kwargs, model_type=ModelType.LLM)

for chunk in response:
print(chunk)


**Notes:**
- Ensure that the API key or credentials are correctly set up and accessible to avoid authentication errors.
- Use `chat_completion_parser` to define how to extract and handle the chat completion responses.
- The `input_type` parameter determines how input is formatted for the API call.

**References:**
- [Azure OpenAI API Documentation](https://learn.microsoft.com/en-us/azure/ai-services/openai/overview)
- [OpenAI API Documentation](https://platform.openai.com/docs/guides/text-generation)
api_key: Azure OpenAI API key.
api_version: Azure OpenAI API version.
azure_endpoint: Azure OpenAI endpoint.
credential: Azure AD credential for token-based authentication.
chat_completion_parser: Function to parse chat completions.
input_type: Input format, either "text" or "messages".
"""

def __init__(
Expand All @@ -201,22 +146,11 @@ def __init__(
chat_completion_parser: Callable[[Completion], Any] = None,
input_type: Literal["text", "messages"] = "text",
Copy link
Member

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

add the model_type in the client init

):
r"""It is recommended to set the API_KEY into the environment variable instead of passing it as an argument.


Initializes the Azure OpenAI client with either API key or AAD token authentication.

Args:
api_key: Azure OpenAI API key.
api_version: Azure OpenAI API version.
azure_endpoint: Azure OpenAI endpoint.
credential: Azure AD credential for token-based authentication.
chat_completion_parser: Function to parse chat completions.
input_type: Input format, either "text" or "messages".

"""
super().__init__()

# Model type will be set dynamically based on the operation
self._model_type = None

# added api_type azure for azure Ai
self.api_type = "azure"
self._api_key = api_key
Expand All @@ -230,6 +164,16 @@ def __init__(
)
self._input_type = input_type

@property
def model_type(self) -> ModelType:
    """Return the model type currently in effect.

    Falls back to ``ModelType.LLM`` when no type has been recorded yet
    (``self._model_type`` starts out as ``None``).
    """
    if self._model_type:
        return self._model_type
    return ModelType.LLM

@model_type.setter
def model_type(self, value: ModelType):
    """Set the model type (read back, with an LLM fallback, via the ``model_type`` property)."""
    self._model_type = value

def init_sync_client(self):
api_key = self._api_key or os.getenv("AZURE_OPENAI_API_KEY")
azure_endpoint = self._azure_endpoint or os.getenv("AZURE_OPENAI_ENDPOINT")
Expand Down Expand Up @@ -357,6 +301,10 @@ def convert_inputs_to_api_kwargs(
"""

final_model_kwargs = model_kwargs.copy()
# If model_type is UNDEFINED, use the current model_type property
if model_type == ModelType.UNDEFINED:
model_type = self.model_type

if model_type == ModelType.EMBEDDER:
if isinstance(input, str):
input = [input]
Expand All @@ -383,14 +331,13 @@ def convert_inputs_to_api_kwargs(
if match:
system_prompt = match.group(1)
input_str = match.group(2)

else:
print("No match found.")
if system_prompt and input_str:
messages.append({"role": "system", "content": system_prompt})
messages.append({"role": "user", "content": input_str})
if len(messages) == 0:
messages.append({"role": "system", "content": input})
messages.append({"role": "user", "content": input})
final_model_kwargs["messages"] = messages
else:
raise ValueError(f"model_type {model_type} is not supported")
Expand All @@ -409,8 +356,13 @@ def convert_inputs_to_api_kwargs(
)
def call(self, api_kwargs: Dict = {}, model_type: ModelType = ModelType.UNDEFINED):
"""
kwargs is the combined input and model_kwargs. Support streaming call.
kwargs is the combined input and model_kwargs. Support streaming call.
Also updates the internal model_type based on the operation.
"""
# Update internal model type based on the operation
if model_type != ModelType.UNDEFINED:
self.model_type = model_type

log.info(f"api_kwargs: {api_kwargs}")
if model_type == ModelType.EMBEDDER:
return self.sync_client.embeddings.create(**api_kwargs)
Expand Down
89 changes: 89 additions & 0 deletions examples/README.md
Original file line number Diff line number Diff line change
@@ -0,0 +1,89 @@
# Azure AI Client Example

This example demonstrates how to use the AdalFlow Azure AI client with both API key and Azure AD authentication methods.
Copy link
Member

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

move these examples to tutorials. and use rst


## Prerequisites

1. Install the required packages:
```bash
pip install adalflow azure-identity python-dotenv
```

2. Set up your Azure OpenAI service and get the necessary credentials:
- API key authentication: Get your API key from the Azure portal
- Azure AD authentication: Set up an Azure AD application and get the client ID, tenant ID, and client secret

3. Configure your environment variables by copying the `.env.example` file to `.env` and filling in your values:
```bash
cp .env.example .env
```

## Environment Variables

Edit the `.env` file and fill in your values:

```env
# Azure OpenAI API Configuration
AZURE_OPENAI_API_KEY="your_api_key_here"
AZURE_OPENAI_ENDPOINT="https://your-endpoint.openai.azure.com/"
AZURE_OPENAI_VERSION="2024-02-15-preview"
# Azure AD Authentication (Optional - if using AAD auth)
AZURE_CLIENT_ID="your_client_id_here"
AZURE_TENANT_ID="your_tenant_id_here"
AZURE_CLIENT_SECRET="your_client_secret_here"
# Azure Model Deployment
AZURE_MODEL_NAME="your_model_deployment_name"
AZURE_OPENAI_EMBEDDING_DEPLOYMENT="text-embedding-ada-002"
```

## Running the Example

The example script demonstrates:
1. Chat completion with API key authentication
2. Streaming chat completion
3. Text embeddings
4. Chat completion with Azure AD authentication

To run the example:

```bash
python azure_client_example.py
```

## Features Demonstrated

1. **Multiple Authentication Methods**:
- API key-based authentication
- Azure AD authentication using DefaultAzureCredential

2. **Chat Completions**:
- Regular chat completion
- Streaming chat completion
- System and user message handling

3. **Text Embeddings**:
- Generate embeddings for multiple texts
- Embedding dimension output

## Example Output

You should see output similar to this:

```
Testing with API key authentication:
=== Testing Chat Completion ===
[Response from the model about Paris tourist attractions]
=== Testing Chat Completion (Streaming) ===
[Streamed response from the model]
=== Testing Embeddings ===
Generated 2 embeddings
Embedding dimension: 1536
Testing with Azure AD authentication:
[Response from the model using AAD authentication]
```
Loading
Loading