Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
1 change: 1 addition & 0 deletions sdk/ai/azure-ai-projects/cspell.json
Original file line number Diff line number Diff line change
Expand Up @@ -17,6 +17,7 @@
"inpainting",
"CSDL",
"fstring",
"aiprojectclient",
],
"ignorePaths": [
"*.csv",
Expand Down
Original file line number Diff line number Diff line change
@@ -0,0 +1,46 @@
# ------------------------------------
# Copyright (c) Microsoft Corporation.
# Licensed under the MIT License.
# ------------------------------------

"""
DESCRIPTION:
This sample demonstrates how to run a basic responses operation
using the synchronous OpenAI client. We do not use AIProjectClient
in this sample, but rather construct the OpenAI client directly.

See also https://platform.openai.com/docs/api-reference/responses/create?lang=python

USAGE:
python sample_responses_basic_without_aiprojectclient.py

Before running the sample:

pip install openai azure-identity python-dotenv

Set these environment variables with your own values:
1) AZURE_AI_PROJECT_ENDPOINT - The Azure AI Project endpoint, as found in the Overview
page of your Azure AI Foundry portal.
2) AZURE_AI_MODEL_DEPLOYMENT_NAME - The deployment name of the AI model, as found under the "Name" column in
the "Models + endpoints" tab in your Azure AI Foundry project.
"""

import os
from dotenv import load_dotenv
from openai import OpenAI
from azure.identity import DefaultAzureCredential, get_bearer_token_provider

# Pull AZURE_AI_* settings from a local .env file, if one is present.
load_dotenv()

# Entra ID bearer tokens stand in for a static API key; the provider refreshes
# them on demand for every request the client makes.
token_provider = get_bearer_token_provider(DefaultAzureCredential(), "https://ai.azure.com/.default")

# The project endpoint may or may not end with a slash; normalize before
# appending the OpenAI route segment.
project_endpoint = os.environ["AZURE_AI_PROJECT_ENDPOINT"].rstrip("/")

client = OpenAI(
    api_key=token_provider,
    base_url=project_endpoint + "/openai",
    default_query={"api-version": "2025-11-15-preview"},
)

# Issue a single "responses" call against the configured model deployment.
result = client.responses.create(
    model=os.environ["AZURE_AI_MODEL_DEPLOYMENT_NAME"],
    input="How many feet are in a mile?",
)

print(f"Response output: {result.output_text}")
Original file line number Diff line number Diff line change
@@ -0,0 +1,60 @@
# ------------------------------------
# Copyright (c) Microsoft Corporation.
# Licensed under the MIT License.
# ------------------------------------

"""
DESCRIPTION:
This sample demonstrates how to run a basic responses operation
using the asynchronous AsyncOpenAI client. We do not use AIProjectClient
in this sample, but rather construct the AsyncOpenAI client directly.

See also https://platform.openai.com/docs/api-reference/responses/create?lang=python

USAGE:
python sample_responses_basic_without_aiprojectclient_async.py

Before running the sample:

pip install openai azure-identity python-dotenv

Set these environment variables with your own values:
1) AZURE_AI_PROJECT_ENDPOINT - The Azure AI Project endpoint, as found in the Overview
page of your Azure AI Foundry portal.
2) AZURE_AI_MODEL_DEPLOYMENT_NAME - The deployment name of the AI model, as found under the "Name" column in
the "Models + endpoints" tab in your Azure AI Foundry project.
"""

import asyncio
import os
from dotenv import load_dotenv
from openai import AsyncOpenAI
from azure.identity.aio import DefaultAzureCredential, get_bearer_token_provider

load_dotenv()


async def main() -> None:
    """Send one responses request with an AsyncOpenAI client and print the answer.

    Authentication uses an Entra ID bearer-token provider (no static API key);
    both the credential and the client are closed via ``async with``.
    """
    async with DefaultAzureCredential() as credential:
        # The token provider refreshes bearer tokens on demand per request.
        token_provider = get_bearer_token_provider(credential, "https://ai.azure.com/.default")

        # Normalize the endpoint before appending the OpenAI route segment.
        endpoint = os.environ["AZURE_AI_PROJECT_ENDPOINT"].rstrip("/")

        async with AsyncOpenAI(
            api_key=token_provider,
            base_url=endpoint + "/openai",
            default_query={"api-version": "2025-11-15-preview"},
        ) as client:
            result = await client.responses.create(
                model=os.environ["AZURE_AI_MODEL_DEPLOYMENT_NAME"],
                input="How many feet are in a mile?",
            )

            print(f"Response output: {result.output_text}")


if __name__ == "__main__":
    asyncio.run(main())
Loading