
Commit be3f29f

Add samples for "responses" without using AIProjectClient (#43820)
1 parent cc9b09a commit be3f29f

File tree

3 files changed: +107 -0 lines changed
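For contrast with these new samples, the responses samples elsewhere in this package route the same call through AIProjectClient and let it hand back the OpenAI client. The sketch below illustrates that pattern under stated assumptions: the get_openai_client() helper name and its behavior are taken from the package's usual sample style, not from this commit, so treat them as illustrative.

# Assumed AIProjectClient-based pattern, shown only for contrast with the
# samples added in this commit. get_openai_client() is an assumed helper;
# verify the exact name against the azure-ai-projects documentation.
import os

from azure.ai.projects import AIProjectClient
from azure.identity import DefaultAzureCredential

project_client = AIProjectClient(
    endpoint=os.environ["AZURE_AI_PROJECT_ENDPOINT"],
    credential=DefaultAzureCredential(),
)

client = project_client.get_openai_client()  # assumed helper

response = client.responses.create(
    model=os.environ["AZURE_AI_MODEL_DEPLOYMENT_NAME"],
    input="How many feet are in a mile?",
)
print(f"Response output: {response.output_text}")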

sdk/ai/azure-ai-projects/cspell.json

Lines changed: 1 addition & 0 deletions
@@ -17,6 +17,7 @@
     "inpainting",
     "CSDL",
     "fstring",
+    "aiprojectclient",
   ],
   "ignorePaths": [
     "*.csv",
sample_responses_basic_without_aiprojectclient.py

Lines changed: 46 additions & 0 deletions

@@ -0,0 +1,46 @@
# ------------------------------------
# Copyright (c) Microsoft Corporation.
# Licensed under the MIT License.
# ------------------------------------

"""
DESCRIPTION:
    This sample demonstrates how to run a basic responses operation
    using the synchronous OpenAI client. We do not use AIProjectClient
    in this sample, but rather construct the OpenAI client directly.

    See also https://platform.openai.com/docs/api-reference/responses/create?lang=python

USAGE:
    python sample_responses_basic_without_aiprojectclient.py

    Before running the sample:

    pip install openai azure-identity python-dotenv

    Set these environment variables with your own values:
    1) AZURE_AI_PROJECT_ENDPOINT - The Azure AI Project endpoint, as found in the Overview
       page of your Azure AI Foundry portal.
    2) AZURE_AI_MODEL_DEPLOYMENT_NAME - The deployment name of the AI model, as found under the "Name" column in
       the "Models + endpoints" tab in your Azure AI Foundry project.
"""

import os
from dotenv import load_dotenv
from openai import OpenAI
from azure.identity import DefaultAzureCredential, get_bearer_token_provider

load_dotenv()

openai = OpenAI(
    api_key=get_bearer_token_provider(DefaultAzureCredential(), "https://ai.azure.com/.default"),
    base_url=os.environ["AZURE_AI_PROJECT_ENDPOINT"].rstrip("/") + "/openai",
    default_query={"api-version": "2025-11-15-preview"},
)

response = openai.responses.create(
    model=os.environ["AZURE_AI_MODEL_DEPLOYMENT_NAME"],
    input="How many feet are in a mile?",
)

print(f"Response output: {response.output_text}")
sample_responses_basic_without_aiprojectclient_async.py

Lines changed: 60 additions & 0 deletions

@@ -0,0 +1,60 @@
# ------------------------------------
# Copyright (c) Microsoft Corporation.
# Licensed under the MIT License.
# ------------------------------------

"""
DESCRIPTION:
    This sample demonstrates how to run a basic responses operation
    using the asynchronous AsyncOpenAI client. We do not use AIProjectClient
    in this sample, but rather construct the AsyncOpenAI client directly.

    See also https://platform.openai.com/docs/api-reference/responses/create?lang=python

USAGE:
    python sample_responses_basic_without_aiprojectclient_async.py

    Before running the sample:

    pip install openai azure-identity python-dotenv

    Set these environment variables with your own values:
    1) AZURE_AI_PROJECT_ENDPOINT - The Azure AI Project endpoint, as found in the Overview
       page of your Azure AI Foundry portal.
    2) AZURE_AI_MODEL_DEPLOYMENT_NAME - The deployment name of the AI model, as found under the "Name" column in
       the "Models + endpoints" tab in your Azure AI Foundry project.
"""

import asyncio
import os
from dotenv import load_dotenv
from openai import AsyncOpenAI
from azure.identity.aio import DefaultAzureCredential, get_bearer_token_provider

load_dotenv()


async def main() -> None:

    credential = DefaultAzureCredential()

    async with credential:

        openai = AsyncOpenAI(
            api_key=get_bearer_token_provider(credential, "https://ai.azure.com/.default"),
            base_url=os.environ["AZURE_AI_PROJECT_ENDPOINT"].rstrip("/") + "/openai",
            default_query={"api-version": "2025-11-15-preview"},
        )

        async with openai:

            response = await openai.responses.create(
                model=os.environ["AZURE_AI_MODEL_DEPLOYMENT_NAME"],
                input="How many feet are in a mile?",
            )

            print(f"Response output: {response.output_text}")


if __name__ == "__main__":
    asyncio.run(main())
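Both samples send input as a bare string. The Responses API also accepts a list of role/content messages, which is how a short multi-turn exchange would be expressed. The sketch below is meant to replace the responses.create call inside the async sample's `async with openai:` block; the list-of-messages shape is assumed from the openai package's documented Responses input format rather than from this commit.

# Multi-turn input (sketch): drop-in replacement for the responses.create call
# inside the async sample's `async with openai:` block. The list-of-messages
# input shape is assumed from the openai package docs, not from this commit.
response = await openai.responses.create(
    model=os.environ["AZURE_AI_MODEL_DEPLOYMENT_NAME"],
    input=[
        {"role": "user", "content": "How many feet are in a mile?"},
        {"role": "assistant", "content": "There are 5,280 feet in a mile."},
        {"role": "user", "content": "And how many yards is that?"},
    ],
)
print(f"Response output: {response.output_text}")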
