
Commit 035ee80

Beta 2 - Azure AI Projects SDK (Azure#38578)
1 parent a6ffbb5 commit 035ee80

File tree: 13 files changed, +361 -222 lines changed


sdk/ai/azure-ai-projects/CHANGELOG.md

Lines changed: 6 additions & 4 deletions
@@ -1,12 +1,14 @@
 # Release History
 
-## 1.0.0b2 (Unreleased)
+## 1.0.0b2 (2024-12-03)
 
 ### Bugs Fixed
 
-* Fixed bugs that were causing issues with tracing agent asynchronous functionality
-* Fix a bug causing warning about unclosed session, shown when using asynchronous credentials to create agent.
-* Fix a bug that would cause agent function tool related function names and parameters to be included in traces even when content recording is not enabled
+* Fix a bug in the `.inference` operations when Entra ID authentication is used by the default connection.
+* Fixed bugs occurring during streaming in function tool calls by asynchronous agents.
+* Fixed bugs that were causing issues with tracing agent asynchronous functionality.
+* Fix a bug causing warning about unclosed session, shown when using asynchronous credentials to create agent.
+* Fix a bug that would cause agent function tool related function names and parameters to be included in traces even when content recording is not enabled.
 
 ## 1.0.0b1 (2024-11-15)
 
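The first new bullet above covers an Entra ID fix in the `.inference` operations. As a rough illustration of the code path it affects, here is a minimal sketch that obtains a chat completions client from the project's default Azure AI Services connection; the connection-string environment variable and the model name are placeholders, and the overall pattern is assumed from the package samples rather than taken from this commit.

```python
import os

from azure.ai.inference.models import UserMessage
from azure.ai.projects import AIProjectClient
from azure.identity import DefaultAzureCredential

# Placeholder: your project connection string, as shown in the Azure AI portal.
project_client = AIProjectClient.from_connection_string(
    credential=DefaultAzureCredential(),
    conn_str=os.environ["PROJECT_CONNECTION_STRING"],
)

# With this release, a default connection that authenticates with Entra ID
# yields a ChatCompletionsClient configured with the connection's token
# credential and the matching credential scope (see the diffs below).
chat_client = project_client.inference.get_chat_completions_client()
response = chat_client.complete(
    model="gpt-4o-mini",  # placeholder model deployment name
    messages=[UserMessage(content="How many feet are in a mile?")],
)
print(response.choices[0].message.content)
```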

sdk/ai/azure-ai-projects/assets.json

Lines changed: 1 addition & 1 deletion
@@ -2,5 +2,5 @@
   "AssetsRepo": "Azure/azure-sdk-assets",
   "AssetsRepoPrefixPath": "python",
   "TagPrefix": "python/ai/azure-ai-projects",
-  "Tag": "python/ai/azure-ai-projects_a5498ae251"
+  "Tag": "python/ai/azure-ai-projects_84b9cfdbbd"
 }

sdk/ai/azure-ai-projects/azure/ai/projects/aio/operations/_patch.py

Lines changed: 30 additions & 21 deletions
@@ -97,6 +97,7 @@ async def get_chat_completions_client(self, **kwargs) -> "ChatCompletionsClient"
         connection = await self._outer_instance.connections.get_default(
             connection_type=ConnectionType.AZURE_AI_SERVICES, include_credentials=True, **kwargs
         )
+        logger.debug("[InferenceOperations.get_chat_completions_client] connection = %s", str(connection))
 
         try:
             from azure.ai.inference.aio import ChatCompletionsClient
@@ -107,11 +108,10 @@ async def get_chat_completions_client(self, **kwargs) -> "ChatCompletionsClient"
 
         if use_serverless_connection:
             endpoint = connection.endpoint_url
+            credential_scopes = ["https://ml.azure.com/.default"]
         else:
-            # Be sure to use the Azure resource name here, not the connection name. Connection name is something that
-            # admins can pick when they manually create a new connection (or use bicep). Get the Azure resource name
-            # from the end of the connection id.
-            endpoint = f"https://{connection.id.split('/')[-1]}.services.ai.azure.com/models"
+            endpoint = f"{connection.endpoint_url}/models"
+            credential_scopes = ["https://cognitiveservices.azure.com/.default"]
 
         if connection.authentication_type == AuthenticationType.API_KEY:
             logger.debug(
@@ -122,12 +122,13 @@ async def get_chat_completions_client(self, **kwargs) -> "ChatCompletionsClient"
 
             client = ChatCompletionsClient(endpoint=endpoint, credential=AzureKeyCredential(connection.key))
         elif connection.authentication_type == AuthenticationType.ENTRA_ID:
-            # MaaS models do not yet support EntraID auth
             logger.debug(
                 "[InferenceOperations.get_chat_completions_client]"
                 + " Creating ChatCompletionsClient using Entra ID authentication"
             )
-            client = ChatCompletionsClient(endpoint=endpoint, credential=connection.properties.token_credential)
+            client = ChatCompletionsClient(
+                endpoint=endpoint, credential=connection.token_credential, credential_scopes=credential_scopes
+            )
         elif connection.authentication_type == AuthenticationType.SAS:
             logger.debug(
                 "[InferenceOperations.get_chat_completions_client] "
@@ -172,6 +173,7 @@ async def get_embeddings_client(self, **kwargs) -> "EmbeddingsClient":
         connection = await self._outer_instance.connections.get_default(
             connection_type=ConnectionType.AZURE_AI_SERVICES, include_credentials=True, **kwargs
         )
+        logger.debug("[InferenceOperations.get_embeddings_client] connection = %s", str(connection))
 
         try:
             from azure.ai.inference.aio import EmbeddingsClient
@@ -182,11 +184,10 @@ async def get_embeddings_client(self, **kwargs) -> "EmbeddingsClient":
 
         if use_serverless_connection:
             endpoint = connection.endpoint_url
+            credential_scopes = ["https://ml.azure.com/.default"]
         else:
-            # Be sure to use the Azure resource name here, not the connection name. Connection name is something that
-            # admins can pick when they manually create a new connection (or use bicep). Get the Azure resource name
-            # from the end of the connection id.
-            endpoint = f"https://{connection.id.split('/')[-1]}.services.ai.azure.com/models"
+            endpoint = f"{connection.endpoint_url}/models"
+            credential_scopes = ["https://cognitiveservices.azure.com/.default"]
 
         if connection.authentication_type == AuthenticationType.API_KEY:
             logger.debug(
@@ -196,11 +197,12 @@ async def get_embeddings_client(self, **kwargs) -> "EmbeddingsClient":
 
             client = EmbeddingsClient(endpoint=endpoint, credential=AzureKeyCredential(connection.key))
         elif connection.authentication_type == AuthenticationType.ENTRA_ID:
-            # MaaS models do not yet support EntraID auth
             logger.debug(
                 "[InferenceOperations.get_embeddings_client] Creating EmbeddingsClient using Entra ID authentication"
             )
-            client = EmbeddingsClient(endpoint=endpoint, credential=connection.properties.token_credential)
+            client = EmbeddingsClient(
+                endpoint=endpoint, credential=connection.token_credential, credential_scopes=credential_scopes
+            )
         elif connection.authentication_type == AuthenticationType.SAS:
             logger.debug(
                 "[InferenceOperations.get_embeddings_client] Creating EmbeddingsClient using SAS authentication"
@@ -235,6 +237,7 @@ async def get_azure_openai_client(self, *, api_version: Optional[str] = None, **
         connection = await self._outer_instance.connections.get_default(
             connection_type=ConnectionType.AZURE_OPEN_AI, include_credentials=True, **kwargs
         )
+        logger.debug("[InferenceOperations.get_azure_openai_client] connection = %s", str(connection))
 
         try:
             from openai import AsyncAzureOpenAI
@@ -250,25 +253,30 @@ async def get_azure_openai_client(self, *, api_version: Optional[str] = None, **
             client = AsyncAzureOpenAI(
                 api_key=connection.key, azure_endpoint=connection.endpoint_url, api_version=api_version
             )
-        elif connection.authentication_type in {AuthenticationType.ENTRA_ID, AuthenticationType.SAS}:
+        elif connection.authentication_type == AuthenticationType.ENTRA_ID:
+            logger.debug(
+                "[InferenceOperations.get_azure_openai_client] " + "Creating AzureOpenAI using Entra ID authentication"
+            )
             try:
                 from azure.identity.aio import get_bearer_token_provider
             except ModuleNotFoundError as e:
                 raise ModuleNotFoundError(
                     "azure.identity package not installed. Please install it using 'pip install azure-identity'"
                 ) from e
-            if connection.authentication_type == AuthenticationType.ENTRA_ID:
-                auth = "Creating AzureOpenAI using Entra ID authentication"
-            else:
-                auth = "Creating AzureOpenAI using SAS authentication"
-            logger.debug("[InferenceOperations.get_azure_openai_client] %s", auth)
             client = AsyncAzureOpenAI(
                 azure_ad_token_provider=get_bearer_token_provider(
                     connection.token_credential, "https://cognitiveservices.azure.com/.default"
                 ),
                 azure_endpoint=connection.endpoint_url,
                 api_version=api_version,
             )
+        elif connection.authentication_type == AuthenticationType.SAS:
+            logger.debug(
+                "[InferenceOperations.get_azure_openai_client] " + "Creating AzureOpenAI using SAS authentication"
+            )
+            raise ValueError(
+                "Getting an AzureOpenAI client from a connection with SAS authentication is not yet supported"
+            )
         else:
             raise ValueError("Unknown authentication type")
 
@@ -299,16 +307,17 @@ async def get_default(
         if not connection_type:
             raise ValueError("You must specify an connection type")
         # Since there is no notion of default connection at the moment, list all connections in the category
-        # and return the first one
+        # and return the first one (index 0), unless overridden by the environment variable DEFAULT_CONNECTION_INDEX.
         connection_properties_list = await self.list(connection_type=connection_type, **kwargs)
         if len(connection_properties_list) > 0:
+            default_connection_index = int(os.getenv("DEFAULT_CONNECTION_INDEX", "0"))
             if include_credentials:
                 return await self.get(
-                    connection_name=connection_properties_list[0].name,
+                    connection_name=connection_properties_list[default_connection_index].name,
                     include_credentials=include_credentials,
                     **kwargs,
                 )
-            return connection_properties_list[0]
+            return connection_properties_list[default_connection_index]
         raise ResourceNotFoundError(f"No connection of type {connection_type} found")
 
     @distributed_trace_async
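The last hunk above also introduces an environment-variable override for which connection `get_default` returns. Below is a minimal sketch of using it from application code, assuming the async `AIProjectClient` entry point from the package README and a placeholder connection string; neither is part of this diff.

```python
import asyncio
import os

from azure.ai.projects.aio import AIProjectClient
from azure.ai.projects.models import ConnectionType
from azure.identity.aio import DefaultAzureCredential


async def show_default_connection() -> None:
    # get_default() returns the first listed connection of the requested type.
    # Setting DEFAULT_CONNECTION_INDEX picks another entry by list index,
    # without any code changes.
    os.environ["DEFAULT_CONNECTION_INDEX"] = "1"

    async with DefaultAzureCredential() as credential:
        async with AIProjectClient.from_connection_string(
            credential=credential,
            conn_str=os.environ["PROJECT_CONNECTION_STRING"],  # placeholder
        ) as project_client:
            connection = await project_client.connections.get_default(
                connection_type=ConnectionType.AZURE_AI_SERVICES,
                include_credentials=True,
            )
            print(connection.name)


asyncio.run(show_default_connection())
```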

sdk/ai/azure-ai-projects/azure/ai/projects/operations/_patch.py

Lines changed: 31 additions & 22 deletions
@@ -87,6 +87,7 @@ def get_chat_completions_client(self, **kwargs) -> "ChatCompletionsClient":
         connection = self._outer_instance.connections.get_default(
             connection_type=ConnectionType.AZURE_AI_SERVICES, include_credentials=True, **kwargs
         )
+        logger.debug("[InferenceOperations.get_chat_completions_client] connection = %s", str(connection))
 
         try:
             from azure.ai.inference import ChatCompletionsClient
@@ -97,11 +98,10 @@ def get_chat_completions_client(self, **kwargs) -> "ChatCompletionsClient":
 
         if use_serverless_connection:
             endpoint = connection.endpoint_url
+            credential_scopes = ["https://ml.azure.com/.default"]
         else:
-            # Be sure to use the Azure resource name here, not the connection name. Connection name is something that
-            # admins can pick when they manually create a new connection (or use bicep). Get the Azure resource name
-            # from the end of the connection id.
-            endpoint = f"https://{connection.id.split('/')[-1]}.services.ai.azure.com/models"
+            endpoint = f"{connection.endpoint_url}/models"
+            credential_scopes = ["https://cognitiveservices.azure.com/.default"]
 
         if connection.authentication_type == AuthenticationType.API_KEY:
             logger.debug(
@@ -112,12 +112,13 @@ def get_chat_completions_client(self, **kwargs) -> "ChatCompletionsClient":
 
             client = ChatCompletionsClient(endpoint=endpoint, credential=AzureKeyCredential(connection.key))
         elif connection.authentication_type == AuthenticationType.ENTRA_ID:
-            # MaaS models do not yet support EntraID auth
             logger.debug(
                 "[InferenceOperations.get_chat_completions_client] "
                 + "Creating ChatCompletionsClient using Entra ID authentication"
             )
-            client = ChatCompletionsClient(endpoint=endpoint, credential=connection.properties.token_credential)
+            client = ChatCompletionsClient(
+                endpoint=endpoint, credential=connection.token_credential, credential_scopes=credential_scopes
+            )
         elif connection.authentication_type == AuthenticationType.SAS:
             logger.debug(
                 "[InferenceOperations.get_chat_completions_client] "
@@ -161,6 +162,7 @@ def get_embeddings_client(self, **kwargs) -> "EmbeddingsClient":
         connection = self._outer_instance.connections.get_default(
             connection_type=ConnectionType.AZURE_AI_SERVICES, include_credentials=True, **kwargs
         )
+        logger.debug("[InferenceOperations.get_embeddings_client] connection = %s", str(connection))
 
         try:
             from azure.ai.inference import EmbeddingsClient
@@ -171,11 +173,10 @@ def get_embeddings_client(self, **kwargs) -> "EmbeddingsClient":
 
         if use_serverless_connection:
             endpoint = connection.endpoint_url
+            credential_scopes = ["https://ml.azure.com/.default"]
         else:
-            # Be sure to use the Azure resource name here, not the connection name. Connection name is something that
-            # admins can pick when they manually create a new connection (or use bicep). Get the Azure resource name
-            # from the end of the connection id.
-            endpoint = f"https://{connection.id.split('/')[-1]}.services.ai.azure.com/models"
+            endpoint = f"{connection.endpoint_url}/models"
+            credential_scopes = ["https://cognitiveservices.azure.com/.default"]
 
         if connection.authentication_type == AuthenticationType.API_KEY:
             logger.debug(
@@ -185,11 +186,12 @@ def get_embeddings_client(self, **kwargs) -> "EmbeddingsClient":
 
             client = EmbeddingsClient(endpoint=endpoint, credential=AzureKeyCredential(connection.key))
         elif connection.authentication_type == AuthenticationType.ENTRA_ID:
-            # MaaS models do not yet support EntraID auth
             logger.debug(
                 "[InferenceOperations.get_embeddings_client] Creating EmbeddingsClient using Entra ID authentication"
             )
-            client = EmbeddingsClient(endpoint=endpoint, credential=connection.properties.token_credential)
+            client = EmbeddingsClient(
+                endpoint=endpoint, credential=connection.token_credential, credential_scopes=credential_scopes
+            )
         elif connection.authentication_type == AuthenticationType.SAS:
             logger.debug(
                 "[InferenceOperations.get_embeddings_client] Creating EmbeddingsClient using SAS authentication"
@@ -225,6 +227,7 @@ def get_azure_openai_client(self, *, api_version: Optional[str] = None, **kwargs
         connection = self._outer_instance.connections.get_default(
             connection_type=ConnectionType.AZURE_OPEN_AI, include_credentials=True, **kwargs
         )
+        logger.debug("[InferenceOperations.get_azure_openai_client] connection = %s", str(connection))
 
         try:
             from openai import AzureOpenAI
@@ -240,18 +243,16 @@ def get_azure_openai_client(self, *, api_version: Optional[str] = None, **kwargs
             client = AzureOpenAI(
                 api_key=connection.key, azure_endpoint=connection.endpoint_url, api_version=api_version
             )
-        elif connection.authentication_type == {AuthenticationType.ENTRA_ID, AuthenticationType.SAS}:
+        elif connection.authentication_type == AuthenticationType.ENTRA_ID:
+            logger.debug(
+                "[InferenceOperations.get_azure_openai_client] " + "Creating AzureOpenAI using Entra ID authentication"
+            )
             try:
                 from azure.identity import get_bearer_token_provider
             except ModuleNotFoundError as e:
                 raise ModuleNotFoundError(
                     "azure.identity package not installed. Please install it using 'pip install azure.identity'"
                 ) from e
-            if connection.authentication_type == AuthenticationType.ENTRA_ID:
-                auth = "Creating AzureOpenAI using Entra ID authentication"
-            else:
-                auth = "Creating AzureOpenAI using SAS authentication"
-            logger.debug("[InferenceOperations.get_azure_openai_client] %s", auth)
             client = AzureOpenAI(
                 # See https://learn.microsoft.com/python/api/azure-identity/azure.identity?view=azure-python#azure-identity-get-bearer-token-provider # pylint: disable=line-too-long
                 azure_ad_token_provider=get_bearer_token_provider(
@@ -260,6 +261,13 @@ def get_azure_openai_client(self, *, api_version: Optional[str] = None, **kwargs
                 azure_endpoint=connection.endpoint_url,
                 api_version=api_version,
             )
+        elif connection.authentication_type == AuthenticationType.SAS:
+            logger.debug(
+                "[InferenceOperations.get_azure_openai_client] " + "Creating AzureOpenAI using SAS authentication"
+            )
+            raise ValueError(
+                "Getting an AzureOpenAI client from a connection with SAS authentication is not yet supported"
+            )
         else:
             raise ValueError("Unknown authentication type")
 
@@ -290,16 +298,17 @@ def get_default(
         if not connection_type:
             raise ValueError("You must specify an connection type")
         # Since there is no notion of default connection at the moment, list all connections in the category
-        # and return the first one
+        # and return the first one (index 0), unless overridden by the environment variable DEFAULT_CONNECTION_INDEX.
         connection_properties_list = self.list(connection_type=connection_type, **kwargs)
         if len(connection_properties_list) > 0:
+            default_connection_index = int(os.getenv("DEFAULT_CONNECTION_INDEX", "0"))
             if include_credentials:
                 return self.get(
-                    connection_name=connection_properties_list[0].name,
+                    connection_name=connection_properties_list[default_connection_index].name,
                     include_credentials=include_credentials,
                     **kwargs,
                 )
-            return connection_properties_list[0]
+            return connection_properties_list[default_connection_index]
         raise ResourceNotFoundError(f"No connection of type {connection_type} found")
 
     @distributed_trace
@@ -2081,7 +2090,7 @@ def submit_tool_outputs_to_stream(
 
         elif tool_outputs is not _Unset:
             response = super().submit_tool_outputs_to_run(
-                thread_id, run_id, tool_outputs=tool_outputs, stream_parameter=False, stream=False, **kwargs
+                thread_id, run_id, tool_outputs=tool_outputs, stream_parameter=True, stream=True, **kwargs
            )
 
         elif isinstance(body, io.IOBase):
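Both the synchronous and asynchronous patches replace the connection-id-derived endpoint with one built from `endpoint_url`, and pass explicit `credential_scopes` to the `azure-ai-inference` clients. The helper below is purely illustrative (it is not part of the SDK) and restates that selection rule in isolation; the sample endpoint URL is made up.

```python
from typing import List, Tuple


def inference_endpoint_and_scopes(endpoint_url: str, use_serverless_connection: bool) -> Tuple[str, List[str]]:
    """Mirror how the patched operations pick the endpoint and Entra ID token scope."""
    if use_serverless_connection:
        # Serverless connections already expose the full inference endpoint and
        # use the Azure Machine Learning scope for Entra ID tokens.
        return endpoint_url, ["https://ml.azure.com/.default"]
    # Azure AI Services connections: route requests to "/models" and use the
    # Cognitive Services scope.
    return f"{endpoint_url}/models", ["https://cognitiveservices.azure.com/.default"]


endpoint, scopes = inference_endpoint_and_scopes("https://my-resource.services.ai.azure.com", False)
print(endpoint)  # https://my-resource.services.ai.azure.com/models
print(scopes)    # ['https://cognitiveservices.azure.com/.default']
```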

sdk/ai/azure-ai-projects/pyproject.toml

Lines changed: 1 addition & 1 deletion
@@ -17,7 +17,7 @@ exclude = [
     "sample_chat_completions_with_azure_ai_inference_client_and_console_tracing\\.py",
     "sample_chat_completions_with_azure_ai_inference_client_and_azure_monitor_tracing\\.py",
     "sample_chat_completions_with_azure_ai_inference_client\\.py",
-    "sample_connections.py"
+    "sample_inference_client_from_connection.py"
 ]
 warn_unused_configs = true
 ignore_missing_imports = true
