@@ -61,7 +61,7 @@ Let's begin instrumenting our agent with OpenTelemetry tracing, by starting off
```python
from azure.ai.projects import AIProjectClient
from azure.identity import DefaultAzureCredential
-project_client = AIProjectClient.from_connection_string(
+project_client = AIProjectClient(
    credential=DefaultAzureCredential(),
    endpoint=os.environ["PROJECT_ENDPOINT"],
)
@@ -71,12 +71,8 @@ Next, retrieve the connection string from the Application Insights resource conn

```python
from azure.monitor.opentelemetry import configure_azure_monitor
-connection_string = project_client.telemetry.get_connection_string()
-
-if not connection_string:
-    print("Application Insights is not enabled. Enable by going to Tracing in your Azure AI Foundry project.")
-    exit()

+connection_string = project_client.telemetry.get_application_insights_connection_string()
configure_azure_monitor(connection_string=connection_string)  # enable telemetry collection
```

@@ -92,11 +88,11 @@ with tracer.start_as_current_span("example-tracing"):
        name="my-assistant",
        instructions="You are a helpful assistant"
    )
-    thread = project_client.agents.create_thread()
-    message = project_client.agents.create_message(
+    thread = project_client.agents.threads.create()
+    message = project_client.agents.messages.create(
        thread_id=thread.id, role="user", content="Tell me a joke"
    )
-    run = project_client.agents.create_run(thread_id=thread.id, agent_id=agent.id)
+    run = project_client.agents.runs.create_and_process(thread_id=thread.id, agent_id=agent.id)
```

After running your agent, you can [view traces in Azure AI Foundry Portal](#view-traces-in-azure-ai-foundry-portal).
@@ -108,7 +104,8 @@ To connect to [Aspire Dashboard](https://aspiredashboard.com/#start) or another
```bash
pip install azure-core-tracing-opentelemetry opentelemetry-exporter-otlp opentelemetry-sdk
```
-Next, you want to configure tracing for your application.
+
+Next, configure tracing for console output:

```python
from azure.core.settings import settings
@@ -124,18 +121,16 @@ tracer_provider = TracerProvider()
tracer_provider.add_span_processor(SimpleSpanProcessor(span_exporter))
trace.set_tracer_provider(tracer_provider)
```
-Use `enable_telemetry` to begin collecting telemetry.

-```python
-from azure.ai.projects import enable_telemetry
-enable_telemetry(destination=sys.stdout)
+Or modify the above code, based on [Aspire Dashboard](https://aspiredashboard.com/#start), to trace to a local OTLP viewer.
+
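+For example, a minimal sketch, assuming the dashboard is listening on its default OTLP gRPC endpoint (`http://localhost:4317`), would swap the console exporter above for an OTLP exporter:
+
+```python
+from opentelemetry import trace
+from opentelemetry.exporter.otlp.proto.grpc.trace_exporter import OTLPSpanExporter
+from opentelemetry.sdk.trace import TracerProvider
+from opentelemetry.sdk.trace.export import SimpleSpanProcessor
+
+# Assumed local OTLP viewer endpoint; adjust to wherever your collector listens.
+span_exporter = OTLPSpanExporter(endpoint="http://localhost:4317", insecure=True)
+tracer_provider = TracerProvider()
+tracer_provider.add_span_processor(SimpleSpanProcessor(span_exporter))
+trace.set_tracer_provider(tracer_provider)
+```
+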
+Now enable Agent instrumentation and run your Agent:

-# Logging to an OTLP endpoint, change the destination to
-# enable_telemetry(destination="http://localhost:4317")
-```
```python
+from azure.ai.agents.telemetry import AIAgentsInstrumentor
+AIAgentsInstrumentor().instrument()
+
# Start tracing
-from opentelemetry import trace
tracer = trace.get_tracer(__name__)

with tracer.start_as_current_span("example-tracing"):
@@ -144,11 +139,11 @@ with tracer.start_as_current_span("example-tracing"):
        name="my-assistant",
        instructions="You are a helpful assistant"
    )
-    thread = project_client.agents.create_thread()
-    message = project_client.agents.create_message(
+    thread = project_client.agents.threads.create()
+    message = project_client.agents.messages.create(
        thread_id=thread.id, role="user", content="Tell me a joke"
    )
-    run = project_client.agents.create_run(thread_id=thread.id, agent_id=agent.id)
+    run = project_client.agents.runs.create_and_process(thread_id=thread.id, agent_id=agent.id)
```

## Trace custom functions