### Prerequisites
To run this example, you need the following prerequisites:

- [Visual Studio Code](https://code.visualstudio.com/)
- [AI Toolkit extension](https://marketplace.visualstudio.com/items?itemName=ms-ai-toolkit.vscode-ai-toolkit)
- [Azure AI Inference SDK](https://pypi.org/project/azure-ai-inference/)
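
The example below also reads a GitHub token from the `GITHUB_TOKEN` environment variable and relies on the OpenTelemetry SDK and OTLP exporter packages. If you want a quick preflight check before running it, the following sketch (the module and environment-variable names are simply those used in the example code) reports anything that's missing:

```python
# Optional preflight check (not part of the example itself): confirm the
# packages the example imports are installed and GITHUB_TOKEN is set.
import importlib.util
import os

for mod in ("azure.ai.inference", "opentelemetry.sdk", "opentelemetry.exporter.otlp.proto.http"):
    try:
        found = importlib.util.find_spec(mod) is not None
    except ModuleNotFoundError:
        found = False
    print(f"{mod}: {'ok' if found else 'missing'}")

if not os.environ.get("GITHUB_TOKEN"):
    print("GITHUB_TOKEN is not set; export it before running the example.")
```
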
1. In the `my-tracing-app` directory, create a Python file named `main.py`.

    You'll add the code to set up tracing and interact with the Azure AI Inference SDK.

1. Add the following code to `main.py` and save the file:

    ```python
    import os

    ### Set up for OpenTelemetry tracing ###
    # Record prompt and completion content on spans, and route Azure SDK
    # tracing through OpenTelemetry.
    os.environ["AZURE_TRACING_GEN_AI_CONTENT_RECORDING_ENABLED"] = "true"
    os.environ["AZURE_SDK_TRACING_IMPLEMENTATION"] = "opentelemetry"

    from opentelemetry import trace, _events
    from opentelemetry.sdk.resources import Resource
    from opentelemetry.sdk.trace import TracerProvider
    from opentelemetry.sdk.trace.export import BatchSpanProcessor
    from opentelemetry.sdk._logs import LoggerProvider
    from opentelemetry.sdk._logs.export import BatchLogRecordProcessor
    from opentelemetry.exporter.otlp.proto.http.trace_exporter import OTLPSpanExporter
    from opentelemetry.sdk._events import EventLoggerProvider
    from opentelemetry.exporter.otlp.proto.http._log_exporter import OTLPLogExporter

    github_token = os.environ["GITHUB_TOKEN"]

    # Export spans to the local OTLP endpoint over HTTP.
    resource = Resource(attributes={
        "service.name": "opentelemetry-instrumentation-azure-ai-inference"
    })
    provider = TracerProvider(resource=resource)
    otlp_exporter = OTLPSpanExporter(
        endpoint="http://localhost:4318/v1/traces",
    )
    processor = BatchSpanProcessor(otlp_exporter)
    provider.add_span_processor(processor)
    trace.set_tracer_provider(provider)

    # Export log-based events (such as message content) to the same endpoint.
    logger_provider = LoggerProvider(resource=resource)
    logger_provider.add_log_record_processor(
        BatchLogRecordProcessor(OTLPLogExporter(endpoint="http://localhost:4318/v1/logs"))
    )
    _events.set_event_logger_provider(EventLoggerProvider(logger_provider))

    # Instrument the Azure AI Inference SDK so its calls emit spans and events.
    from azure.ai.inference.tracing import AIInferenceInstrumentor
    AIInferenceInstrumentor().instrument()
    ### Set up for OpenTelemetry tracing ###

    from azure.ai.inference import ChatCompletionsClient
    from azure.ai.inference.models import UserMessage
    from azure.ai.inference.models import TextContentItem
    from azure.core.credentials import AzureKeyCredential

    client = ChatCompletionsClient(
        endpoint="https://models.inference.ai.azure.com",
        credential=AzureKeyCredential(github_token),
        api_version="2024-08-01-preview",
    )

    # Send a simple chat request; the instrumented client traces this call.
    response = client.complete(
        messages=[
            UserMessage(content=[
                TextContentItem(text="hi"),
            ]),
        ],
        model="gpt-4.1",
        tools=[],
        response_format="text",
        temperature=1,
        top_p=1,
    )

    print(response.choices[0].message.content)
    ```
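
    The OTLP exporters in this example assume something is listening for traces and logs at `http://localhost:4318`. If you're experimenting without a local collector, one minimal alternative (not part of the original example; it reuses the `provider` object defined above) is to print spans to the terminal with the console exporter from the same `opentelemetry.sdk.trace.export` module:

    ```python
    # Optional: print spans to the terminal instead of relying only on the
    # OTLP endpoint.
    from opentelemetry.sdk.trace.export import ConsoleSpanExporter, SimpleSpanProcessor

    provider.add_span_processor(SimpleSpanProcessor(ConsoleSpanExporter()))
    ```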
1. Run the code.
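
    For example, run `python main.py` from the `my-tracing-app` directory. The model's reply prints to the terminal, and the trace spans and events are exported to the OTLP endpoint configured in `main.py`.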