Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
1 change: 1 addition & 0 deletions dotnet/agent-framework-dotnet.slnx
Original file line number Diff line number Diff line change
Expand Up @@ -171,6 +171,7 @@
</Folder>
<Folder Name="/Samples/GettingStarted/Observability/">
<Project Path="samples/GettingStarted/AgentOpenTelemetry/AgentOpenTelemetry.csproj" />
<Project Path="samples/GettingStarted/Observability/AIClientHttpTrafficTracing/AIClientHttpTrafficTracing.csproj" Id="cb65e098-6154-4996-aa53-faae2ee93842" />
</Folder>
<Folder Name="/Samples/GettingStarted/Workflows/">
<File Path="samples/GettingStarted/Workflows/README.md" />
Expand Down
Original file line number Diff line number Diff line change
@@ -0,0 +1,22 @@
<Project Sdk="Microsoft.NET.Sdk">

  <!-- Console sample demonstrating HTTP request/response logging for AI clients. -->
  <PropertyGroup>
    <OutputType>Exe</OutputType>
    <TargetFrameworks>net10.0</TargetFrameworks>
    <ImplicitUsings>enable</ImplicitUsings>
    <Nullable>enable</Nullable>
  </PropertyGroup>

  <!-- Package versions are centrally managed (no Version attributes here);
       presumably via Directory.Packages.props at the repo root — confirm. -->
  <ItemGroup>
    <PackageReference Include="Azure.AI.OpenAI" />
    <PackageReference Include="Azure.Identity" />
    <PackageReference Include="Microsoft.Extensions.AI.OpenAI" />
    <PackageReference Include="Microsoft.Extensions.Logging.Console" />
  </ItemGroup>

  <!-- Local agent-framework projects used by the sample (CreateAIAgent, ChatClientAgent). -->
  <ItemGroup>
    <ProjectReference Include="..\..\..\..\src\Microsoft.Agents.AI.OpenAI\Microsoft.Agents.AI.OpenAI.csproj" />
    <ProjectReference Include="..\..\..\..\src\Microsoft.Agents.AI\Microsoft.Agents.AI.csproj" />
  </ItemGroup>

</Project>
Original file line number Diff line number Diff line change
@@ -0,0 +1,81 @@
// Copyright (c) Microsoft. All rights reserved.

// This sample shows how to enable **HTTP request/response logging** for LLM calls (including request/response bodies) for any `AIClient`.

using System.ClientModel.Primitives;
using Azure.AI.OpenAI;
using Azure.Identity;
using Microsoft.Agents.AI;
using Microsoft.Extensions.AI;
using Microsoft.Extensions.DependencyInjection;
using Microsoft.Extensions.Logging;

// Required configuration: the Azure OpenAI endpoint must be set; the deployment name is optional.
string endpoint = Environment.GetEnvironmentVariable("AZURE_OPENAI_ENDPOINT") ?? throw new InvalidOperationException("AZURE_OPENAI_ENDPOINT environment variable is not set.");
string deploymentName = Environment.GetEnvironmentVariable("AZURE_OPENAI_DEPLOYMENT_NAME") ?? "gpt-4o-mini";

ServiceCollection services = new();
services.AddLogging(loggingBuilder =>
{
    loggingBuilder.AddConsole();
    loggingBuilder.AddFilter("System.ClientModel.Primitives.MessageLoggingPolicy", LogLevel.Debug); // For Request and Response body logging we need to set Debug level
    /* If used in ASP.NET Core, with appsettings then this can be configured in appsettings.json as:
    {
        "Logging": {
            "LogLevel": {
                "Default": "Information",
                "Microsoft.AspNetCore": "Warning",
                "System.ClientModel.Primitives.MessageLoggingPolicy": "Debug"
            }
        }
    }
    */
});

services.AddChatClient(provider =>
{
    var clientLoggingOptions = new ClientLoggingOptions
    {
        EnableLogging = true, // Enable logging overall
        EnableMessageContentLogging = true, // Enable request and response body logging
        MessageContentSizeLimit = 5000, // Limit size of logged content. If Null or Not set, then default value will be 4 * 1024 characters
        EnableMessageLogging = true, // Logging the Request and Response Url and Header information. If Null or Not set, then default value will be true
        LoggerFactory = provider.GetRequiredService<ILoggerFactory>()
    };
    // WARNING: Do NOT log sensitive headers such as "Authorization" in production or shared environments.
    // By default, sensitive headers are REDACTED. The following example shows how to override this behavior
    // for controlled, non-production testing only.
    clientLoggingOptions.AllowedHeaderNames.Add("Authorization");

    /* Switch to OpenAI Compatible SDK using below code
    var clientOptions = new OpenAIClientOptions()
    {
        Endpoint = new Uri("https://endpoint"),
        ClientLoggingOptions = clientLoggingOptions
    };
    new OpenAIClient(new ApiKeyCredential("<apiKey/accessKey>"), clientOptions)
        .GetChatClient("modelName")
        .AsIChatClient();
    */

    return new AzureOpenAIClient(new Uri(endpoint), new AzureCliCredential(), new AzureOpenAIClientOptions() // Use OpenAIClientOptions of OpenAIClient, similar options for other clients
    {
        ClientLoggingOptions = clientLoggingOptions
    })
        .GetChatClient(deploymentName)
        .AsIChatClient();
});

// Dispose the provider at the end of the program so the console logging provider
// flushes buffered log entries and all registered services are cleaned up.
await using ServiceProvider serviceProvider = services.BuildServiceProvider();

IChatClient chatClient = serviceProvider.GetRequiredService<IChatClient>();
ChatClientAgent pirateAssistant = chatClient.CreateAIAgent("You are a pirate assistant. Answer questions in short pirate speak.");

string userInput = "Who are you?";
Console.WriteLine($"You: {userInput}\n");
AgentRunResponse response = await pirateAssistant.RunAsync(userInput);
Console.WriteLine($"\nPirate Assistant: {response}");

/*await foreach (var item in pirateAssistant.RunStreamingAsync(userInput)) // For Streaming responses (RunStreamingAsync), there will be multiple log entries
{
    Console.Write(item);
}*/
Original file line number Diff line number Diff line change
@@ -0,0 +1,78 @@
# AIClient HTTP Traffic Tracing

This sample shows how to enable **HTTP request/response logging** for LLM calls (including request/response bodies) for any `AIClient`.

It uses the `ClientLoggingOptions` pipeline to print HTTP details to the `ILogger` so you can troubleshoot prompts, headers, and responses.

## Prerequisites

- Azure CLI login (this sample uses `AzureCliCredential`):
- `az login`
- Environment variables:
- `AZURE_OPENAI_ENDPOINT` (e.g. `https://{resource-name}.openai.azure.com/`)
- `AZURE_OPENAI_DEPLOYMENT_NAME` (optional; defaults to `gpt-4o-mini`)

To use an OpenAI-compatible SDK instead of Azure OpenAI, switch to the code below:
```csharp
var clientOptions = new OpenAIClientOptions()
{
Endpoint = new Uri("https://endpoint"),
ClientLoggingOptions = clientLoggingOptions
};
new OpenAIClient(new ApiKeyCredential("<apiKey/accessKey>"), clientOptions)
.GetChatClient("modelName")
.AsIChatClient();
```

## Run

From the repo root:

```powershell
cd samples/GettingStarted/Observability/AIClientHttpTrafficTracing
dotnet run
```

## Enable HTTP traffic logging

This sample enables logging in two places:

1. **Enable HTTP logging on the client**

In [Program.cs](Program.cs), the sample configures:

- `ClientLoggingOptions.EnableLogging = true`
- `ClientLoggingOptions.EnableMessageLogging = true` (URL + headers + query parameters)
- `ClientLoggingOptions.EnableMessageContentLogging = true` (request/response bodies)
- `ClientLoggingOptions.MessageContentSizeLimit` to cap how much body content is written

`ClientLoggingOptions` is a common pattern across SDK clients that expose these options (for example, via a `ClientLoggingOptions` property on client options like `AzureOpenAIClientOptions`).

2. **Raise the log level to `Debug` only if you want request/response bodies**

URL, header, and query-parameter logging (step 1) is emitted at the `Information` level, so step 2 is not needed for it.

Request/response *body* logging is emitted at `Debug` level by the underlying message logging policy. The sample sets:

- `System.ClientModel.Primitives.MessageLoggingPolicy` → `Debug`

## Security notes

- Logging bodies can include sensitive prompt/response data. Use only in dev/test.
- Headers like `Authorization` are **redacted by default**. While it is technically possible to allow logging a sensitive header (for example, via `clientLoggingOptions.AllowedHeaderNames.Add("Authorization")`), **do not enable this in production or long-lived environments**. If you must temporarily log such headers for debugging, do so only in tightly controlled, short-lived sessions, treat the logs as secrets, and securely delete them immediately after use.

## Using ASP.NET Core configuration

If you’re using ASP.NET Core, you can set the log level in `appsettings.json` instead of calling `AddFilter`, for example:

```json
{
"Logging": {
"LogLevel": {
"Default": "Information",
"Microsoft.AspNetCore": "Warning",
"System.ClientModel.Primitives.MessageLoggingPolicy": "Debug"
}
}
}
```
Loading