Skip to content

Commit e3adb2c

Browse files
committed
Simplify console HTTP pipeline logging using Observe/Observable
C# 14 can provide an extension method for OpenAIClientOptions directly, so we expose that instead of our own ClientOptions (less API surface). The new `Observable` creates and sets up the observers. If you already have an instance, you'd call `Observe` on it instead. Remove unnecessary console JSON pipeline policy since we can just leverage the general purpose Observe now.
1 parent 5d54e1a commit e3adb2c

File tree

8 files changed

+121
-148
lines changed

8 files changed

+121
-148
lines changed

readme.md

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -133,7 +133,7 @@ var requests = new List<JsonNode>();
133133
var responses = new List<JsonNode>();
134134
var openai = new OpenAIClient(
135135
Env.Get("OPENAI_API_KEY")!,
136-
ClientOptions.Observe(requests.Add, responses.Add));
136+
OpenAIClientOptions.Observable(requests.Add, responses.Add));
137137
```
138138

139139

src/AI.Tests/Extensions/PipelineTestOutput.cs

Lines changed: 17 additions & 8 deletions
Original file line numberDiff line numberDiff line change
@@ -3,19 +3,28 @@
33

44
namespace Devlooped.Extensions.AI;
55

6-
public static class PipelineTestOutput
6+
public static class PipelineOutput
77
{
8-
static readonly JsonSerializerOptions options = new(JsonSerializerDefaults.General)
8+
extension<TOptions>(TOptions) where TOptions : ClientPipelineOptions, new()
9+
{
10+
public static TOptions WriteTo(ITestOutputHelper output)
11+
=> new TOptions().WriteTo(output);
12+
}
13+
}
14+
15+
public static class PipelineOutputExtensions
16+
{
17+
static readonly JsonSerializerOptions jsonOptions = new(JsonSerializerDefaults.General)
918
{
1019
WriteIndented = true,
1120
};
1221

13-
public static TOptions WriteTo<TOptions>(this TOptions pipelineOptions, ITestOutputHelper output = default)
14-
where TOptions : ClientPipelineOptions
22+
extension<TOptions>(TOptions options) where TOptions : ClientPipelineOptions
1523
{
16-
return pipelineOptions.Observe(
17-
request => output.WriteLine(request.ToJsonString(options)),
18-
response => output.WriteLine(response.ToJsonString(options))
19-
);
24+
public TOptions WriteTo(ITestOutputHelper output)
25+
=> options.Observe(
26+
request => output.WriteLine(request.ToJsonString(jsonOptions)),
27+
response => output.WriteLine(response.ToJsonString(jsonOptions))
28+
);
2029
}
2130
}

src/AI.Tests/GrokTests.cs

Lines changed: 6 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -51,9 +51,9 @@ public async Task GrokInvokesToolAndSearch()
5151
var requests = new List<JsonNode>();
5252
var responses = new List<JsonNode>();
5353

54-
var grok = new GrokChatClient(Configuration["XAI_API_KEY"]!, "grok-3",
55-
ClientOptions.Observable(requests.Add, responses.Add)
56-
.WriteTo(output))
54+
var grok = new GrokChatClient(Configuration["XAI_API_KEY"]!, "grok-3", OpenAI.OpenAIClientOptions
55+
.Observable(requests.Add, responses.Add)
56+
.WriteTo(output))
5757
.AsBuilder()
5858
.UseFunctionInvocation()
5959
.Build();
@@ -105,9 +105,9 @@ public async Task GrokInvokesHostedSearchTool()
105105
var requests = new List<JsonNode>();
106106
var responses = new List<JsonNode>();
107107

108-
var grok = new GrokChatClient(Configuration["XAI_API_KEY"]!, "grok-3",
109-
ClientOptions.Observable(requests.Add, responses.Add)
110-
.WriteTo(output));
108+
var grok = new GrokChatClient(Configuration["XAI_API_KEY"]!, "grok-3", OpenAI.OpenAIClientOptions
109+
.Observable(requests.Add, responses.Add)
110+
.WriteTo(output));
111111

112112
var options = new ChatOptions
113113
{

src/AI.Tests/OpenAITests.cs

Lines changed: 3 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -1,5 +1,6 @@
11
using System.Text.Json.Nodes;
22
using Microsoft.Extensions.AI;
3+
using OpenAI;
34
using static ConfigurationExtensions;
45

56
namespace Devlooped.Extensions.AI;
@@ -15,7 +16,7 @@ public async Task OpenAISwitchesModel()
1516
};
1617

1718
var chat = new OpenAIChatClient(Configuration["OPENAI_API_KEY"]!, "gpt-4.1-nano",
18-
new OpenAI.OpenAIClientOptions().WriteTo(output));
19+
OpenAIClientOptions.WriteTo(output));
1920

2021
var options = new ChatOptions
2122
{
@@ -41,7 +42,7 @@ public async Task OpenAIThinks()
4142
var requests = new List<JsonNode>();
4243

4344
var chat = new OpenAIChatClient(Configuration["OPENAI_API_KEY"]!, "o3-mini",
44-
ClientOptions.Observable(requests.Add).WriteTo(output));
45+
OpenAIClientOptions.Observable(requests.Add).WriteTo(output));
4546

4647
var options = new ChatOptions
4748
{

src/AI/ClientOptions.cs

Lines changed: 0 additions & 31 deletions
This file was deleted.

src/AI/ClientPipelineExtensions.cs

Lines changed: 36 additions & 20 deletions
Original file line numberDiff line numberDiff line change
@@ -4,28 +4,44 @@
44

55
namespace Devlooped.Extensions.AI;
66

7+
/// <summary>
8+
/// Provides extension methods for <see cref="ClientPipelineOptions"/>.
9+
/// </summary>
710
public static class ClientPipelineExtensions
811
{
9-
/// <summary>
10-
/// Adds a <see cref="PipelinePolicy"/> that observes requests and response
11-
/// messages from the <see cref="ClientPipeline"/> and notifies the provided
12-
/// callbacks with the JSON representation of the HTTP messages.
13-
/// </summary>
14-
/// <typeparam name="TOptions">The options type to configure for HTTP logging.</typeparam>
15-
/// <param name="pipelineOptions">The options instance to configure.</param>
16-
/// <param name="onRequest">A callback to process the <see cref="JsonNode"/> that was sent.</param>
17-
/// <param name="onResponse">A callback to process the <see cref="JsonNode"/> that was received.</param>
18-
/// <remarks>
19-
/// This is the lowest-level logging after all chat pipeline processing has been done.
20-
/// If no <see cref="JsonNode"/> can be parsed from the request or response,
21-
/// the callbacks will not be invoked.
22-
/// </remarks>
23-
public static TOptions Observe<TOptions>(this TOptions pipelineOptions,
24-
Action<JsonNode>? onRequest = default, Action<JsonNode>? onResponse = default)
25-
where TOptions : ClientPipelineOptions
12+
extension<TOptions>(TOptions) where TOptions : ClientPipelineOptions, new()
2613
{
27-
pipelineOptions.AddPolicy(new ObservePipelinePolicy(onRequest, onResponse), PipelinePosition.BeforeTransport);
28-
return pipelineOptions;
14+
/// <summary>
15+
/// Creates an instance of the <see cref="TOptions"/> that can be observed for requests and responses.
16+
/// </summary>
17+
/// <param name="onRequest">A callback to process the <see cref="JsonNode"/> that was sent.</param>
18+
/// <param name="onResponse">A callback to process the <see cref="JsonNode"/> that was received.</param>
19+
/// <returns>A new instance of <typeparamref name="TOptions"/>.</returns>
20+
public static TOptions Observable(Action<JsonNode>? onRequest = default, Action<JsonNode>? onResponse = default)
21+
=> new TOptions().Observe(onRequest, onResponse);
22+
}
23+
24+
extension<TOptions>(TOptions options) where TOptions : ClientPipelineOptions
25+
{
26+
/// <summary>
27+
/// Adds a <see cref="PipelinePolicy"/> that observes requests and response
28+
/// messages from the <see cref="ClientPipeline"/> and notifies the provided
29+
/// callbacks with the JSON representation of the HTTP messages.
30+
/// </summary>
31+
/// <typeparam name="TOptions">The options type to configure for HTTP logging.</typeparam>
32+
/// <param name="pipelineOptions">The options instance to configure.</param>
33+
/// <param name="onRequest">A callback to process the <see cref="JsonNode"/> that was sent.</param>
34+
/// <param name="onResponse">A callback to process the <see cref="JsonNode"/> that was received.</param>
35+
/// <remarks>
36+
/// This is the lowest-level logging after all chat pipeline processing has been done.
37+
/// If no <see cref="JsonNode"/> can be parsed from the request or response,
38+
/// the callbacks will not be invoked.
39+
/// </remarks>
40+
public TOptions Observe(Action<JsonNode>? onRequest = default, Action<JsonNode>? onResponse = default)
41+
{
42+
options.AddPolicy(new ObservePipelinePolicy(onRequest, onResponse), PipelinePosition.BeforeTransport);
43+
return options;
44+
}
2945
}
3046

3147
class ObservePipelinePolicy(Action<JsonNode>? onRequest = default, Action<JsonNode>? onResponse = default) : PipelinePolicy
@@ -78,4 +94,4 @@ void NotifyObservers(PipelineMessage message)
7894
}
7995
}
8096
}
81-
}
97+
}

src/AI/Console/JsonConsoleLoggingExtensions.cs

Lines changed: 35 additions & 80 deletions
Original file line numberDiff line numberDiff line change
@@ -12,97 +12,52 @@ namespace Microsoft.Extensions.AI;
1212
[EditorBrowsable(EditorBrowsableState.Never)]
1313
public static class JsonConsoleLoggingExtensions
1414
{
15-
/// <summary>
16-
/// Sets a <see cref="ClientPipelineOptions.Transport"/> that renders HTTP messages to the
17-
/// console using Spectre.Console rich JSON formatting, but only if the console is interactive.
18-
/// </summary>
19-
/// <typeparam name="TOptions">The options type to configure for HTTP logging.</typeparam>
20-
/// <param name="pipelineOptions">The options instance to configure.</param>
21-
/// <remarks>
22-
/// NOTE: this is the lowest-level logging after all chat pipeline processing has been done.
23-
/// <para>
24-
/// If the options already provide a transport, it will be wrapped with the console
25-
/// logging transport to minimize the impact on existing configurations.
26-
/// </para>
27-
/// </remarks>
28-
public static TOptions UseJsonConsoleLogging<TOptions>(this TOptions pipelineOptions, JsonConsoleOptions? consoleOptions = null)
29-
where TOptions : ClientPipelineOptions
15+
extension<TOptions>(TOptions pipelineOptions) where TOptions : ClientPipelineOptions
3016
{
31-
consoleOptions ??= JsonConsoleOptions.Default;
32-
33-
if (consoleOptions.InteractiveConfirm && ConsoleExtensions.IsConsoleInteractive && !AnsiConsole.Confirm("Do you want to enable rich JSON console logging for HTTP pipeline messages?"))
34-
return pipelineOptions;
35-
36-
if (consoleOptions.InteractiveOnly && !ConsoleExtensions.IsConsoleInteractive)
37-
return pipelineOptions;
38-
39-
pipelineOptions.AddPolicy(new JsonConsoleLoggingPipelinePolicy(consoleOptions), PipelinePosition.BeforeTransport);
40-
return pipelineOptions;
41-
}
42-
43-
/// <summary>
44-
/// Renders chat messages and responses to the console using Spectre.Console rich JSON formatting.
45-
/// </summary>
46-
/// <param name="builder">The builder in use.</param>
47-
/// <remarks>
48-
/// Confirmation will be asked if the console is interactive, otherwise, it will be
49-
/// enabled unconditionally.
50-
/// </remarks>
51-
public static ChatClientBuilder UseJsonConsoleLogging(this ChatClientBuilder builder, JsonConsoleOptions? consoleOptions = null)
52-
{
53-
consoleOptions ??= JsonConsoleOptions.Default;
17+
/// <summary>
18+
/// Observes the HTTP request and response messages from the underlying pipeline and renders them
19+
/// to the console using Spectre.Console rich JSON formatting, but only if the console is interactive.
20+
/// </summary>
21+
/// <typeparam name="TOptions">The options type to configure for HTTP logging.</typeparam>
22+
/// <param name="pipelineOptions">The options instance to configure.</param>
23+
/// <see cref="ClientPipelineExtensions.Observe"/>
24+
public TOptions UseJsonConsoleLogging(JsonConsoleOptions? consoleOptions = null)
25+
{
26+
consoleOptions ??= JsonConsoleOptions.Default;
5427

55-
if (consoleOptions.InteractiveConfirm && ConsoleExtensions.IsConsoleInteractive && !AnsiConsole.Confirm("Do you want to enable rich JSON console logging for HTTP pipeline messages?"))
56-
return builder;
28+
if (consoleOptions.InteractiveConfirm && ConsoleExtensions.IsConsoleInteractive && !AnsiConsole.Confirm("Do you want to enable rich JSON console logging for HTTP pipeline messages?"))
29+
return pipelineOptions;
5730

58-
if (consoleOptions.InteractiveOnly && !ConsoleExtensions.IsConsoleInteractive)
59-
return builder;
31+
if (consoleOptions.InteractiveOnly && !ConsoleExtensions.IsConsoleInteractive)
32+
return pipelineOptions;
6033

61-
return builder.Use(inner => new JsonConsoleLoggingChatClient(inner, consoleOptions));
34+
return pipelineOptions.Observe(
35+
request => AnsiConsole.Write(consoleOptions.CreatePanel(request)),
36+
response => AnsiConsole.Write(consoleOptions.CreatePanel(response)));
37+
}
6238
}
6339

64-
class JsonConsoleLoggingPipelinePolicy(JsonConsoleOptions consoleOptions) : PipelinePolicy
40+
extension(ChatClientBuilder builder)
6541
{
66-
public override void Process(PipelineMessage message, IReadOnlyList<PipelinePolicy> pipeline, int currentIndex)
42+
/// <summary>
43+
/// Renders chat messages and responses to the console using Spectre.Console rich JSON formatting.
44+
/// </summary>
45+
/// <param name="builder">The builder in use.</param>
46+
/// <remarks>
47+
/// Confirmation will be asked if the console is interactive, otherwise, it will be
48+
/// enabled unconditionally.
49+
/// </remarks>
50+
public ChatClientBuilder UseJsonConsoleLogging(JsonConsoleOptions? consoleOptions = null)
6751
{
68-
message.BufferResponse = true;
69-
ProcessNext(message, pipeline, currentIndex);
70-
71-
if (message.Request.Content is not null)
72-
{
73-
using var memory = new MemoryStream();
74-
message.Request.Content.WriteTo(memory);
75-
memory.Position = 0;
76-
using var reader = new StreamReader(memory);
77-
var content = reader.ReadToEnd();
78-
AnsiConsole.Write(consoleOptions.CreatePanel(content));
79-
}
80-
81-
if (message.Response != null)
82-
{
83-
AnsiConsole.Write(consoleOptions.CreatePanel(message.Response.Content.ToString()));
84-
}
85-
}
52+
consoleOptions ??= JsonConsoleOptions.Default;
8653

87-
public override async ValueTask ProcessAsync(PipelineMessage message, IReadOnlyList<PipelinePolicy> pipeline, int currentIndex)
88-
{
89-
message.BufferResponse = true;
90-
await ProcessNextAsync(message, pipeline, currentIndex);
54+
if (consoleOptions.InteractiveConfirm && ConsoleExtensions.IsConsoleInteractive && !AnsiConsole.Confirm("Do you want to enable rich JSON console logging for HTTP pipeline messages?"))
55+
return builder;
9156

92-
if (message.Request.Content is not null)
93-
{
94-
using var memory = new MemoryStream();
95-
message.Request.Content.WriteTo(memory);
96-
memory.Position = 0;
97-
using var reader = new StreamReader(memory);
98-
var content = await reader.ReadToEndAsync();
99-
AnsiConsole.Write(consoleOptions.CreatePanel(content));
100-
}
57+
if (consoleOptions.InteractiveOnly && !ConsoleExtensions.IsConsoleInteractive)
58+
return builder;
10159

102-
if (message.Response != null)
103-
{
104-
AnsiConsole.Write(consoleOptions.CreatePanel(message.Response.Content.ToString()));
105-
}
60+
return builder.Use(inner => new JsonConsoleLoggingChatClient(inner, consoleOptions));
10661
}
10762
}
10863

src/AI/Console/JsonConsoleOptions.cs

Lines changed: 23 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -91,6 +91,29 @@ internal Panel CreatePanel(string json)
9191
return panel;
9292
}
9393

94+
internal Panel CreatePanel(JsonNode node)
95+
{
96+
string json;
97+
98+
// Determine if we need to pre-process the JSON node based on the settings.
99+
if (TruncateLength.HasValue || !IncludeAdditionalProperties)
100+
{
101+
json = node.ToShortJsonString(TruncateLength, IncludeAdditionalProperties);
102+
}
103+
else
104+
{
105+
// i.e. we had no pre-processing to do
106+
json = node.ToJsonString();
107+
}
108+
109+
var panel = new Panel(WrapLength.HasValue ? new WrappedJsonText(json, WrapLength.Value) : new JsonText(json))
110+
{
111+
Border = Border,
112+
BorderStyle = BorderStyle,
113+
};
114+
return panel;
115+
}
116+
94117
internal Panel CreatePanel(object value)
95118
{
96119
string? json = null;

0 commit comments

Comments
 (0)