Commit f3b70ea

Add support for OpenAI web search options and Grok compat
* Add OpenAI and Grok subnamespaces to organize functionality by provider area.
* Add OpenAI WebSearchTool-specific extensions under the new namespace: region, city, timezone, and context size.
* Add cross-provider compatibility by interpreting WebSearchTool as GrokSearch(Auto) with a GrokWebSource(country).
1 parent 64493d5 commit f3b70ea
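
As a quick illustration of the cross-provider bullet above, here is a minimal sketch (not part of this commit's diff; the constructor shapes mirror the updated tests, and the namespace placement of `WebSearchTool` is assumed) of the same tool driving Grok's live search:

```csharp
// Sketch only: passing the OpenAI-flavored WebSearchTool to a Grok client.
using Devlooped.Extensions.AI;
using Devlooped.Extensions.AI.Grok;
using Microsoft.Extensions.AI;

var grok = new GrokChatClient(
    Environment.GetEnvironmentVariable("XAI_API_KEY")!, "grok-3",
    new OpenAI.OpenAIClientOptions());

var options = new ChatOptions
{
    // On OpenAI this becomes web search options; against Grok it should be
    // interpreted as GrokSearch.Auto with a web source limited to the country.
    Tools = [new WebSearchTool("AR")]
};

var response = await grok.GetResponseAsync("Ski conditions near Bariloche today?", options);
Console.WriteLine(response.Text);
```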

File tree

12 files changed: +165 -71 lines

readme.md

Lines changed: 15 additions & 3 deletions
@@ -148,19 +148,31 @@ var options = new ChatOptions
 var response = await chat.GetResponseAsync(messages, options);
 ```
 
+### Web Search
+
 Similar to the Grok client, we provide the `WebSearchTool` to enable search customization
 in OpenAI too:
 
 ```csharp
 var options = new ChatOptions
 {
     // 👇 search in Argentina, Bariloche region
-    Tools = [new WebSearchTool("AR") { Region = "Bariloche" }]
+    Tools = [new WebSearchTool("AR")
+    {
+        Region = "Bariloche", // 👈 Bariloche region
+        TimeZone = "America/Argentina/Buenos_Aires", // 👈 IANA timezone
+        ContextSize = WebSearchContextSize.High // 👈 high search context size
+    }]
 };
 ```
 
-If country/region hints to the model are not needed, you can use the built-in M.E.AI
-`HostedWebSearchTool` instead, which is a more generic tool.
+> [!NOTE]
+> This enables all features supported by the [Web search](https://platform.openai.com/docs/guides/tools-web-search)
+> feature in OpenAI.
+
+If advanced search settings are not needed, you can use the built-in M.E.AI `HostedWebSearchTool`
+instead, which is a more generic tool and provides the basics out of the box.
+
 
 ## Observing Request/Response
 
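
For contrast with the readme snippet above, a minimal sketch of the generic path it mentions, using the built-in M.E.AI `HostedWebSearchTool` (same `chat` and `messages` as in the readme example; no provider-specific hints):

```csharp
// Sketch: the generic built-in tool, with no country/region/timezone/context hints.
using Microsoft.Extensions.AI;

var options = new ChatOptions
{
    Tools = [new HostedWebSearchTool()]
};

var response = await chat.GetResponseAsync(messages, options);
```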

src/AI.Tests/GrokTests.cs

Lines changed: 5 additions & 3 deletions
@@ -1,6 +1,8 @@
 using System.Text.Json.Nodes;
+using Devlooped.Extensions.AI.Grok;
 using Microsoft.Extensions.AI;
 using static ConfigurationExtensions;
+using OpenAIClientOptions = OpenAI.OpenAIClientOptions;
 
 namespace Devlooped.Extensions.AI;
 
@@ -51,7 +53,7 @@ public async Task GrokInvokesToolAndSearch()
 var requests = new List<JsonNode>();
 var responses = new List<JsonNode>();
 
-var grok = new GrokChatClient(Configuration["XAI_API_KEY"]!, "grok-3", OpenAI.OpenAIClientOptions
+var grok = new GrokChatClient(Configuration["XAI_API_KEY"]!, "grok-3", OpenAIClientOptions
     .Observable(requests.Add, responses.Add)
     .WriteTo(output))
     .AsBuilder()
@@ -105,7 +107,7 @@ public async Task GrokInvokesHostedSearchTool()
 var requests = new List<JsonNode>();
 var responses = new List<JsonNode>();
 
-var grok = new GrokChatClient(Configuration["XAI_API_KEY"]!, "grok-3", OpenAI.OpenAIClientOptions
+var grok = new GrokChatClient(Configuration["XAI_API_KEY"]!, "grok-3", OpenAIClientOptions
     .Observable(requests.Add, responses.Add)
     .WriteTo(output));
 
@@ -185,7 +187,7 @@ public async Task GrokInvokesSpecificSearchUrl()
 var requests = new List<JsonNode>();
 var responses = new List<JsonNode>();
 
-var grok = new GrokChatClient(Configuration["XAI_API_KEY"]!, "grok-3", OpenAI.OpenAIClientOptions
+var grok = new GrokChatClient(Configuration["XAI_API_KEY"]!, "grok-3", OpenAIClientOptions
    .Observable(requests.Add, responses.Add)
    .WriteTo(output));
 
src/AI.Tests/OpenAITests.cs

Lines changed: 19 additions & 12 deletions
@@ -1,6 +1,8 @@
 using System.Text.Json.Nodes;
+using Devlooped.Extensions.AI.OpenAI;
 using Microsoft.Extensions.AI;
 using OpenAI;
+using OpenAI.Responses;
 using static ConfigurationExtensions;
 
 namespace Devlooped.Extensions.AI;
@@ -68,7 +70,7 @@ public async Task OpenAIThinks()
 }
 
 [SecretsFact("OPENAI_API_KEY")]
-public async Task WebSearchCountry()
+public async Task WebSearchCountryHighContext()
 {
     var messages = new Chat()
     {
@@ -94,23 +96,28 @@ public async Task WebSearchCountry()
 
     var options = new ChatOptions
     {
-        Tools = [new WebSearchTool("AR") { Region = "Bariloche" }]
+        Tools = [new WebSearchTool("AR")
+        {
+            Region = "Bariloche",
+            TimeZone = "America/Argentina/Buenos_Aires",
+            ContextSize = WebSearchContextSize.High
+        }]
     };
 
     var response = await chat.GetResponseAsync(messages, options);
     var text = response.Text;
 
-    // Citations include catedralaltapatagonia.com at least as a web search source
-    Assert.Single(responses);
-    var node = responses[0];
-    Assert.NotNull(node);
-    var citations = Assert.IsType<JsonArray>(node["citations"], false);
-    var catedral = citations.Where(x => x != null).Any(x => x!.ToString().Contains("catedralaltapatagonia.com", StringComparison.Ordinal));
+    var raw = Assert.IsType<OpenAIResponse>(response.RawRepresentation);
+    Assert.NotEmpty(raw.OutputItems.OfType<WebSearchCallResponseItem>());
 
-    Assert.True(catedral, "Expected at least one citation to catedralaltapatagonia.com");
+    var assistant = raw.OutputItems.OfType<MessageResponseItem>().Where(x => x.Role == MessageRole.Assistant).FirstOrDefault();
+    Assert.NotNull(assistant);
 
-    // Uses the default model set by the client when we asked for it
-    Assert.Equal("grok-3", response.ModelId);
-}
+    var content = Assert.Single(assistant.Content);
+    Assert.NotEmpty(content.OutputTextAnnotations);
+    Assert.Contains(content.OutputTextAnnotations,
+        x => x.Kind == ResponseMessageAnnotationKind.UriCitation &&
+            x.UriCitationUri.StartsWith("https://catedralaltapatagonia.com/tarifas/"));
 
+}
 }
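
The rewritten assertions above read citations straight off the raw `OpenAIResponse` instead of the captured JSON. A sketch of the same pattern outside a test, using only members that appear in the diff (the surrounding `chat`, `messages`, and `options` are assumed from the readme example):

```csharp
// Sketch: enumerate URL citations from the raw OpenAI response, mirroring the updated test.
using System.Linq;
using Microsoft.Extensions.AI;
using OpenAI.Responses;

var response = await chat.GetResponseAsync(messages, options);

if (response.RawRepresentation is OpenAIResponse raw)
{
    var uris = raw.OutputItems
        .OfType<MessageResponseItem>()
        .Where(item => item.Role == MessageRole.Assistant)
        .SelectMany(item => item.Content)
        .SelectMany(content => content.OutputTextAnnotations)
        .Where(annotation => annotation.Kind == ResponseMessageAnnotationKind.UriCitation)
        .Select(annotation => annotation.UriCitationUri);

    foreach (var uri in uris)
        Console.WriteLine(uri); // each web search citation URL
}
```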

src/AI.Tests/RetrievalTests.cs

Lines changed: 2 additions & 2 deletions
@@ -11,11 +11,11 @@ public class RetrievalTests(ITestOutputHelper output)
 [InlineData("gpt-4.1-nano", "What's the battery life in an iPhone 15?", true)]
 public async Task CanRetrieveContent(string model, string question, bool empty = false)
 {
-    var client = new OpenAI.OpenAIClient(Configuration["OPENAI_API_KEY"]);
+    var client = new global::OpenAI.OpenAIClient(Configuration["OPENAI_API_KEY"]);
     var store = client.GetVectorStoreClient().CreateVectorStore(true);
     try
     {
-        var file = client.GetOpenAIFileClient().UploadFile("Content/LNS0004592.md", OpenAI.Files.FileUploadPurpose.Assistants);
+        var file = client.GetOpenAIFileClient().UploadFile("Content/LNS0004592.md", global::OpenAI.Files.FileUploadPurpose.Assistants);
         try
         {
             client.GetVectorStoreClient().AddFileToVectorStore(store.VectorStoreId, file.Value.Id, true);

src/AI.Tests/ToolsTests.cs

Lines changed: 4 additions & 3 deletions
@@ -1,4 +1,5 @@
 using System.ComponentModel;
+using Devlooped.Extensions.AI.OpenAI;
 using Microsoft.Extensions.AI;
 using static ConfigurationExtensions;
 
@@ -18,7 +19,7 @@ public async Task RunToolResult()
 };
 
 var client = new OpenAIChatClient(Configuration["OPENAI_API_KEY"]!, "gpt-4.1",
-    OpenAI.OpenAIClientOptions.WriteTo(output))
+    global::OpenAI.OpenAIClientOptions.WriteTo(output))
     .AsBuilder()
     .UseFunctionInvocation()
     .Build();
@@ -50,7 +51,7 @@ public async Task RunToolTerminateResult()
 };
 
 var client = new OpenAIChatClient(Configuration["OPENAI_API_KEY"]!, "gpt-4.1",
-    OpenAI.OpenAIClientOptions.WriteTo(output))
+    global::OpenAI.OpenAIClientOptions.WriteTo(output))
     .AsBuilder()
     .UseFunctionInvocation()
     .Build();
@@ -82,7 +83,7 @@ public async Task RunToolExceptionOutcome()
 };
 
 var client = new OpenAIChatClient(Configuration["OPENAI_API_KEY"]!, "gpt-4.1",
-    OpenAI.OpenAIClientOptions.WriteTo(output))
+    global::OpenAI.OpenAIClientOptions.WriteTo(output))
     .AsBuilder()
     .UseFunctionInvocation()
     .Build();

src/AI/Grok/GrokChatClient.cs

Lines changed: 5 additions & 5 deletions
@@ -7,7 +7,7 @@
 using Microsoft.Extensions.AI;
 using OpenAI;
 
-namespace Devlooped.Extensions.AI;
+namespace Devlooped.Extensions.AI.Grok;
 
 /// <summary>
 /// An <see cref="IChatClient"/> implementation for Grok.
@@ -100,9 +100,9 @@ IChatClient GetChatClient(string modelId) => clients.GetOrAdd(modelId, model
 {
     result.ReasoningEffortLevel = grok.ReasoningEffort switch
     {
-        ReasoningEffort.High => OpenAI.Chat.ChatReasoningEffortLevel.High,
+        ReasoningEffort.High => global::OpenAI.Chat.ChatReasoningEffortLevel.High,
         // Grok does not support Medium, so we map it to Low too
-        _ => OpenAI.Chat.ChatReasoningEffortLevel.Low,
+        _ => global::OpenAI.Chat.ChatReasoningEffortLevel.Low,
     };
 }
 
@@ -119,7 +119,7 @@ void IDisposable.Dispose() { }
 // Allows creating the base OpenAIClient with a pre-created pipeline.
 class PipelineClient(ClientPipeline pipeline, OpenAIClientOptions options) : OpenAIClient(pipeline, options) { }
 
-class GrokChatWebSearchOptions : OpenAI.Chat.ChatWebSearchOptions
+class GrokChatWebSearchOptions : global::OpenAI.Chat.ChatWebSearchOptions
 {
     public GrokSearch Mode { get; set; } = GrokSearch.Auto;
     public DateOnly? FromDate { get; set; }
@@ -174,7 +174,7 @@ class LowercaseNamingPolicy : JsonNamingPolicy
     }
 }
 
-class GrokCompletionOptions : OpenAI.Chat.ChatCompletionOptions
+class GrokCompletionOptions : global::OpenAI.Chat.ChatCompletionOptions
 {
     protected override void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWriterOptions? options)
     {
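
A small sketch of what the reasoning-effort mapping above means for callers, assuming `GrokChatOptions` exposes the `ReasoningEffort` value the switch reads (hypothetical usage, not taken from this diff):

```csharp
// Hypothetical usage sketch: Grok has no Medium level, so per the switch above
// anything other than High is sent as the low effort level.
var options = new GrokChatOptions
{
    ReasoningEffort = ReasoningEffort.Medium // effectively "low" on the wire
};
```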

src/AI/Grok/GrokChatOptions.cs

Lines changed: 1 addition & 1 deletion
@@ -1,6 +1,6 @@
 using Microsoft.Extensions.AI;
 
-namespace Devlooped.Extensions.AI;
+namespace Devlooped.Extensions.AI.Grok;
 
 /// <summary>
 /// Grok-specific chat options that extend the base <see cref="ChatOptions"/>

src/AI/Grok/GrokClient.cs

Lines changed: 11 additions & 11 deletions
@@ -4,7 +4,7 @@
 using Microsoft.Extensions.AI;
 using OpenAI;
 
-namespace Devlooped.Extensions.AI;
+namespace Devlooped.Extensions.AI.Grok;
 
 /// <summary>
 /// Provides an OpenAI compability client for Grok. It's recommended you
@@ -26,7 +26,7 @@ public class GrokClient(string apiKey, OpenAIClientOptions? options = null)
 /// Returns an adapter that surfaces an <see cref="IChatClient"/> interface that
 /// can be used directly in the <see cref="ChatClientBuilder"/> pipeline builder.
 /// </summary>
-public override OpenAI.Chat.ChatClient GetChatClient(string model) => new GrokChatClientAdapter(this, model);
+public override global::OpenAI.Chat.ChatClient GetChatClient(string model) => new GrokChatClientAdapter(this, model);
 
 static OpenAIClientOptions EnsureEndpoint(OpenAIClientOptions? options)
 {
@@ -39,7 +39,7 @@ static OpenAIClientOptions EnsureEndpoint(OpenAIClientOptions? options)
 // OpenAI in MEAI docs. Most typical case would be to just create an <see cref="GrokChatClient"/> directly.
 // This throws on any non-IChatClient invoked methods in the AsIChatClient adapter, and
 // forwards the IChatClient methods to the GrokChatClient implementation which is cached per client.
-class GrokChatClientAdapter(GrokClient client, string model) : OpenAI.Chat.ChatClient, IChatClient
+class GrokChatClientAdapter(GrokClient client, string model) : global::OpenAI.Chat.ChatClient, IChatClient
 {
     void IDisposable.Dispose() { }
 
@@ -60,36 +60,36 @@ IAsyncEnumerable<ChatResponseUpdate> IChatClient.GetStreamingResponseAsync(IEnum
         => client.GetChatClientImpl(options?.ModelId ?? model).GetStreamingResponseAsync(messages, options, cancellation);
 
     // These are the only two methods actually invoked by the AsIChatClient adapter from M.E.AI.OpenAI
-    public override Task<ClientResult<OpenAI.Chat.ChatCompletion>> CompleteChatAsync(IEnumerable<OpenAI.Chat.ChatMessage>? messages, OpenAI.Chat.ChatCompletionOptions? options = null, CancellationToken cancellationToken = default)
+    public override Task<ClientResult<global::OpenAI.Chat.ChatCompletion>> CompleteChatAsync(IEnumerable<global::OpenAI.Chat.ChatMessage>? messages, global::OpenAI.Chat.ChatCompletionOptions? options = null, CancellationToken cancellationToken = default)
         => throw new NotSupportedException($"Consume directly as an {nameof(IChatClient)} instead of invoking {nameof(OpenAIClientExtensions.AsIChatClient)} on this instance.");
 
-    public override AsyncCollectionResult<OpenAI.Chat.StreamingChatCompletionUpdate> CompleteChatStreamingAsync(IEnumerable<OpenAI.Chat.ChatMessage>? messages, OpenAI.Chat.ChatCompletionOptions? options = null, CancellationToken cancellationToken = default)
+    public override AsyncCollectionResult<global::OpenAI.Chat.StreamingChatCompletionUpdate> CompleteChatStreamingAsync(IEnumerable<global::OpenAI.Chat.ChatMessage>? messages, global::OpenAI.Chat.ChatCompletionOptions? options = null, CancellationToken cancellationToken = default)
        => throw new NotSupportedException($"Consume directly as an {nameof(IChatClient)} instead of invoking {nameof(OpenAIClientExtensions.AsIChatClient)} on this instance.");
 
     #region Unsupported
 
     public override ClientResult CompleteChat(BinaryContent? content, RequestOptions? options = null)
        => throw new NotSupportedException($"Consume directly as an {nameof(IChatClient)}.");
 
-    public override ClientResult<OpenAI.Chat.ChatCompletion> CompleteChat(IEnumerable<OpenAI.Chat.ChatMessage>? messages, OpenAI.Chat.ChatCompletionOptions? options = null, CancellationToken cancellationToken = default)
+    public override ClientResult<global::OpenAI.Chat.ChatCompletion> CompleteChat(IEnumerable<global::OpenAI.Chat.ChatMessage>? messages, global::OpenAI.Chat.ChatCompletionOptions? options = null, CancellationToken cancellationToken = default)
        => throw new NotSupportedException($"Consume directly as an {nameof(IChatClient)}.");
 
-    public override ClientResult<OpenAI.Chat.ChatCompletion> CompleteChat(params OpenAI.Chat.ChatMessage[] messages)
+    public override ClientResult<global::OpenAI.Chat.ChatCompletion> CompleteChat(params global::OpenAI.Chat.ChatMessage[] messages)
        => throw new NotSupportedException($"Consume directly as an {nameof(IChatClient)}.");
 
     public override Task<ClientResult> CompleteChatAsync(BinaryContent? content, RequestOptions? options = null)
        => throw new NotSupportedException($"Consume directly as an {nameof(IChatClient)}.");
 
-    public override Task<ClientResult<OpenAI.Chat.ChatCompletion>> CompleteChatAsync(params OpenAI.Chat.ChatMessage[] messages)
+    public override Task<ClientResult<global::OpenAI.Chat.ChatCompletion>> CompleteChatAsync(params global::OpenAI.Chat.ChatMessage[] messages)
        => throw new NotSupportedException($"Consume directly as an {nameof(IChatClient)}.");
 
-    public override CollectionResult<OpenAI.Chat.StreamingChatCompletionUpdate> CompleteChatStreaming(IEnumerable<OpenAI.Chat.ChatMessage>? messages, OpenAI.Chat.ChatCompletionOptions? options = null, CancellationToken cancellationToken = default)
+    public override CollectionResult<global::OpenAI.Chat.StreamingChatCompletionUpdate> CompleteChatStreaming(IEnumerable<global::OpenAI.Chat.ChatMessage>? messages, global::OpenAI.Chat.ChatCompletionOptions? options = null, CancellationToken cancellationToken = default)
        => throw new NotSupportedException($"Consume directly as an {nameof(IChatClient)}.");
 
-    public override CollectionResult<OpenAI.Chat.StreamingChatCompletionUpdate> CompleteChatStreaming(params OpenAI.Chat.ChatMessage[] messages)
+    public override CollectionResult<global::OpenAI.Chat.StreamingChatCompletionUpdate> CompleteChatStreaming(params global::OpenAI.Chat.ChatMessage[] messages)
        => throw new NotSupportedException($"Consume directly as an {nameof(IChatClient)}.");
 
-    public override AsyncCollectionResult<OpenAI.Chat.StreamingChatCompletionUpdate> CompleteChatStreamingAsync(params OpenAI.Chat.ChatMessage[] messages)
+    public override AsyncCollectionResult<global::OpenAI.Chat.StreamingChatCompletionUpdate> CompleteChatStreamingAsync(params global::OpenAI.Chat.ChatMessage[] messages)
        => throw new NotSupportedException($"Consume directly as an {nameof(IChatClient)}.");
 
     #endregion

src/AI/Grok/GrokSearchTool.cs

Lines changed: 1 addition & 1 deletion
@@ -1,7 +1,7 @@
 using System.Text.Json.Serialization;
 using Microsoft.Extensions.AI;
 
-namespace Devlooped.Extensions.AI;
+namespace Devlooped.Extensions.AI.Grok;
 
 /// <summary>
 /// Enables or disables Grok's live search capabilities.

src/AI/OpenAI/OpenAIChatClient.cs

Lines changed: 1 addition & 1 deletion
@@ -5,7 +5,7 @@
 using OpenAI;
 using OpenAI.Responses;
 
-namespace Devlooped.Extensions.AI;
+namespace Devlooped.Extensions.AI.OpenAI;
 
 /// <summary>
 /// An <see cref="IChatClient"/> implementation for OpenAI.
