diff --git a/Extensions.AI.sln b/Extensions.AI.sln
index c55cb54..2f103a6 100644
--- a/Extensions.AI.sln
+++ b/Extensions.AI.sln
@@ -13,6 +13,8 @@ Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "Weaving", "src\Weaving\Weav
EndProject
Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "Samples", "src\Samples\Samples.csproj", "{4B78F0E3-E03B-4283-AB0B-B1D76CAEF1BC}"
EndProject
+Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "AI.CodeAnalysis", "src\AI.CodeAnalysis\AI.CodeAnalysis.csproj", "{F6A9F74B-5C63-4C53-9745-F00BE40AF8C8}"
+EndProject
Global
GlobalSection(SolutionConfigurationPlatforms) = preSolution
Debug|Any CPU = Debug|Any CPU
@@ -83,6 +85,18 @@ Global
{4B78F0E3-E03B-4283-AB0B-B1D76CAEF1BC}.Release|x64.Build.0 = Release|Any CPU
{4B78F0E3-E03B-4283-AB0B-B1D76CAEF1BC}.Release|x86.ActiveCfg = Release|Any CPU
{4B78F0E3-E03B-4283-AB0B-B1D76CAEF1BC}.Release|x86.Build.0 = Release|Any CPU
+ {F6A9F74B-5C63-4C53-9745-F00BE40AF8C8}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
+ {F6A9F74B-5C63-4C53-9745-F00BE40AF8C8}.Debug|Any CPU.Build.0 = Debug|Any CPU
+ {F6A9F74B-5C63-4C53-9745-F00BE40AF8C8}.Debug|x64.ActiveCfg = Debug|Any CPU
+ {F6A9F74B-5C63-4C53-9745-F00BE40AF8C8}.Debug|x64.Build.0 = Debug|Any CPU
+ {F6A9F74B-5C63-4C53-9745-F00BE40AF8C8}.Debug|x86.ActiveCfg = Debug|Any CPU
+ {F6A9F74B-5C63-4C53-9745-F00BE40AF8C8}.Debug|x86.Build.0 = Debug|Any CPU
+ {F6A9F74B-5C63-4C53-9745-F00BE40AF8C8}.Release|Any CPU.ActiveCfg = Release|Any CPU
+ {F6A9F74B-5C63-4C53-9745-F00BE40AF8C8}.Release|Any CPU.Build.0 = Release|Any CPU
+ {F6A9F74B-5C63-4C53-9745-F00BE40AF8C8}.Release|x64.ActiveCfg = Release|Any CPU
+ {F6A9F74B-5C63-4C53-9745-F00BE40AF8C8}.Release|x64.Build.0 = Release|Any CPU
+ {F6A9F74B-5C63-4C53-9745-F00BE40AF8C8}.Release|x86.ActiveCfg = Release|Any CPU
+ {F6A9F74B-5C63-4C53-9745-F00BE40AF8C8}.Release|x86.Build.0 = Release|Any CPU
EndGlobalSection
GlobalSection(SolutionProperties) = preSolution
HideSolutionNode = FALSE
diff --git a/src/AI.CodeAnalysis/AI.CodeAnalysis.csproj b/src/AI.CodeAnalysis/AI.CodeAnalysis.csproj
new file mode 100644
index 0000000..b371c3b
--- /dev/null
+++ b/src/AI.CodeAnalysis/AI.CodeAnalysis.csproj
@@ -0,0 +1,20 @@
+
+
+
+ netstandard2.0
+ true
+ analyzers/dotnet/roslyn4.0/cs
+
+
+
+
+
+
+
+
+
+
+
+
+
+
diff --git a/src/AI.CodeAnalysis/ChatClientExtensionsGenerator.cs b/src/AI.CodeAnalysis/ChatClientExtensionsGenerator.cs
new file mode 100644
index 0000000..bb26407
--- /dev/null
+++ b/src/AI.CodeAnalysis/ChatClientExtensionsGenerator.cs
@@ -0,0 +1,27 @@
+using System.Text;
+using Microsoft.CodeAnalysis;
+using Microsoft.CodeAnalysis.Text;
+
+namespace Devlooped.Extensions.AI;
+
+///
+/// This generator produces the source code so that it
+/// exists in the user's target compilation and can successfully overload (and override)
+/// the OpenAIClientExtensions.AsIChatClient that would otherwise be used. We
+/// need this to ensure that the can be used directly as an
+/// IChatClient instead of wrapping it in the M.E.AI.OpenAI adapter.
+///
+[Generator(LanguageNames.CSharp)]
+public class ChatClientExtensionsGenerator : IIncrementalGenerator
+{
+ public void Initialize(IncrementalGeneratorInitializationContext context)
+ {
+ context.RegisterSourceOutput(context.CompilationProvider,
+ (spc, _) =>
+ {
+ spc.AddSource(
+ $"{nameof(ThisAssembly.Resources.ChatClientExtensions)}.g.cs",
+ SourceText.From(ThisAssembly.Resources.ChatClientExtensions.Text, Encoding.UTF8));
+ });
+ }
+}
diff --git a/src/AI.Tests/AI.Tests.csproj b/src/AI.Tests/AI.Tests.csproj
index ee46d9d..0afe837 100644
--- a/src/AI.Tests/AI.Tests.csproj
+++ b/src/AI.Tests/AI.Tests.csproj
@@ -3,8 +3,13 @@
net8.0;net10.0
OPENAI001;$(NoWarn)
+ true
+
+
+
+
diff --git a/src/AI.Tests/Extensions/PipelineTestOutput.cs b/src/AI.Tests/Extensions/PipelineTestOutput.cs
new file mode 100644
index 0000000..8020c0c
--- /dev/null
+++ b/src/AI.Tests/Extensions/PipelineTestOutput.cs
@@ -0,0 +1,68 @@
+using System.ClientModel.Primitives;
+using System.Text.Json;
+using System.Text.Json.Nodes;
+
+namespace Devlooped.Extensions.AI;
+
+public static class PipelineTestOutput
+{
+ ///
+ /// Sets a that renders HTTP messages to the
+ /// console using Spectre.Console rich JSON formatting, but only if the console is interactive.
+ ///
+ /// The options type to configure for HTTP logging.
+ /// The options instance to configure.
+ ///
+ NOTE: this is the lowest-level logging after all chat pipeline processing has been done.
+ ///
+ /// If the options already provide a transport, it will be wrapped with the console
+ /// logging transport to minimize the impact on existing configurations.
+ ///
+ ///
+ public static TOptions UseTestOutput(this TOptions pipelineOptions, ITestOutputHelper output)
+ where TOptions : ClientPipelineOptions
+ {
+ pipelineOptions.Transport = new TestPipelineTransport(pipelineOptions.Transport ?? HttpClientPipelineTransport.Shared, output);
+
+ return pipelineOptions;
+ }
+}
+
+public class TestPipelineTransport(PipelineTransport inner, ITestOutputHelper? output = null) : PipelineTransport
+{
+ static readonly JsonSerializerOptions options = new JsonSerializerOptions(JsonSerializerDefaults.General)
+ {
+ WriteIndented = true,
+ };
+
+ public List Requests { get; } = [];
+ public List Responses { get; } = [];
+
+ protected override async ValueTask ProcessCoreAsync(PipelineMessage message)
+ {
+ message.BufferResponse = true;
+ await inner.ProcessAsync(message);
+
+ if (message.Request.Content is not null)
+ {
+ using var memory = new MemoryStream();
+ message.Request.Content.WriteTo(memory);
+ memory.Position = 0;
+ using var reader = new StreamReader(memory);
+ var content = await reader.ReadToEndAsync();
+ var node = JsonNode.Parse(content);
+ Requests.Add(node!);
+ output?.WriteLine(node!.ToJsonString(options));
+ }
+
+ if (message.Response != null)
+ {
+ var node = JsonNode.Parse(message.Response.Content.ToString());
+ Responses.Add(node!);
+ output?.WriteLine(node!.ToJsonString(options));
+ }
+ }
+
+ protected override PipelineMessage CreateMessageCore() => inner.CreateMessage();
+ protected override void ProcessCore(PipelineMessage message) => inner.Process(message);
+}
diff --git a/src/AI.Tests/GrokTests.cs b/src/AI.Tests/GrokTests.cs
index 364f484..5b788e9 100644
--- a/src/AI.Tests/GrokTests.cs
+++ b/src/AI.Tests/GrokTests.cs
@@ -1,9 +1,11 @@
-namespace Devlooped.Extensions.AI;
-
+using System.ClientModel.Primitives;
+using System.Text.Json.Nodes;
using Microsoft.Extensions.AI;
using static ConfigurationExtensions;
-public class GrokTests
+namespace Devlooped.Extensions.AI;
+
+public class GrokTests(ITestOutputHelper output)
{
[SecretsFact("XAI_API_KEY")]
public async Task GrokInvokesTools()
@@ -23,12 +25,18 @@ public async Task GrokInvokesTools()
Tools = [AIFunctionFactory.Create(() => DateTimeOffset.Now.ToString("O"), "get_date")]
};
- var response = await grok.GetResponseAsync(messages, options);
+ var client = grok.GetChatClient("grok-3");
+ var chat = Assert.IsType(client, false);
+
+ var response = await chat.GetResponseAsync(messages, options);
var getdate = response.Messages
.SelectMany(x => x.Contents.OfType())
.Any(x => x.Name == "get_date");
Assert.True(getdate);
+ // NOTE: the chat client was requested as grok-3 but the chat options wanted a
+ // different model and the grok client honors that choice.
+ Assert.Equal("grok-3-mini", response.ModelId);
}
[SecretsFact("XAI_API_KEY")]
@@ -40,7 +48,11 @@ public async Task GrokInvokesToolAndSearch()
{ "user", "What's Tesla stock worth today?" },
};
- var grok = new GrokClient(Configuration["XAI_API_KEY"]!)
+ var transport = new TestPipelineTransport(HttpClientPipelineTransport.Shared, output);
+
+ var grok = new GrokClient(Configuration["XAI_API_KEY"]!, new OpenAI.OpenAIClientOptions() { Transport = transport })
+ .GetChatClient("grok-3")
+ .AsIChatClient()
.AsBuilder()
.UseFunctionInvocation()
.Build();
@@ -54,14 +66,30 @@ public async Task GrokInvokesToolAndSearch()
var response = await grok.GetResponseAsync(messages, options);
+ // assert that the request contains the following node
+ // "search_parameters": {
+ // "mode": "on"
+ //}
+ Assert.All(transport.Requests, x =>
+ {
+ var search = Assert.IsType(x["search_parameters"]);
+ Assert.Equal("on", search["mode"]?.GetValue());
+ });
+
// The get_date result shows up as a tool role
Assert.Contains(response.Messages, x => x.Role == ChatRole.Tool);
- var text = response.Text;
+ // Citations include finance.yahoo.com at least as a web search source
+ var node = transport.Responses.LastOrDefault();
+ Assert.NotNull(node);
+ var citations = Assert.IsType(node["citations"], false);
+ var yahoo = citations.Where(x => x != null).Any(x => x!.ToString().Contains("https://finance.yahoo.com/quote/TSLA/", StringComparison.Ordinal));
- Assert.Contains("TSLA", text);
- Assert.Contains("$", text);
- Assert.Contains("Nasdaq", text, StringComparison.OrdinalIgnoreCase);
+ Assert.True(yahoo, "Expected at least one citation to finance.yahoo.com");
+
+ // NOTE: the chat client was requested as grok-3 but the chat options wanted a
+ // different model and the grok client honors that choice.
+ Assert.Equal("grok-3-mini", response.ModelId);
}
[SecretsFact("XAI_API_KEY")]
@@ -73,20 +101,43 @@ public async Task GrokInvokesHostedSearchTool()
{ "user", "What's Tesla stock worth today? Search X and the news for latest info." },
};
- var grok = new GrokClient(Configuration["XAI_API_KEY"]!);
+ var transport = new TestPipelineTransport(HttpClientPipelineTransport.Shared, output);
+
+ var grok = new GrokClient(Configuration["XAI_API_KEY"]!, new OpenAI.OpenAIClientOptions() { Transport = transport });
+ var client = grok.GetChatClient("grok-3");
+ var chat = Assert.IsType(client, false);
var options = new ChatOptions
{
- ModelId = "grok-3",
Tools = [new HostedWebSearchTool()]
};
- var response = await grok.GetResponseAsync(messages, options);
+ var response = await chat.GetResponseAsync(messages, options);
var text = response.Text;
Assert.Contains("TSLA", text);
- Assert.Contains("$", text);
- Assert.Contains("Nasdaq", text, StringComparison.OrdinalIgnoreCase);
+
+ // assert that the request contains the following node
+ // "search_parameters": {
+ // "mode": "auto"
+ //}
+ Assert.All(transport.Requests, x =>
+ {
+ var search = Assert.IsType(x["search_parameters"]);
+ Assert.Equal("auto", search["mode"]?.GetValue());
+ });
+
+ // Citations include finance.yahoo.com at least as a web search source
+ Assert.Single(transport.Responses);
+ var node = transport.Responses[0];
+ Assert.NotNull(node);
+ var citations = Assert.IsType(node["citations"], false);
+ var yahoo = citations.Where(x => x != null).Any(x => x!.ToString().Contains("https://finance.yahoo.com/quote/TSLA/", StringComparison.Ordinal));
+
+ Assert.True(yahoo, "Expected at least one citation to finance.yahoo.com");
+
+ // Uses the default model set by the client when we asked for it
+ Assert.Equal("grok-3", response.ModelId);
}
[SecretsFact("XAI_API_KEY")]
@@ -99,6 +150,8 @@ public async Task GrokThinksHard()
};
var grok = new GrokClient(Configuration["XAI_API_KEY"]!)
+ .GetChatClient("grok-3")
+ .AsIChatClient()
.AsBuilder()
.UseFunctionInvocation()
.Build();
@@ -115,5 +168,8 @@ public async Task GrokThinksHard()
var text = response.Text;
Assert.Contains("48 years", text);
+ // NOTE: the chat client was requested as grok-3 but the chat options wanted a
+ // different model and the grok client honors that choice.
+ Assert.StartsWith("grok-3-mini", response.ModelId);
}
}
diff --git a/src/AI/AI.csproj b/src/AI/AI.csproj
index e06d105..3c1a94d 100644
--- a/src/AI/AI.csproj
+++ b/src/AI/AI.csproj
@@ -8,16 +8,21 @@
-
+
+
+
+
+
+
diff --git a/src/AI/ChatClientExtensions.cs b/src/AI/ChatClientExtensions.cs
new file mode 100644
index 0000000..cae33c1
--- /dev/null
+++ b/src/AI/ChatClientExtensions.cs
@@ -0,0 +1,13 @@
+using Microsoft.Extensions.AI;
+using OpenAI.Chat;
+
+///
+/// Smarter casting to when the target
+/// already implements the interface.
+///
+static class ChatClientExtensions
+{
+ /// Gets an for use with this .
+ public static IChatClient AsIChatClient(this ChatClient client) =>
+ client as IChatClient ?? OpenAIClientExtensions.AsIChatClient(client);
+}
\ No newline at end of file
diff --git a/src/AI/Console/JsonConsoleOptions.cs b/src/AI/Console/JsonConsoleOptions.cs
index 4661e70..080764d 100644
--- a/src/AI/Console/JsonConsoleOptions.cs
+++ b/src/AI/Console/JsonConsoleOptions.cs
@@ -115,7 +115,9 @@ internal Panel CreatePanel(object value)
return panel;
}
+#pragma warning disable CS9113 // Parameter is unread. BOGUS
sealed class WrappedJsonText(string json, int maxWidth) : Renderable
+#pragma warning restore CS9113 // Parameter is unread. BOGUS
{
readonly JsonText jsonText = new(json);
diff --git a/src/AI/Devlooped.Extensions.AI.props b/src/AI/Devlooped.Extensions.AI.props
index f5c82f0..f77b491 100644
--- a/src/AI/Devlooped.Extensions.AI.props
+++ b/src/AI/Devlooped.Extensions.AI.props
@@ -1,15 +1,11 @@
-
- enable
- true
-
+
-
-
+
-
+
\ No newline at end of file
diff --git a/src/AI/Devlooped.Extensions.AI.targets b/src/AI/Devlooped.Extensions.AI.targets
new file mode 100644
index 0000000..3d3894b
--- /dev/null
+++ b/src/AI/Devlooped.Extensions.AI.targets
@@ -0,0 +1,3 @@
+
+
+
\ No newline at end of file
diff --git a/src/AI/Grok/GrokClient.cs b/src/AI/Grok/GrokClient.cs
index 68874dd..25d9135 100644
--- a/src/AI/Grok/GrokClient.cs
+++ b/src/AI/Grok/GrokClient.cs
@@ -7,10 +7,12 @@
namespace Devlooped.Extensions.AI;
-public class GrokClient(string apiKey, GrokClientOptions options)
- : OpenAIClient(new ApiKeyCredential(apiKey), options), IChatClient
+public class GrokClient(string apiKey, OpenAIClientOptions options)
+ : OpenAIClient(new ApiKeyCredential(apiKey), EnsureEndpoint(options))
{
- readonly GrokClientOptions clientOptions = options;
+ // This allows ChatOptions to request a different model than the one configured
+ // in the chat pipeline when GetChatClient(model).AsIChatClient() is called at registration time.
+ readonly ConcurrentDictionary adapters = new();
readonly ConcurrentDictionary clients = new();
public GrokClient(string apiKey)
@@ -18,20 +20,31 @@ public GrokClient(string apiKey)
{
}
- void IDisposable.Dispose() { }
- object? IChatClient.GetService(Type serviceType, object? serviceKey) => default;
-
- Task IChatClient.GetResponseAsync(IEnumerable messages, ChatOptions? options, CancellationToken cancellation)
- => GetClient(options).GetResponseAsync(messages, SetOptions(options), cancellation);
-
- IAsyncEnumerable IChatClient.GetStreamingResponseAsync(IEnumerable messages, ChatOptions? options, CancellationToken cancellation)
- => GetClient(options).GetStreamingResponseAsync(messages, SetOptions(options), cancellation);
+ IChatClient GetChatClientImpl(string model)
+ // Gets the real chat client by prefixing so the overload invokes the base.
+ => clients.GetOrAdd(model, key => GetChatClient("__" + model).AsIChatClient());
+
+ ///
+ /// Returns an adapter that surfaces an interface that
+ /// can be used directly in the pipeline builder.
+ ///
+ public override OpenAI.Chat.ChatClient GetChatClient(string model)
+ // We need to differentiate getting a real chat client vs an adapter for pipeline setup.
+ // The former is invoked by the adapter when it needs to invoke the actual chat client,
+ // which goes through the GetChatClientImpl. Since the method override is necessary to
+ // satisfy the usage pattern when configuring OpenAIClient with M.E.AI, we differentiate
+ // the internal call by adding a prefix we remove before calling downstream.
+ => model.StartsWith("__") ? base.GetChatClient(model[2..]) : new GrokChatClientAdapter(this, model);
+
+ static OpenAIClientOptions EnsureEndpoint(OpenAIClientOptions options)
+ {
+ if (options.Endpoint is null)
+ options.Endpoint = new Uri("https://api.x.ai/v1");
- IChatClient GetClient(ChatOptions? options) => clients.GetOrAdd(
- options?.ModelId ?? clientOptions.Model,
- model => base.GetChatClient(model).AsIChatClient());
+ return options;
+ }
- ChatOptions? SetOptions(ChatOptions? options)
+ static ChatOptions? SetOptions(ChatOptions? options)
{
if (options is null)
return null;
@@ -40,23 +53,23 @@ IChatClient GetClient(ChatOptions? options) => clients.GetOrAdd(
{
var result = new GrokCompletionOptions();
var grok = options as GrokChatOptions;
+ var search = grok?.Search;
if (options.Tools != null)
{
if (options.Tools.OfType().FirstOrDefault() is GrokSearchTool grokSearch)
- result.Search = grokSearch.Mode;
+ search = grokSearch.Mode;
else if (options.Tools.OfType().FirstOrDefault() is HostedWebSearchTool webSearch)
- result.Search = GrokSearch.Auto;
+ search = GrokSearch.Auto;
// Grok doesn't support any other hosted search tools, so remove remaining ones
// so they don't get copied over by the OpenAI client.
- options.Tools = [.. options.Tools.Where(tool => tool is not HostedWebSearchTool)];
- }
- else if (grok is not null)
- {
- result.Search = grok.Search;
+ //options.Tools = [.. options.Tools.Where(tool => tool is not HostedWebSearchTool)];
}
+ if (search != null)
+ result.Search = search.Value;
+
if (grok?.ReasoningEffort != null)
{
result.ReasoningEffortLevel = grok.ReasoningEffort switch
@@ -82,7 +95,7 @@ class GrokCompletionOptions : OpenAI.Chat.ChatCompletionOptions
{
public GrokSearch Search { get; set; } = GrokSearch.Auto;
- protected override void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWriterOptions options)
+ protected override void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWriterOptions? options)
{
base.JsonModelWriteCore(writer, options);
@@ -93,5 +106,61 @@ protected override void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWri
writer.WriteEndObject();
}
}
+
+ public class GrokChatClientAdapter(GrokClient client, string model) : OpenAI.Chat.ChatClient, IChatClient
+ {
+ void IDisposable.Dispose() { }
+
+ object? IChatClient.GetService(Type serviceType, object? serviceKey) => client.GetChatClientImpl(model).GetService(serviceType, serviceKey);
+
+ ///
+ /// Routes the request to a client that matches the options' ModelId (if set), or
+ /// the default model when the adapter was created.
+ ///
+ Task IChatClient.GetResponseAsync(IEnumerable messages, ChatOptions? options, CancellationToken cancellation)
+ => client.GetChatClientImpl(options?.ModelId ?? model).GetResponseAsync(messages, SetOptions(options), cancellation);
+
+ ///
+ /// Routes the request to a client that matches the options' ModelId (if set), or
+ /// the default model when the adapter was created.
+ ///
+ IAsyncEnumerable IChatClient.GetStreamingResponseAsync(IEnumerable messages, ChatOptions? options, CancellationToken cancellation)
+ => client.GetChatClientImpl(options?.ModelId ?? model).GetStreamingResponseAsync(messages, SetOptions(options), cancellation);
+
+ // These are the only two methods actually invoked by the AsIChatClient adapter from M.E.AI.OpenAI
+ public override Task> CompleteChatAsync(IEnumerable? messages, OpenAI.Chat.ChatCompletionOptions? options = null, CancellationToken cancellationToken = default)
+ => throw new NotSupportedException($"Consume directly as an {nameof(IChatClient)} instead of invoking {nameof(OpenAIClientExtensions.AsIChatClient)} on this instance.");
+
+ public override AsyncCollectionResult CompleteChatStreamingAsync(IEnumerable? messages, OpenAI.Chat.ChatCompletionOptions? options = null, CancellationToken cancellationToken = default)
+ => throw new NotSupportedException($"Consume directly as an {nameof(IChatClient)} instead of invoking {nameof(OpenAIClientExtensions.AsIChatClient)} on this instance.");
+
+ #region Unsupported
+
+ public override ClientResult CompleteChat(BinaryContent? content, RequestOptions? options = null)
+ => throw new NotSupportedException($"Consume directly as an {nameof(IChatClient)}.");
+
+ public override ClientResult CompleteChat(IEnumerable? messages, OpenAI.Chat.ChatCompletionOptions? options = null, CancellationToken cancellationToken = default)
+ => throw new NotSupportedException($"Consume directly as an {nameof(IChatClient)}.");
+
+ public override ClientResult CompleteChat(params OpenAI.Chat.ChatMessage[] messages)
+ => throw new NotSupportedException($"Consume directly as an {nameof(IChatClient)}.");
+
+ public override Task CompleteChatAsync(BinaryContent? content, RequestOptions? options = null)
+ => throw new NotSupportedException($"Consume directly as an {nameof(IChatClient)}.");
+
+ public override Task> CompleteChatAsync(params OpenAI.Chat.ChatMessage[] messages)
+ => throw new NotSupportedException($"Consume directly as an {nameof(IChatClient)}.");
+
+ public override CollectionResult CompleteChatStreaming(IEnumerable? messages, OpenAI.Chat.ChatCompletionOptions? options = null, CancellationToken cancellationToken = default)
+ => throw new NotSupportedException($"Consume directly as an {nameof(IChatClient)}.");
+
+ public override CollectionResult CompleteChatStreaming(params OpenAI.Chat.ChatMessage[] messages)
+ => throw new NotSupportedException($"Consume directly as an {nameof(IChatClient)}.");
+
+ public override AsyncCollectionResult CompleteChatStreamingAsync(params OpenAI.Chat.ChatMessage[] messages)
+ => throw new NotSupportedException($"Consume directly as an {nameof(IChatClient)}.");
+
+ #endregion
+ }
}
diff --git a/src/AI/Grok/GrokClientOptions.cs b/src/AI/Grok/GrokClientOptions.cs
deleted file mode 100644
index 8943d36..0000000
--- a/src/AI/Grok/GrokClientOptions.cs
+++ /dev/null
@@ -1,16 +0,0 @@
-using OpenAI;
-
-namespace Devlooped.Extensions.AI;
-
-public class GrokClientOptions : OpenAIClientOptions
-{
- public GrokClientOptions() : this("grok-3") { }
-
- public GrokClientOptions(string model)
- {
- Endpoint = new Uri("https://api.x.ai/v1");
- Model = Throw.IfNullOrEmpty(model);
- }
-
- public string Model { get; }
-}
diff --git a/src/AI/Grok/GrokServiceCollectionExtensions.cs b/src/AI/Grok/GrokServiceCollectionExtensions.cs
deleted file mode 100644
index 9352b8f..0000000
--- a/src/AI/Grok/GrokServiceCollectionExtensions.cs
+++ /dev/null
@@ -1,27 +0,0 @@
-using System.ComponentModel;
-using Devlooped.Extensions.AI;
-using Microsoft.Extensions.AI;
-using Microsoft.Extensions.Configuration;
-using Microsoft.Extensions.DependencyInjection;
-
-namespace Devlooped.Extensions.AI;
-
-///
-/// Extensions for registering the as a chat client in the service collection.
-///
-[EditorBrowsable(EditorBrowsableState.Never)]
-public static class GrokServiceCollectionExtensions
-{
- extension(IServiceCollection services)
- {
- ///
- /// Registers the as a chat client in the service collection.
- ///
- /// The factory to create the Grok client.
- /// The optional service lifetime.
- /// The to further build the pipeline.
- public ChatClientBuilder AddGrok(Func factory, ServiceLifetime lifetime = ServiceLifetime.Singleton)
- => services.AddChatClient(services
- => factory(services.GetRequiredService()), lifetime);
- }
-}
diff --git a/src/Samples/Program.cs b/src/Samples/Program.cs
index 421f002..073e02b 100644
--- a/src/Samples/Program.cs
+++ b/src/Samples/Program.cs
@@ -6,16 +6,16 @@
};
// Env supports .env as well as all standard .NET configuration sources
-var grok = new GrokClient(Throw.IfNullOrEmpty(Env.Get("XAI_API_KEY")), new GrokClientOptions()
+var grok = new GrokClient(Throw.IfNullOrEmpty(Env.Get("XAI_API_KEY")), new OpenAI.OpenAIClientOptions()
.UseJsonConsoleLogging(new() { WrapLength = 80 }));
var options = new ChatOptions
{
- ModelId = "grok-3",
// Enables Live Search
Tools = [new HostedWebSearchTool()]
};
-var response = await grok.GetResponseAsync(messages, options);
+var chat = grok.GetChatClient("grok-3").AsIChatClient();
+var response = await chat.GetResponseAsync(messages, options);
AnsiConsole.MarkupLine($":robot: {response.Text.EscapeMarkup()}");
\ No newline at end of file