diff --git a/Cnblogs.DashScope.Sdk.sln b/Cnblogs.DashScope.Sdk.sln
index 106b3de..26d0161 100644
--- a/Cnblogs.DashScope.Sdk.sln
+++ b/Cnblogs.DashScope.Sdk.sln
@@ -18,6 +18,8 @@ Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "Cnblogs.DashScope.Core", "s
EndProject
Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "Cnblogs.DashScope.Sdk.SnapshotGenerator", "test\Cnblogs.DashScope.Sdk.SnapshotGenerator\Cnblogs.DashScope.Sdk.SnapshotGenerator.csproj", "{5088DE77-1CE3-46FB-B9D0-27A6C9A5EED1}"
EndProject
+Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "Cnblogs.DashScope.AI", "src\Cnblogs.DashScope.AI\Cnblogs.DashScope.AI.csproj", "{5D5AD75A-8084-4738-AC56-B8A23E649452}"
+EndProject
Global
GlobalSection(SolutionConfigurationPlatforms) = preSolution
Debug|Any CPU = Debug|Any CPU
@@ -30,6 +32,7 @@ Global
{C910495B-87AB-4AC1-989C-B6720695A139} = {008988ED-0A3B-4272-BCC3-7B4110699345}
{CC389455-A3EA-4F09-B524-4DC351A1E1AA} = {008988ED-0A3B-4272-BCC3-7B4110699345}
{5088DE77-1CE3-46FB-B9D0-27A6C9A5EED1} = {CFC8ECB3-5248-46CD-A56C-EC088F2A3804}
+ {5D5AD75A-8084-4738-AC56-B8A23E649452} = {008988ED-0A3B-4272-BCC3-7B4110699345}
EndGlobalSection
GlobalSection(ProjectConfigurationPlatforms) = postSolution
{FA6A118A-8D26-4B7A-9952-8504B8A0025B}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
@@ -56,5 +59,9 @@ Global
{5088DE77-1CE3-46FB-B9D0-27A6C9A5EED1}.Debug|Any CPU.Build.0 = Debug|Any CPU
{5088DE77-1CE3-46FB-B9D0-27A6C9A5EED1}.Release|Any CPU.ActiveCfg = Release|Any CPU
{5088DE77-1CE3-46FB-B9D0-27A6C9A5EED1}.Release|Any CPU.Build.0 = Release|Any CPU
+ {5D5AD75A-8084-4738-AC56-B8A23E649452}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
+ {5D5AD75A-8084-4738-AC56-B8A23E649452}.Debug|Any CPU.Build.0 = Debug|Any CPU
+ {5D5AD75A-8084-4738-AC56-B8A23E649452}.Release|Any CPU.ActiveCfg = Release|Any CPU
+ {5D5AD75A-8084-4738-AC56-B8A23E649452}.Release|Any CPU.Build.0 = Release|Any CPU
EndGlobalSection
EndGlobal
diff --git a/README.md b/README.md
index 9eda927..d86527f 100644
--- a/README.md
+++ b/README.md
@@ -11,6 +11,16 @@ An unofficial DashScope SDK maintained by Cnblogs.
# Quick Start
+## Using `Microsoft.Extensions.AI`
+
+Install the `Cnblogs.DashScope.AI` package.
+
+```csharp
+var client = new DashScopeClient("your-api-key").AsChatClient("qwen-max");
+var completion = await client.CompleteAsync("hello");
+Console.WriteLine(completion);
+```
+
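+The package also plugs into the `Microsoft.Extensions.AI` tool-calling pipeline. The sketch below is adapted from the bundled `ToolCallWithExtensions.cs` sample; the `GetWeather` function is only a stand-in for your own tool, and `UseFunctionInvocation` comes from the `Microsoft.Extensions.AI` function-invocation middleware.
+
+```csharp
+using System.ComponentModel;
+using Microsoft.Extensions.AI;
+
+[Description("Gets the weather")]
+string GetWeather() => Random.Shared.NextDouble() > 0.5 ? "It's sunny" : "It's raining";
+
+// Wrap the DashScope client in an IChatClient that can invoke local functions automatically.
+var chatClient = new DashScopeClient("your-api-key")
+    .AsChatClient("qwen-max")
+    .AsBuilder()
+    .UseFunctionInvocation()
+    .Build();
+
+var chatOptions = new ChatOptions { Tools = [AIFunctionFactory.Create(GetWeather)] };
+var reply = await chatClient.CompleteAsync("What is the weather today?", chatOptions);
+Console.WriteLine(reply);
+```
+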
## Console App
Install `Cnblogs.DashScope.Sdk` package.
diff --git a/README.zh-Hans.md b/README.zh-Hans.md
index b8cc16b..9a83503 100644
--- a/README.zh-Hans.md
+++ b/README.zh-Hans.md
@@ -11,6 +11,16 @@
# 快速开始
+## 使用 `Microsoft.Extensions.AI` 接口
+
+安装 NuGet 包 `Cnblogs.DashScope.AI`
+
+```csharp
+var client = new DashScopeClient("your-api-key").AsChatClient("qwen-max");
+var completion = await client.CompleteAsync("hello");
+Console.WriteLine(completion);
+```
+
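+该包同样可以接入 `Microsoft.Extensions.AI` 的工具调用(Function Calling)管线。下面的示例改编自仓库自带的 `ToolCallWithExtensions.cs` 示例,其中的 `GetWeather` 函数仅用于演示,`UseFunctionInvocation` 来自 `Microsoft.Extensions.AI` 的函数调用中间件。
+
+```csharp
+using System.ComponentModel;
+using Microsoft.Extensions.AI;
+
+[Description("Gets the weather")]
+string GetWeather() => Random.Shared.NextDouble() > 0.5 ? "It's sunny" : "It's raining";
+
+// 将 DashScope 客户端包装为可自动调用本地函数的 IChatClient。
+var chatClient = new DashScopeClient("your-api-key")
+    .AsChatClient("qwen-max")
+    .AsBuilder()
+    .UseFunctionInvocation()
+    .Build();
+
+var chatOptions = new ChatOptions { Tools = [AIFunctionFactory.Create(GetWeather)] };
+var reply = await chatClient.CompleteAsync("What is the weather today?", chatOptions);
+Console.WriteLine(reply);
+```
+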
## 控制台应用
安装 NuGet 包 `Cnblogs.DashScope.Sdk`。
diff --git a/sample/Cnblogs.DashScope.Sample/Cnblogs.DashScope.Sample.csproj b/sample/Cnblogs.DashScope.Sample/Cnblogs.DashScope.Sample.csproj
index 0aa0fef..a5cb962 100644
--- a/sample/Cnblogs.DashScope.Sample/Cnblogs.DashScope.Sample.csproj
+++ b/sample/Cnblogs.DashScope.Sample/Cnblogs.DashScope.Sample.csproj
@@ -10,6 +10,7 @@
+
@@ -18,4 +19,8 @@
+
+
+
+
diff --git a/sample/Cnblogs.DashScope.Sample/Program.cs b/sample/Cnblogs.DashScope.Sample/Program.cs
index e25fbd6..8245f17 100644
--- a/sample/Cnblogs.DashScope.Sample/Program.cs
+++ b/sample/Cnblogs.DashScope.Sample/Program.cs
@@ -6,9 +6,17 @@
using Cnblogs.DashScope.Sdk.QWen;
using Json.Schema;
using Json.Schema.Generation;
+using Microsoft.Extensions.AI;
-const string apiKey = "sk-**";
-var dashScopeClient = new DashScopeClient(apiKey);
+Console.WriteLine("Reading key from environment variable DASHSCOPE_KEY");
+var apiKey = Environment.GetEnvironmentVariable("DASHSCOPE_API_KEY");
+if (string.IsNullOrEmpty(apiKey))
+{
+ Console.Write("ApiKey > ");
+ apiKey = Console.ReadLine();
+}
+
+var dashScopeClient = new DashScopeClient(apiKey!);
Console.WriteLine("Choose the sample you want to run:");
foreach (var sampleType in Enum.GetValues<SampleType>())
@@ -42,6 +50,12 @@
case SampleType.ChatCompletionWithFiles:
await ChatWithFilesAsync();
break;
+ case SampleType.MicrosoftExtensionsAi:
+ await ChatWithMicrosoftExtensions();
+ break;
+ case SampleType.MicrosoftExtensionsAiToolCall:
+ await dashScopeClient.ToolCallWithExtensionAsync();
+ break;
}
return;
@@ -68,16 +82,17 @@ async Task TextCompletionStreamAsync(string prompt)
async Task ChatStreamAsync()
{
- var history = new List<ChatMessage>();
+ var history = new List<TextChatMessage>();
while (true)
{
Console.Write("user > ");
var input = Console.ReadLine()!;
- history.Add(ChatMessage.User(input));
- var stream = dashScopeClient.GetQWenChatStreamAsync(
- QWenLlm.QWenMax,
- history,
- new TextGenerationParameters { IncrementalOutput = true, ResultFormat = ResultFormats.Message });
+ history.Add(TextChatMessage.User(input));
+ var stream = dashScopeClient
+ .GetQWenChatStreamAsync(
+ QWenLlm.QWenMax,
+ history,
+ new TextGenerationParameters { IncrementalOutput = true, ResultFormat = ResultFormats.Message });
var role = string.Empty;
var message = new StringBuilder();
await foreach (var modelResponse in stream)
@@ -94,7 +109,7 @@ async Task ChatStreamAsync()
}
Console.WriteLine();
- history.Add(new ChatMessage(role, message.ToString()));
+ history.Add(new TextChatMessage(role, message.ToString()));
}
// ReSharper disable once FunctionNeverReturns
@@ -102,17 +117,17 @@ async Task ChatStreamAsync()
async Task ChatWithFilesAsync()
{
- var history = new List<ChatMessage>();
+ var history = new List<TextChatMessage>();
Console.WriteLine("uploading file \"test.txt\" ");
var file = new FileInfo("test.txt");
var uploadedFile = await dashScopeClient.UploadFileAsync(file.OpenRead(), file.Name);
Console.WriteLine("file uploaded, id: " + uploadedFile.Id);
Console.WriteLine();
- var fileMessage = ChatMessage.File(uploadedFile.Id);
+ var fileMessage = TextChatMessage.File(uploadedFile.Id);
history.Add(fileMessage);
Console.WriteLine("system > " + fileMessage.Content);
- var userPrompt = ChatMessage.User("该文件的内容是什么");
+ var userPrompt = TextChatMessage.User("该文件的内容是什么");
history.Add(userPrompt);
Console.WriteLine("user > " + userPrompt.Content);
var stream = dashScopeClient.GetQWenChatStreamAsync(
@@ -135,7 +150,7 @@ async Task ChatWithFilesAsync()
}
Console.WriteLine();
- history.Add(new ChatMessage(role, message.ToString()));
+ history.Add(new TextChatMessage(role, message.ToString()));
Console.WriteLine();
Console.WriteLine("Deleting file by id: " + uploadedFile.Id);
@@ -145,7 +160,7 @@ async Task ChatWithFilesAsync()
async Task ChatWithToolsAsync()
{
- var history = new List<ChatMessage>();
+ var history = new List<TextChatMessage>();
var tools = new List<ToolDefinition>
{
new(
@@ -156,7 +171,7 @@ async Task ChatWithToolsAsync()
new JsonSchemaBuilder().FromType<WeatherReportParameters>().Build()))
};
var chatParameters = new TextGenerationParameters() { ResultFormat = ResultFormats.Message, Tools = tools };
- var question = ChatMessage.User("请问现在杭州的天气如何?");
+ var question = TextChatMessage.User("请问现在杭州的天气如何?");
history.Add(question);
Console.WriteLine($"{question.Role} > {question.Content}");
@@ -164,11 +179,11 @@ async Task ChatWithToolsAsync()
var toolCallMessage = response.Output.Choices![0].Message;
history.Add(toolCallMessage);
Console.WriteLine(
- $"{toolCallMessage.Role} > {toolCallMessage.ToolCalls![0].Function!.Name}{toolCallMessage.ToolCalls[0].Function!.Arguments}");
+ $"{toolCallMessage.Role} > {toolCallMessage.ToolCalls![0].Function.Name}{toolCallMessage.ToolCalls[0].Function.Arguments}");
var toolResponse = GetWeather(
- JsonSerializer.Deserialize<WeatherReportParameters>(toolCallMessage.ToolCalls[0].Function!.Arguments!)!);
- var toolMessage = ChatMessage.Tool(toolResponse, nameof(GetWeather));
+ JsonSerializer.Deserialize<WeatherReportParameters>(toolCallMessage.ToolCalls[0].Function.Arguments!)!);
+ var toolMessage = TextChatMessage.Tool(toolResponse, nameof(GetWeather));
history.Add(toolMessage);
Console.WriteLine($"{toolMessage.Role} > {toolMessage.Content}");
@@ -186,3 +201,17 @@ string GetWeather(WeatherReportParameters parameters)
};
}
}
+
+async Task ChatWithMicrosoftExtensions()
+{
+ Console.WriteLine("Requesting model...");
+ var chatClient = dashScopeClient.AsChatClient("qwen-max");
+ List<ChatMessage> conversation =
+ [
+ new(ChatRole.System, "You are a helpful AI assistant"),
+ new(ChatRole.User, "What is AI?")
+ ];
+ var response = await chatClient.CompleteAsync(conversation);
+ var serializerOptions = new JsonSerializerOptions(JsonSerializerDefaults.Web) { WriteIndented = true };
+ Console.WriteLine(JsonSerializer.Serialize(response, serializerOptions));
+}
diff --git a/sample/Cnblogs.DashScope.Sample/SampleType.cs b/sample/Cnblogs.DashScope.Sample/SampleType.cs
index 0d45c06..94d119d 100644
--- a/sample/Cnblogs.DashScope.Sample/SampleType.cs
+++ b/sample/Cnblogs.DashScope.Sample/SampleType.cs
@@ -1,21 +1,18 @@
-using System.ComponentModel;
-
-namespace Cnblogs.DashScope.Sample;
+namespace Cnblogs.DashScope.Sample;
public enum SampleType
{
- [Description("Simple prompt completion")]
TextCompletion,
- [Description("Simple prompt completion with incremental output")]
TextCompletionSse,
- [Description("Conversation between user and assistant")]
ChatCompletion,
- [Description("Conversation with tools")]
ChatCompletionWithTool,
- [Description("Conversation with files")]
- ChatCompletionWithFiles
+ ChatCompletionWithFiles,
+
+ MicrosoftExtensionsAi,
+
+ MicrosoftExtensionsAiToolCall
}
diff --git a/sample/Cnblogs.DashScope.Sample/SampleTypeDescriptor.cs b/sample/Cnblogs.DashScope.Sample/SampleTypeDescriptor.cs
index e46fe95..e39f284 100644
--- a/sample/Cnblogs.DashScope.Sample/SampleTypeDescriptor.cs
+++ b/sample/Cnblogs.DashScope.Sample/SampleTypeDescriptor.cs
@@ -11,6 +11,8 @@ public static string GetDescription(this SampleType sampleType)
SampleType.ChatCompletion => "Conversation between user and assistant",
SampleType.ChatCompletionWithTool => "Function call sample",
SampleType.ChatCompletionWithFiles => "File upload sample using qwen-long",
+ SampleType.MicrosoftExtensionsAi => "Use with Microsoft.Extensions.AI",
+ SampleType.MicrosoftExtensionsAiToolCall => "Use tool call with Microsoft.Extensions.AI interfaces",
_ => throw new ArgumentOutOfRangeException(nameof(sampleType), sampleType, "Unsupported sample option")
};
}
diff --git a/sample/Cnblogs.DashScope.Sample/ToolCallWithExtensions.cs b/sample/Cnblogs.DashScope.Sample/ToolCallWithExtensions.cs
new file mode 100644
index 0000000..f170c7f
--- /dev/null
+++ b/sample/Cnblogs.DashScope.Sample/ToolCallWithExtensions.cs
@@ -0,0 +1,25 @@
+using System.ComponentModel;
+using System.Text.Json;
+using Cnblogs.DashScope.Core;
+using Microsoft.Extensions.AI;
+
+namespace Cnblogs.DashScope.Sample;
+
+public static class ToolCallWithExtensions
+{
+ public static async Task ToolCallWithExtensionAsync(this IDashScopeClient dashScopeClient)
+ {
+ [Description("Gets the weather")]
+ string GetWeather() => Random.Shared.NextDouble() > 0.5 ? "It's sunny" : "It's raining";
+
+ var chatOptions = new ChatOptions { Tools = [AIFunctionFactory.Create(GetWeather)] };
+
+ var client = dashScopeClient.AsChatClient("qwen-max").AsBuilder().UseFunctionInvocation().Build();
+ await foreach (var message in client.CompleteStreamingAsync("What is weather today?", chatOptions))
+ {
+ Console.WriteLine(JsonSerializer.Serialize(message));
+ }
+
+ Console.WriteLine();
+ }
+}
diff --git a/src/Cnblogs.DashScope.AI/Cnblogs.DashScope.AI.csproj b/src/Cnblogs.DashScope.AI/Cnblogs.DashScope.AI.csproj
new file mode 100644
index 0000000..8a22a11
--- /dev/null
+++ b/src/Cnblogs.DashScope.AI/Cnblogs.DashScope.AI.csproj
@@ -0,0 +1,16 @@
+
+
+ Cnblogs.DashScope.AI
+ true
+ Cnblogs;Dashscope;Microsoft.Extensions.AI;Sdk;Embedding;
+ Implementation of generative AI abstractions for DashScope endpoints.
+
+
+
+
+
+
+
+
+
+
diff --git a/src/Cnblogs.DashScope.AI/DashScopeChatClient.cs b/src/Cnblogs.DashScope.AI/DashScopeChatClient.cs
new file mode 100644
index 0000000..b1c69f2
--- /dev/null
+++ b/src/Cnblogs.DashScope.AI/DashScopeChatClient.cs
@@ -0,0 +1,527 @@
+using System.Runtime.CompilerServices;
+using System.Text.Json;
+using Cnblogs.DashScope.Core;
+using Cnblogs.DashScope.Sdk;
+using Json.Schema;
+using Microsoft.Extensions.AI;
+using ChatMessage = Microsoft.Extensions.AI.ChatMessage;
+
+namespace Cnblogs.DashScope.AI;
+
+/// <summary>
+/// An <see cref="IChatClient"/> implemented with DashScope.
+/// </summary>
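+/// <remarks>
+/// A minimal usage sketch, assuming an existing <see cref="IDashScopeClient"/> instance named <c>dashScopeClient</c>:
+/// <code>
+/// IChatClient client = new DashScopeChatClient(dashScopeClient, "qwen-max");
+/// var completion = await client.CompleteAsync("hello");
+/// </code>
+/// </remarks>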
+public sealed class DashScopeChatClient : IChatClient
+{
+ private readonly IDashScopeClient _dashScopeClient;
+ private readonly string _modelId;
+
+ private static readonly JsonSchema EmptyObjectSchema =
+ JsonSchema.FromText("""{"type":"object","required":[],"properties":{}}""");
+
+ private static readonly TextGenerationParameters
+ DefaultTextGenerationParameter = new() { ResultFormat = "message" };
+
+ ///
+ /// Initialize a new instance of the
+ ///
+ ///
+ ///
+ public DashScopeChatClient(IDashScopeClient dashScopeClient, string modelId)
+ {
+ ArgumentNullException.ThrowIfNull(dashScopeClient, nameof(dashScopeClient));
+ ArgumentNullException.ThrowIfNull(modelId, nameof(modelId));
+
+ _dashScopeClient = dashScopeClient;
+ _modelId = modelId;
+ Metadata = new ChatClientMetadata("dashscope", _dashScopeClient.BaseAddress, _modelId);
+ }
+
+ /// <summary>
+ /// Gets or sets <see cref="JsonSerializerOptions"/> to use for any serialization activities related to tool call arguments and results.
+ /// </summary>
+ public JsonSerializerOptions ToolCallJsonSerializerOptions { get; set; } = new(JsonSerializerDefaults.Web);
+
+ /// <inheritdoc />
+ public async Task<ChatCompletion> CompleteAsync(
+ IList<ChatMessage> chatMessages,
+ ChatOptions? options = null,
+ CancellationToken cancellationToken = default)
+ {
+ var modelId = options?.ModelId ?? _modelId;
+ var useVlRaw = options?.AdditionalProperties?.GetValueOrDefault("useVl")?.ToString();
+ var useVl = string.IsNullOrEmpty(useVlRaw)
+ ? modelId.Contains("qwen-vl", StringComparison.OrdinalIgnoreCase)
+ || chatMessages.Any(c => c.Contents.Any(m => m is ImageContent))
+ : string.Equals(useVlRaw, "true", StringComparison.OrdinalIgnoreCase);
+ if (useVl)
+ {
+ var response = await _dashScopeClient.GetMultimodalGenerationAsync(
+ new ModelRequest()
+ {
+ Input = new MultimodalInput { Messages = ToMultimodalMessages(chatMessages) },
+ Parameters = ToMultimodalParameters(options),
+ Model = modelId
+ },
+ cancellationToken);
+
+ var returnMessage = new ChatMessage()
+ {
+ RawRepresentation = response, Role = ToChatRole(response.Output.Choices[0].Message.Role),
+ };
+
+ returnMessage.Contents.Add(new TextContent(response.Output.Choices[0].Message.Content[0].Text));
+ var completion = new ChatCompletion(returnMessage)
+ {
+ RawRepresentation = response,
+ CompletionId = response.RequestId,
+ CreatedAt = DateTimeOffset.Now,
+ ModelId = modelId,
+ FinishReason = ToFinishReason(response.Output.Choices[0].FinishReason),
+ };
+
+ if (response.Usage != null)
+ {
+ completion.Usage = new UsageDetails()
+ {
+ InputTokenCount = response.Usage.InputTokens, OutputTokenCount = response.Usage.OutputTokens,
+ };
+ }
+
+ return completion;
+ }
+ else
+ {
+ var parameters = ToTextGenerationParameters(options) ?? DefaultTextGenerationParameter;
+ var response = await _dashScopeClient.GetTextCompletionAsync(
+ new ModelRequest()
+ {
+ Input = new TextGenerationInput
+ {
+ Messages = chatMessages.SelectMany(
+ c => ToTextChatMessages(c, parameters.Tools?.ToList())),
+ Tools = ToToolDefinitions(options?.Tools)
+ },
+ Model = modelId,
+ Parameters = parameters
+ },
+ cancellationToken);
+ var returnMessage = ToChatMessage(response.Output.Choices![0].Message);
+ var completion = new ChatCompletion(returnMessage)
+ {
+ RawRepresentation = response,
+ CompletionId = response.RequestId,
+ CreatedAt = DateTimeOffset.Now,
+ ModelId = modelId,
+ FinishReason = ToFinishReason(response.Output.Choices[0].FinishReason),
+ };
+
+ if (response.Usage != null)
+ {
+ completion.Usage = new UsageDetails()
+ {
+ InputTokenCount = response.Usage.InputTokens,
+ OutputTokenCount = response.Usage.OutputTokens,
+ TotalTokenCount = response.Usage.TotalTokens,
+ };
+ }
+
+ return completion;
+ }
+ }
+
+ /// <inheritdoc />
+ public async IAsyncEnumerable<StreamingChatCompletionUpdate> CompleteStreamingAsync(
+ IList<ChatMessage> chatMessages,
+ ChatOptions? options = null,
+ [EnumeratorCancellation] CancellationToken cancellationToken = default)
+ {
+ var useVlRaw = options?.AdditionalProperties?.GetValueOrDefault("useVl")?.ToString();
+ var useVl = string.IsNullOrEmpty(useVlRaw)
+ ? chatMessages.Any(c => c.Contents.Any(m => m is ImageContent))
+ : string.Equals(useVlRaw, "true", StringComparison.OrdinalIgnoreCase);
+ var modelId = options?.ModelId ?? _modelId;
+
+ ChatRole? streamedRole = null;
+ ChatFinishReason? finishReason = null;
+ string? completionId = null;
+ if (useVl)
+ {
+ var parameter = ToMultimodalParameters(options);
+ parameter.IncrementalOutput = true;
+ var stream = _dashScopeClient.GetMultimodalGenerationStreamAsync(
+ new ModelRequest()
+ {
+ Input = new MultimodalInput { Messages = ToMultimodalMessages(chatMessages) },
+ Parameters = parameter,
+ Model = modelId
+ },
+ cancellationToken);
+ await foreach (var response in stream)
+ {
+ streamedRole ??= string.IsNullOrEmpty(response.Output.Choices[0].Message.Role)
+ ? null
+ : ToChatRole(response.Output.Choices[0].Message.Role);
+ finishReason ??= string.IsNullOrEmpty(response.Output.Choices[0].FinishReason)
+ ? null
+ : ToFinishReason(response.Output.Choices[0].FinishReason);
+ completionId ??= response.RequestId;
+
+ var update = new StreamingChatCompletionUpdate()
+ {
+ CompletionId = completionId,
+ CreatedAt = DateTimeOffset.Now,
+ FinishReason = finishReason,
+ ModelId = modelId,
+ RawRepresentation = response,
+ Role = streamedRole
+ };
+
+ if (response.Output.Choices[0].Message.Content is { Count: > 0 })
+ {
+ update.Contents.Add(new TextContent(response.Output.Choices[0].Message.Content[0].Text));
+ }
+
+ if (response.Usage != null)
+ {
+ update.Contents.Add(
+ new UsageContent(
+ new UsageDetails()
+ {
+ InputTokenCount = response.Usage.InputTokens,
+ OutputTokenCount = response.Usage.OutputTokens,
+ }));
+ }
+
+ yield return update;
+ }
+ }
+ else
+ {
+ if (options?.Tools is { Count: > 0 })
+ {
+ // qwen does not support streaming with function call, fallback to non-streaming
+ var completion = await CompleteAsync(chatMessages, options, cancellationToken);
+ yield return new StreamingChatCompletionUpdate()
+ {
+ CompletionId = completion.CompletionId,
+ Role = completion.Message.Role,
+ AdditionalProperties = completion.AdditionalProperties,
+ Contents = completion.Message.Contents,
+ RawRepresentation = completion.Message.RawRepresentation,
+ CreatedAt = completion.CreatedAt,
+ FinishReason = completion.FinishReason,
+ ModelId = completion.ModelId,
+ };
+ }
+ else
+ {
+ var parameters = ToTextGenerationParameters(options) ?? DefaultTextGenerationParameter;
+ parameters.IncrementalOutput = true;
+ var stream = _dashScopeClient.GetTextCompletionStreamAsync(
+ new ModelRequest()
+ {
+ Input = new TextGenerationInput
+ {
+ Messages = chatMessages.SelectMany(
+ c => ToTextChatMessages(c, parameters.Tools?.ToList())),
+ Tools = ToToolDefinitions(options?.Tools)
+ },
+ Model = modelId,
+ Parameters = parameters
+ },
+ cancellationToken);
+ await foreach (var response in stream)
+ {
+ streamedRole ??= string.IsNullOrEmpty(response.Output.Choices?.FirstOrDefault()?.Message.Role)
+ ? null
+ : ToChatRole(response.Output.Choices[0].Message.Role);
+ finishReason ??= string.IsNullOrEmpty(response.Output.Choices?.FirstOrDefault()?.FinishReason)
+ ? null
+ : ToFinishReason(response.Output.Choices[0].FinishReason);
+ completionId ??= response.RequestId;
+
+ var update = new StreamingChatCompletionUpdate()
+ {
+ CompletionId = completionId,
+ CreatedAt = DateTimeOffset.Now,
+ FinishReason = finishReason,
+ ModelId = modelId,
+ RawRepresentation = response,
+ Role = streamedRole
+ };
+
+ if (response.Output.Choices?.FirstOrDefault()?.Message.Content is { Length: > 0 })
+ {
+ update.Contents.Add(new TextContent(response.Output.Choices[0].Message.Content));
+ }
+
+ if (response.Usage != null)
+ {
+ update.Contents.Add(
+ new UsageContent(
+ new UsageDetails()
+ {
+ InputTokenCount = response.Usage.InputTokens,
+ OutputTokenCount = response.Usage.OutputTokens,
+ }));
+ }
+
+ yield return update;
+ }
+ }
+ }
+ }
+
+ ///
+ public object? GetService(Type serviceType, object? serviceKey = null)
+ {
+ return
+ serviceKey is not null ? null :
+ serviceType == typeof(IDashScopeClient) ? _dashScopeClient :
+ serviceType.IsInstanceOfType(this) ? this :
+ null;
+ }
+
+ ///
+ public void Dispose()
+ {
+ // nothing to dispose.
+ }
+
+ ///
+ public ChatClientMetadata Metadata { get; }
+
+ private static ChatFinishReason? ToFinishReason(string? finishReason)
+ => string.IsNullOrEmpty(finishReason)
+ ? null
+ : finishReason switch
+ {
+ "stop" => ChatFinishReason.Stop,
+ "length" => ChatFinishReason.ContentFilter,
+ "tool_calls" => ChatFinishReason.ToolCalls,
+ _ => new ChatFinishReason(finishReason),
+ };
+
+ private static ChatMessage ToChatMessage(TextChatMessage message)
+ {
+ var returnMessage = new ChatMessage()
+ {
+ RawRepresentation = message, Role = ToChatRole(message.Role),
+ };
+
+ if (string.IsNullOrEmpty(message.Content) == false)
+ {
+ returnMessage.Contents.Add(new TextContent(message.Content));
+ }
+
+ if (message.ToolCalls is { Count: > 0 })
+ {
+ message.ToolCalls.ForEach(
+ call =>
+ {
+ var arguments = string.IsNullOrEmpty(call.Function.Arguments)
+ ? null
+ : JsonSerializer.Deserialize<Dictionary<string, object?>>(call.Function.Arguments);
+ returnMessage.Contents.Add(
+ new FunctionCallContent(
+ call.Id ?? string.Empty,
+ call.Function.Name,
+ arguments) { RawRepresentation = call });
+ });
+ }
+
+ return returnMessage;
+ }
+
+ private static ChatRole ToChatRole(string role)
+ => role switch
+ {
+ DashScopeRoleNames.System => ChatRole.System,
+ DashScopeRoleNames.User => ChatRole.User,
+ DashScopeRoleNames.Assistant => ChatRole.Assistant,
+ DashScopeRoleNames.Tool => ChatRole.Tool,
+ _ => new ChatRole(role),
+ };
+
+ private MultimodalParameters ToMultimodalParameters(ChatOptions? options)
+ {
+ var parameters = new MultimodalParameters();
+ if (options is null)
+ {
+ return parameters;
+ }
+
+ parameters.Temperature = options.Temperature;
+ parameters.MaxTokens = options.MaxOutputTokens;
+ parameters.TopP = options.TopP;
+ parameters.TopK = options.TopK;
+ parameters.RepetitionPenalty = options.FrequencyPenalty;
+ parameters.PresencePenalty = options.PresencePenalty;
+ parameters.Seed = (ulong?)options.Seed;
+ if (options.StopSequences is { Count: > 0 })
+ {
+ parameters.Stop = new TextGenerationStop(options.StopSequences);
+ }
+
+ return parameters;
+ }
+
+ private IEnumerable<MultimodalMessage> ToMultimodalMessages(IEnumerable<ChatMessage> messages)
+ {
+ foreach (var from in messages)
+ {
+ if (from.Role == ChatRole.System || from.Role == ChatRole.User)
+ {
+ var contents = ToMultimodalMessageContents(from.Contents);
+ yield return from.Role == ChatRole.System
+ ? MultimodalMessage.System(contents)
+ : MultimodalMessage.User(contents);
+ }
+ else if (from.Role == ChatRole.Tool)
+ {
+ // do not support tool.
+ }
+ else if (from.Role == ChatRole.Assistant)
+ {
+ var contents = ToMultimodalMessageContents(from.Contents);
+ yield return MultimodalMessage.Assistant(contents);
+ }
+ }
+ }
+
+ private List<MultimodalMessageContent> ToMultimodalMessageContents(IList<AIContent> contents)
+ {
+ var mapped = new List<MultimodalMessageContent>();
+ foreach (var aiContent in contents)
+ {
+ var content = aiContent switch
+ {
+ TextContent text => MultimodalMessageContent.TextContent(text.Text),
+ ImageContent { Data.Length: > 0 } image => MultimodalMessageContent.ImageContent(
+ image.Data.Value.Span,
+ image.MediaType ?? throw new InvalidOperationException("image media type should not be null")),
+ ImageContent { Uri: { } uri } => MultimodalMessageContent.ImageContent(uri),
+ _ => null
+ };
+ if (content is not null)
+ {
+ mapped.Add(content);
+ }
+ }
+
+ if (mapped.Count == 0)
+ {
+ mapped.Add(MultimodalMessageContent.TextContent(string.Empty));
+ }
+
+ return mapped;
+ }
+
+ private IEnumerable<TextChatMessage> ToTextChatMessages(
+ ChatMessage from,
+ List<ToolDefinition>? tools)
+ {
+ if (from.Role == ChatRole.System || from.Role == ChatRole.User)
+ {
+ yield return new TextChatMessage(
+ from.Role.Value,
+ from.Text ?? string.Empty,
+ from.AuthorName);
+ }
+ else if (from.Role == ChatRole.Tool)
+ {
+ foreach (var content in from.Contents)
+ {
+ if (content is FunctionResultContent resultContent)
+ {
+ var result = resultContent.Result as string;
+ if (result is null && resultContent.Result is not null)
+ {
+ try
+ {
+ result = JsonSerializer.Serialize(resultContent.Result, ToolCallJsonSerializerOptions);
+ }
+ catch (NotSupportedException)
+ {
+ // If the type can't be serialized, skip it.
+ }
+ }
+
+ yield return new TextChatMessage(from.Role.Value, result ?? string.Empty);
+ }
+ }
+ }
+ else if (from.Role == ChatRole.Assistant)
+ {
+ var functionCall = from.Contents
+ .OfType<FunctionCallContent>()
+ .Select(
+ c => new ToolCall(
+ c.CallId,
+ "function",
+ tools?.FindIndex(f => f.Function?.Name == c.Name) ?? -1,
+ new FunctionCall(c.Name, JsonSerializer.Serialize(c.Arguments, ToolCallJsonSerializerOptions))))
+ .ToList();
+ yield return new TextChatMessage(
+ from.Role.Value,
+ from.Text ?? string.Empty,
+ from.AuthorName,
+ null,
+ functionCall);
+ }
+ }
+
+ private static TextGenerationParameters? ToTextGenerationParameters(ChatOptions? options)
+ {
+ if (options is null)
+ {
+ return null;
+ }
+
+ var format = "message";
+ if (options.ResponseFormat is ChatResponseFormatJson)
+ {
+ format = "json_object";
+ }
+
+ return new TextGenerationParameters()
+ {
+ ResultFormat = format,
+ Temperature = options.Temperature,
+ MaxTokens = options.MaxOutputTokens,
+ TopP = options.TopP,
+ TopK = options.TopK,
+ RepetitionPenalty = options.FrequencyPenalty,
+ PresencePenalty = options.PresencePenalty,
+ Seed = options.Seed == null ? null : (ulong)options.Seed.Value,
+ Stop = options.StopSequences == null ? null : new TextGenerationStop(options.StopSequences),
+ Tools = options.Tools == null ? null : ToToolDefinitions(options.Tools),
+ ToolChoice = options.ToolMode switch
+ {
+ AutoChatToolMode => ToolChoice.AutoChoice,
+ RequiredChatToolMode required when string.IsNullOrEmpty(required.RequiredFunctionName) == false =>
+ ToolChoice.FunctionChoice(required.RequiredFunctionName),
+ _ => ToolChoice.AutoChoice
+ }
+ };
+ }
+
+ private static IEnumerable<ToolDefinition>? ToToolDefinitions(IList<AITool>? tools)
+ {
+ return tools?.OfType<AIFunction>().Select(
+ f => new ToolDefinition(
+ "function",
+ new FunctionDefinition(
+ f.Metadata.Name,
+ f.Metadata.Description,
+ GetParameterSchema(f.Metadata.Parameters))));
+ }
+
+ private static JsonSchema GetParameterSchema(IEnumerable<AIFunctionParameterMetadata> metadata)
+ {
+ return new JsonSchemaBuilder()
+ .Properties(metadata.Select(c => (c.Name, Schema: c.Schema as JsonSchema ?? EmptyObjectSchema)).ToArray())
+ .Build();
+ }
+}
diff --git a/src/Cnblogs.DashScope.AI/DashScopeClientExtensions.cs b/src/Cnblogs.DashScope.AI/DashScopeClientExtensions.cs
new file mode 100644
index 0000000..5c39e9a
--- /dev/null
+++ b/src/Cnblogs.DashScope.AI/DashScopeClientExtensions.cs
@@ -0,0 +1,28 @@
+using Cnblogs.DashScope.AI;
+using Cnblogs.DashScope.Core;
+
+// ReSharper disable once CheckNamespace
+namespace Microsoft.Extensions.AI;
+
+/// <summary>
+/// Provides extension methods for working with <see cref="IDashScopeClient"/>s.
+/// </summary>
+public static class DashScopeClientExtensions
+{
+ /// <summary>Gets an <see cref="IChatClient"/> for use with this <see cref="IDashScopeClient"/>.</summary>
+ /// <param name="dashScopeClient">The client.</param>
+ /// <param name="modelId">The model.</param>
+ /// <returns>An <see cref="IChatClient"/> that can be used to converse via the <see cref="IDashScopeClient"/>.</returns>
+ public static IChatClient AsChatClient(this IDashScopeClient dashScopeClient, string modelId)
+ => new DashScopeChatClient(dashScopeClient, modelId);
+
+ /// <summary>Gets an <see cref="IEmbeddingGenerator{TInput, TEmbedding}"/> for use with this <see cref="IDashScopeClient"/>.</summary>
+ /// <param name="dashScopeClient">The client.</param>
+ /// <param name="modelId">The model to use.</param>
+ /// <param name="dimensions">The number of dimensions to generate in each embedding.</param>
+ /// <returns>An <see cref="IEmbeddingGenerator{TInput, TEmbedding}"/> that can be used to generate embeddings via the <see cref="IDashScopeClient"/>.</returns>
+ public static IEmbeddingGenerator<string, Embedding<float>> AsEmbeddingGenerator(
+ this IDashScopeClient dashScopeClient,
+ string modelId,
+ int? dimensions = null)
+ => new DashScopeTextEmbeddingGenerator(dashScopeClient, modelId, dimensions);
+}
diff --git a/src/Cnblogs.DashScope.AI/DashScopeTextEmbeddingGenerator.cs b/src/Cnblogs.DashScope.AI/DashScopeTextEmbeddingGenerator.cs
new file mode 100644
index 0000000..fba5e02
--- /dev/null
+++ b/src/Cnblogs.DashScope.AI/DashScopeTextEmbeddingGenerator.cs
@@ -0,0 +1,94 @@
+using System.Diagnostics.CodeAnalysis;
+using Cnblogs.DashScope.Core;
+using Cnblogs.DashScope.Sdk.TextEmbedding;
+using Microsoft.Extensions.AI;
+
+namespace Cnblogs.DashScope.AI;
+
+/// <summary>
+/// An <see cref="IEmbeddingGenerator{TInput, TEmbedding}"/> for a DashScope client.
+/// </summary>
+public sealed class DashScopeTextEmbeddingGenerator
+ : IEmbeddingGenerator<string, Embedding<float>>
+{
+ private readonly IDashScopeClient _dashScopeClient;
+ private readonly string _modelId;
+ private readonly TextEmbeddingParameters _parameters;
+
+ /// <summary>
+ /// Initialize a new instance of the <see cref="DashScopeTextEmbeddingGenerator"/> class.
+ /// </summary>
+ /// <param name="dashScopeClient">The underlying client.</param>
+ /// <param name="modelId">The model name used to generate embedding.</param>
+ /// <param name="dimensions">The number of dimensions produced by the generator.</param>
+ public DashScopeTextEmbeddingGenerator(IDashScopeClient dashScopeClient, string modelId, int? dimensions = null)
+ {
+ ArgumentNullException.ThrowIfNull(dashScopeClient, nameof(dashScopeClient));
+ ArgumentNullException.ThrowIfNull(modelId, nameof(modelId));
+
+ _dashScopeClient = dashScopeClient;
+ _modelId = modelId;
+ _parameters = new TextEmbeddingParameters { Dimension = dimensions };
+ Metadata = new EmbeddingGeneratorMetadata("dashscope", _dashScopeClient.BaseAddress, modelId, dimensions);
+ }
+
+ /// <inheritdoc />
+ public async Task<GeneratedEmbeddings<Embedding<float>>> GenerateAsync(
+ IEnumerable<string> values,
+ EmbeddingGenerationOptions? options = null,
+ CancellationToken cancellationToken = default)
+ {
+ var parameters = ToParameters(options) ?? _parameters;
+ var rawResponse =
+ await _dashScopeClient.GetTextEmbeddingsAsync(_modelId, values, parameters, cancellationToken);
+ var embeddings = rawResponse.Output.Embeddings.Select(
+ e => new Embedding<float>(e.Embedding) { ModelId = _modelId, CreatedAt = DateTimeOffset.Now });
+ var rawUsage = rawResponse.Usage;
+ var usage = rawUsage != null
+ ? new UsageDetails() { InputTokenCount = rawUsage.TotalTokens, TotalTokenCount = rawUsage.TotalTokens }
+ : null;
+ return new GeneratedEmbeddings<Embedding<float>>(embeddings)
+ {
+ Usage = usage,
+ AdditionalProperties =
+ new AdditionalPropertiesDictionary { { nameof(rawResponse.RequestId), rawResponse.RequestId } }
+ };
+ }
+
+ ///
+ public object? GetService(Type serviceType, object? serviceKey = null)
+ {
+ return
+ serviceKey is not null ? null :
+ serviceType == typeof(IDashScopeClient) ? _dashScopeClient :
+ serviceType.IsInstanceOfType(this) ? this :
+ null;
+ }
+
+ ///
+ public void Dispose()
+ {
+ // Nothing to dispose. Implementation required for the IEmbeddingGenerator interface.
+ }
+
+ [return: NotNullIfNotNull(nameof(options))]
+ private static TextEmbeddingParameters? ToParameters(EmbeddingGenerationOptions? options)
+ {
+ if (options is null)
+ {
+ return null;
+ }
+
+ return new TextEmbeddingParameters
+ {
+ Dimension = options.Dimensions,
+ OutputType =
+ options.AdditionalProperties?.GetValueOrDefault(nameof(TextEmbeddingParameters.OutputType)) as string,
+ TextType =
+ options.AdditionalProperties?.GetValueOrDefault(nameof(TextEmbeddingParameters.TextType)) as string,
+ };
+ }
+
+ ///
+ public EmbeddingGeneratorMetadata Metadata { get; }
+}
diff --git a/src/Cnblogs.DashScope.Core/MultimodalMessageContent.cs b/src/Cnblogs.DashScope.Core/MultimodalMessageContent.cs
index f6ffed7..98fff19 100644
--- a/src/Cnblogs.DashScope.Core/MultimodalMessageContent.cs
+++ b/src/Cnblogs.DashScope.Core/MultimodalMessageContent.cs
@@ -33,6 +33,26 @@ public static MultimodalMessageContent ImageContent(string url, int? minPixels =
return new MultimodalMessageContent(url, MinPixels: minPixels, MaxPixels: maxPixels);
}
+ /// <summary>
+ /// Represents an image content.
+ /// </summary>
+ /// <param name="bytes">Image binary to be sent as a base64 data uri.</param>
+ /// <param name="mediaType">Image media type.</param>
+ /// <param name="minPixels">For qwen-vl-ocr only. Minimal pixels for ocr task.</param>
+ /// <param name="maxPixels">For qwen-vl-ocr only. Maximum pixels for ocr task.</param>
+ /// <returns>A <see cref="MultimodalMessageContent"/> holding the image.</returns>
+ public static MultimodalMessageContent ImageContent(
+ ReadOnlySpan bytes,
+ string mediaType,
+ int? minPixels = null,
+ int? maxPixels = null)
+ {
+ return ImageContent(
+ $"data:{mediaType};base64,{Convert.ToBase64String(bytes)}",
+ minPixels,
+ maxPixels);
+ }
+
///
/// Represents a text content.
///
diff --git a/src/Cnblogs.DashScope.Core/ChatMessage.cs b/src/Cnblogs.DashScope.Core/TextChatMessage.cs
similarity index 71%
rename from src/Cnblogs.DashScope.Core/ChatMessage.cs
rename to src/Cnblogs.DashScope.Core/TextChatMessage.cs
index 0c3c0be..71d6fd8 100644
--- a/src/Cnblogs.DashScope.Core/ChatMessage.cs
+++ b/src/Cnblogs.DashScope.Core/TextChatMessage.cs
@@ -12,7 +12,7 @@ namespace Cnblogs.DashScope.Core;
/// Notify model that next message should use this message as prefix.
/// Calls to the function.
[method: JsonConstructor]
-public record ChatMessage(
+public record TextChatMessage(
string Role,
string Content,
string? Name = null,
@@ -23,7 +23,7 @@ public record ChatMessage(
/// Create chat message from an uploaded DashScope file.
///
/// The id of the file.
- public ChatMessage(DashScopeFileId fileId)
+ public TextChatMessage(DashScopeFileId fileId)
: this("system", fileId.ToUrl())
{
}
@@ -32,7 +32,7 @@ public ChatMessage(DashScopeFileId fileId)
/// Create chat message from multiple DashScope file.
///
/// Ids of the files.
- public ChatMessage(IEnumerable fileIds)
+ public TextChatMessage(IEnumerable fileIds)
: this("system", string.Join(',', fileIds.Select(f => f.ToUrl())))
{
}
@@ -42,9 +42,9 @@ public ChatMessage(IEnumerable fileIds)
///
/// The id of the file.
///
- public static ChatMessage File(DashScopeFileId fileId)
+ public static TextChatMessage File(DashScopeFileId fileId)
{
- return new ChatMessage(fileId);
+ return new TextChatMessage(fileId);
}
///
@@ -52,9 +52,9 @@ public static ChatMessage File(DashScopeFileId fileId)
///
/// The file id list.
///
- public static ChatMessage File(IEnumerable fileIds)
+ public static TextChatMessage File(IEnumerable fileIds)
{
- return new ChatMessage(fileIds);
+ return new TextChatMessage(fileIds);
}
///
@@ -63,9 +63,9 @@ public static ChatMessage File(IEnumerable fileIds)
/// Content of the message.
/// Author name.
///
- public static ChatMessage User(string content, string? name = null)
+ public static TextChatMessage User(string content, string? name = null)
{
- return new ChatMessage(DashScopeRoleNames.User, content, name);
+ return new TextChatMessage(DashScopeRoleNames.User, content, name);
}
///
@@ -73,9 +73,9 @@ public static ChatMessage User(string content, string? name = null)
///
/// The content of the message.
///
- public static ChatMessage System(string content)
+ public static TextChatMessage System(string content)
{
- return new ChatMessage(DashScopeRoleNames.System, content);
+ return new TextChatMessage(DashScopeRoleNames.System, content);
}
///
@@ -86,9 +86,9 @@ public static ChatMessage System(string content)
/// Author name.
/// Tool calls by model.
///
- public static ChatMessage Assistant(string content, bool? partial = null, string? name = null, List? toolCalls = null)
+ public static TextChatMessage Assistant(string content, bool? partial = null, string? name = null, List? toolCalls = null)
{
- return new ChatMessage(DashScopeRoleNames.Assistant, content, name, partial, toolCalls);
+ return new TextChatMessage(DashScopeRoleNames.Assistant, content, name, partial, toolCalls);
}
///
@@ -97,8 +97,8 @@ public static ChatMessage Assistant(string content, bool? partial = null, string
/// The output from tool.
/// The name of the tool.
///
- public static ChatMessage Tool(string content, string? name = null)
+ public static TextChatMessage Tool(string content, string? name = null)
{
- return new ChatMessage(DashScopeRoleNames.Tool, content, name);
+ return new TextChatMessage(DashScopeRoleNames.Tool, content, name);
}
}
diff --git a/src/Cnblogs.DashScope.Core/TextGenerationChoice.cs b/src/Cnblogs.DashScope.Core/TextGenerationChoice.cs
index 85ac92e..6f5d7ab 100644
--- a/src/Cnblogs.DashScope.Core/TextGenerationChoice.cs
+++ b/src/Cnblogs.DashScope.Core/TextGenerationChoice.cs
@@ -13,5 +13,5 @@ public class TextGenerationChoice
///
/// The generated message.
///
- public required ChatMessage Message { get; set; }
+ public required TextChatMessage Message { get; set; }
}
diff --git a/src/Cnblogs.DashScope.Core/TextGenerationInput.cs b/src/Cnblogs.DashScope.Core/TextGenerationInput.cs
index f3d7384..a3d0a34 100644
--- a/src/Cnblogs.DashScope.Core/TextGenerationInput.cs
+++ b/src/Cnblogs.DashScope.Core/TextGenerationInput.cs
@@ -13,7 +13,7 @@ public class TextGenerationInput
///
/// The collection of context messages associated with this chat completions request.
///
- public IEnumerable? Messages { get; set; }
+ public IEnumerable? Messages { get; set; }
///
/// Available tools for model to use.
diff --git a/src/Cnblogs.DashScope.Core/ToolCall.cs b/src/Cnblogs.DashScope.Core/ToolCall.cs
index e795d08..34450dc 100644
--- a/src/Cnblogs.DashScope.Core/ToolCall.cs
+++ b/src/Cnblogs.DashScope.Core/ToolCall.cs
@@ -5,5 +5,6 @@
///
/// Id of this tool call.
/// Type of the tool.
+/// Index of this tool in input tool list.
/// Not null if type is function.
-public record ToolCall(string? Id, string Type, FunctionCall? Function);
+public record ToolCall(string? Id, string Type, int Index, FunctionCall Function);
diff --git a/src/Cnblogs.DashScope.Sdk/BaiChuan/BaiChuanTextGenerationApi.cs b/src/Cnblogs.DashScope.Sdk/BaiChuan/BaiChuanTextGenerationApi.cs
index 7dc3feb..ef3ec60 100644
--- a/src/Cnblogs.DashScope.Sdk/BaiChuan/BaiChuanTextGenerationApi.cs
+++ b/src/Cnblogs.DashScope.Sdk/BaiChuan/BaiChuanTextGenerationApi.cs
@@ -54,7 +54,7 @@ public static Task
public static Task> GetBaiChuanTextCompletionAsync(
this IDashScopeClient client,
BaiChuan2Llm llm,
- IEnumerable messages,
+ IEnumerable messages,
string? resultFormat = null)
{
return client.GetBaiChuanTextCompletionAsync(llm.GetModelName(), messages, resultFormat);
@@ -71,7 +71,7 @@ public static Task
public static Task> GetBaiChuanTextCompletionAsync(
this IDashScopeClient client,
string llm,
- IEnumerable messages,
+ IEnumerable messages,
string? resultFormat = null)
{
return client.GetTextCompletionAsync(
diff --git a/src/Cnblogs.DashScope.Sdk/Llama2/Llama2TextGenerationApi.cs b/src/Cnblogs.DashScope.Sdk/Llama2/Llama2TextGenerationApi.cs
index 67d19d5..5fa9b45 100644
--- a/src/Cnblogs.DashScope.Sdk/Llama2/Llama2TextGenerationApi.cs
+++ b/src/Cnblogs.DashScope.Sdk/Llama2/Llama2TextGenerationApi.cs
@@ -19,7 +19,7 @@ public static async Task messages,
+ IEnumerable messages,
string? resultFormat = null)
{
return await client.GetLlama2TextCompletionAsync(model.GetModelName(), messages, resultFormat);
@@ -37,7 +37,7 @@ public static async Task messages,
+ IEnumerable messages,
string? resultFormat = null)
{
return await client.GetTextCompletionAsync(
diff --git a/src/Cnblogs.DashScope.Sdk/QWen/QWenTextGenerationApi.cs b/src/Cnblogs.DashScope.Sdk/QWen/QWenTextGenerationApi.cs
index d271e68..00e6f3d 100644
--- a/src/Cnblogs.DashScope.Sdk/QWen/QWenTextGenerationApi.cs
+++ b/src/Cnblogs.DashScope.Sdk/QWen/QWenTextGenerationApi.cs
@@ -20,7 +20,7 @@ public static class QWenTextGenerationApi
public static IAsyncEnumerable> GetQWenChatStreamAsync(
this IDashScopeClient dashScopeClient,
QWenLlm model,
- IEnumerable messages,
+ IEnumerable messages,
TextGenerationParameters? parameters = null,
CancellationToken cancellationToken = default)
{
@@ -48,7 +48,7 @@ public static IAsyncEnumerable> GetQWenChatStreamAsync(
this IDashScopeClient dashScopeClient,
string model,
- IEnumerable messages,
+ IEnumerable messages,
TextGenerationParameters? parameters = null,
CancellationToken cancellationToken = default)
{
@@ -75,7 +75,7 @@ public static IAsyncEnumerable> GetQWenChatCompletionAsync(
this IDashScopeClient dashScopeClient,
QWenLlm model,
- IEnumerable messages,
+ IEnumerable messages,
TextGenerationParameters? parameters = null,
CancellationToken cancellationToken = default)
{
@@ -99,7 +99,7 @@ public static Task
public static Task> GetQWenChatCompletionAsync(
this IDashScopeClient dashScopeClient,
string model,
- IEnumerable messages,
+ IEnumerable messages,
TextGenerationParameters? parameters = null,
CancellationToken cancellationToken = default)
{
diff --git a/src/Cnblogs.DashScope.Sdk/TextEmbedding/TextEmbeddingModelNames.cs b/src/Cnblogs.DashScope.Sdk/TextEmbedding/TextEmbeddingModelNames.cs
index 7820e51..410c33e 100644
--- a/src/Cnblogs.DashScope.Sdk/TextEmbedding/TextEmbeddingModelNames.cs
+++ b/src/Cnblogs.DashScope.Sdk/TextEmbedding/TextEmbeddingModelNames.cs
@@ -9,7 +9,7 @@ public static string GetModelName(this TextEmbeddingModel model)
TextEmbeddingModel.TextEmbeddingV1 => "text-embedding-v1",
TextEmbeddingModel.TextEmbeddingV2 => "text-embedding-v2",
TextEmbeddingModel.TextEmbeddingV3 => "text-embedding-v3",
- _ => ThrowHelper.UnknownModelName(nameof(model), model)
+ _ => ThrowHelper.UnknownModelName(nameof(model), model),
};
}
}
diff --git a/test/Cnblogs.DashScope.Sdk.UnitTests/ChatClientTests.cs b/test/Cnblogs.DashScope.Sdk.UnitTests/ChatClientTests.cs
new file mode 100644
index 0000000..9a58f70
--- /dev/null
+++ b/test/Cnblogs.DashScope.Sdk.UnitTests/ChatClientTests.cs
@@ -0,0 +1,190 @@
+using System.Text;
+using Cnblogs.DashScope.Core;
+using Cnblogs.DashScope.Sdk.UnitTests.Utils;
+using FluentAssertions;
+using Microsoft.Extensions.AI;
+using NSubstitute;
+using NSubstitute.Extensions;
+
+namespace Cnblogs.DashScope.Sdk.UnitTests;
+
+public class ChatClientTests
+{
+ [Fact]
+ public async Task ChatClient_TextCompletion_SuccessAsync()
+ {
+ // Arrange
+ var testCase = Snapshots.TextGeneration.MessageFormat.SingleChatClientMessage;
+ var dashScopeClient = Substitute.For();
+ dashScopeClient.Configure()
+ .GetTextCompletionAsync(
+ Arg.Any>(),
+ Arg.Any())
+ .Returns(Task.FromResult(testCase.ResponseModel));
+ var client = dashScopeClient.AsChatClient(testCase.RequestModel.Model);
+ var content = testCase.RequestModel.Input.Messages!.First().Content;
+ var parameter = testCase.RequestModel.Parameters;
+
+ // Act
+ var response = await client.CompleteAsync(
+ content,
+ new ChatOptions()
+ {
+ FrequencyPenalty = parameter?.RepetitionPenalty,
+ PresencePenalty = parameter?.PresencePenalty,
+ ModelId = testCase.RequestModel.Model,
+ MaxOutputTokens = parameter?.MaxTokens,
+ Seed = (long?)parameter?.Seed,
+ Temperature = parameter?.Temperature,
+ TopK = parameter?.TopK,
+ TopP = parameter?.TopP,
+ ToolMode = ChatToolMode.Auto
+ });
+
+ // Assert
+ _ = dashScopeClient.Received().GetTextCompletionAsync(
+ Arg.Is>(
+ m => m.IsEquivalent(testCase.RequestModel)),
+ Arg.Any());
+ response.Message.Text.Should().Be(testCase.ResponseModel.Output.Choices?.First().Message.Content);
+ }
+
+ [Fact]
+ public async Task ChatClient_TextCompletionStream_SuccessAsync()
+ {
+ // Arrange
+ var testCase = Snapshots.TextGeneration.MessageFormat.SingleMessageChatClientIncremental;
+ var dashScopeClient = Substitute.For();
+ var returnThis = new[] { testCase.ResponseModel }.ToAsyncEnumerable();
+ dashScopeClient
+ .Configure()
+ .GetTextCompletionStreamAsync(
+ Arg.Any>(),
+ Arg.Any())
+ .Returns(returnThis);
+ var client = dashScopeClient.AsChatClient(testCase.RequestModel.Model);
+ var content = testCase.RequestModel.Input.Messages!.First().Content;
+ var parameter = testCase.RequestModel.Parameters;
+
+ // Act
+ var response = client.CompleteStreamingAsync(
+ content,
+ new ChatOptions()
+ {
+ FrequencyPenalty = parameter?.RepetitionPenalty,
+ PresencePenalty = parameter?.PresencePenalty,
+ ModelId = testCase.RequestModel.Model,
+ MaxOutputTokens = parameter?.MaxTokens,
+ Seed = (long?)parameter?.Seed,
+ Temperature = parameter?.Temperature,
+ TopK = parameter?.TopK,
+ TopP = parameter?.TopP,
+ StopSequences = ["你好"],
+ ToolMode = ChatToolMode.Auto
+ });
+ var text = new StringBuilder();
+ await foreach (var update in response)
+ {
+ text.Append(update.Text);
+ }
+
+ // Assert
+ _ = dashScopeClient.Received().GetTextCompletionStreamAsync(
+ Arg.Is>(
+ m => m.IsEquivalent(testCase.RequestModel)),
+ Arg.Any());
+ text.ToString().Should().Be(testCase.ResponseModel.Output.Choices?.First().Message.Content);
+ }
+
+ [Fact]
+ public async Task ChatClient_ImageRecognition_SuccessAsync()
+ {
+ // Arrange
+ var testCase = Snapshots.MultimodalGeneration.VlChatClientNoSse;
+ var dashScopeClient = Substitute.For();
+ dashScopeClient.Configure()
+ .GetMultimodalGenerationAsync(
+ Arg.Any>(),
+ Arg.Any())
+ .Returns(Task.FromResult(testCase.ResponseModel));
+ var client = dashScopeClient.AsChatClient(testCase.RequestModel.Model);
+ var contents = testCase.RequestModel.Input.Messages.Last().Content;
+ var messages = new List
+ {
+ new(
+ ChatRole.User,
+ [new ImageContent(contents[0].Image!), new TextContent(contents[1].Text)])
+ };
+ var parameter = testCase.RequestModel.Parameters;
+
+ // Act
+ var response = await client.CompleteAsync(
+ messages,
+ new ChatOptions
+ {
+ FrequencyPenalty = parameter?.RepetitionPenalty,
+ PresencePenalty = parameter?.PresencePenalty,
+ ModelId = testCase.RequestModel.Model,
+ MaxOutputTokens = parameter?.MaxTokens,
+ Seed = (long?)parameter?.Seed,
+ Temperature = parameter?.Temperature,
+ TopK = parameter?.TopK,
+ TopP = parameter?.TopP,
+ });
+
+ // Assert
+ await dashScopeClient.Received().GetMultimodalGenerationAsync(
+ Arg.Is>(m => m.IsEquivalent(testCase.RequestModel)),
+ Arg.Any());
+ response.Choices[0].Text.Should()
+ .BeEquivalentTo(testCase.ResponseModel.Output.Choices[0].Message.Content[0].Text);
+ }
+
+ [Fact]
+ public async Task ChatClient_ImageRecognitionStream_SuccessAsync()
+ {
+ // Arrange
+ var testCase = Snapshots.MultimodalGeneration.VlChatClientSse;
+ var dashScopeClient = Substitute.For();
+ dashScopeClient.Configure()
+ .GetMultimodalGenerationStreamAsync(
+ Arg.Any>(),
+ Arg.Any())
+ .Returns(new[] { testCase.ResponseModel }.ToAsyncEnumerable());
+ var client = dashScopeClient.AsChatClient(testCase.RequestModel.Model);
+ var contents = testCase.RequestModel.Input.Messages.Last().Content;
+ var messages = new List
+ {
+ new(
+ ChatRole.User,
+ [new ImageContent(contents[0].Image!), new TextContent(contents[1].Text)])
+ };
+ var parameter = testCase.RequestModel.Parameters;
+
+ // Act
+ var response = client.CompleteStreamingAsync(
+ messages,
+ new ChatOptions()
+ {
+ FrequencyPenalty = parameter?.RepetitionPenalty,
+ PresencePenalty = parameter?.PresencePenalty,
+ ModelId = testCase.RequestModel.Model,
+ MaxOutputTokens = parameter?.MaxTokens,
+ Seed = (long?)parameter?.Seed,
+ Temperature = parameter?.Temperature,
+ TopK = parameter?.TopK,
+ TopP = parameter?.TopP,
+ });
+ var text = new StringBuilder();
+ await foreach (var update in response)
+ {
+ text.Append(update.Text);
+ }
+
+ // Assert
+ _ = dashScopeClient.Received().GetMultimodalGenerationStreamAsync(
+ Arg.Is>(m => m.IsEquivalent(testCase.RequestModel)),
+ Arg.Any());
+ text.ToString().Should().Be(testCase.ResponseModel.Output.Choices.First().Message.Content[0].Text);
+ }
+}
diff --git a/test/Cnblogs.DashScope.Sdk.UnitTests/Cnblogs.DashScope.Sdk.UnitTests.csproj b/test/Cnblogs.DashScope.Sdk.UnitTests/Cnblogs.DashScope.Sdk.UnitTests.csproj
index fb88498..7a58995 100644
--- a/test/Cnblogs.DashScope.Sdk.UnitTests/Cnblogs.DashScope.Sdk.UnitTests.csproj
+++ b/test/Cnblogs.DashScope.Sdk.UnitTests/Cnblogs.DashScope.Sdk.UnitTests.csproj
@@ -33,6 +33,7 @@
+
diff --git a/test/Cnblogs.DashScope.Sdk.UnitTests/EmbeddingClientTests.cs b/test/Cnblogs.DashScope.Sdk.UnitTests/EmbeddingClientTests.cs
new file mode 100644
index 0000000..96e21b4
--- /dev/null
+++ b/test/Cnblogs.DashScope.Sdk.UnitTests/EmbeddingClientTests.cs
@@ -0,0 +1,43 @@
+using Cnblogs.DashScope.Core;
+using Cnblogs.DashScope.Sdk.UnitTests.Utils;
+using FluentAssertions;
+using Microsoft.Extensions.AI;
+using NSubstitute;
+using NSubstitute.Extensions;
+
+namespace Cnblogs.DashScope.Sdk.UnitTests;
+
+public class EmbeddingClientTests
+{
+ [Fact]
+ public async Task EmbeddingClient_Text_SuccessAsync()
+ {
+ // Arrange
+ var testCase = Snapshots.TextEmbedding.EmbeddingClientNoSse;
+ var dashScopeClient = Substitute.For();
+ dashScopeClient.Configure()
+ .GetEmbeddingsAsync(
+ Arg.Any>(),
+ Arg.Any())
+ .Returns(Task.FromResult(testCase.ResponseModel));
+ var client = dashScopeClient.AsEmbeddingGenerator(testCase.RequestModel.Model, 1024);
+ var content = testCase.RequestModel.Input.Texts.ToList();
+ var parameter = testCase.RequestModel.Parameters;
+
+ // Act
+ var response = await client.GenerateAsync(
+ content,
+ new EmbeddingGenerationOptions()
+ {
+ ModelId = testCase.RequestModel.Model, Dimensions = parameter?.Dimension
+ });
+
+ // Assert
+ _ = dashScopeClient.Received().GetEmbeddingsAsync(
+ Arg.Is>(
+ m => m.IsEquivalent(testCase.RequestModel)),
+ Arg.Any());
+ response.Select(x => x.Vector.ToArray()).Should()
+ .BeEquivalentTo(testCase.ResponseModel.Output.Embeddings.Select(x => x.Embedding));
+ }
+}
diff --git a/test/Cnblogs.DashScope.Sdk.UnitTests/TextGenerationSerializationTests.cs b/test/Cnblogs.DashScope.Sdk.UnitTests/TextGenerationSerializationTests.cs
index 4cb54b0..eda61e4 100644
--- a/test/Cnblogs.DashScope.Sdk.UnitTests/TextGenerationSerializationTests.cs
+++ b/test/Cnblogs.DashScope.Sdk.UnitTests/TextGenerationSerializationTests.cs
@@ -141,7 +141,8 @@ public async Task ConversationCompletion_MessageFormatSse_SuccessAsync(
public static readonly TheoryData,
ModelResponse>> SingleGenerationMessageFormatData = new(
Snapshots.TextGeneration.MessageFormat.SingleMessage,
- Snapshots.TextGeneration.MessageFormat.SingleMessageWithTools);
+ Snapshots.TextGeneration.MessageFormat.SingleMessageWithTools,
+ Snapshots.TextGeneration.MessageFormat.SingleMessageJson);
public static readonly TheoryData,
ModelResponse>> ConversationMessageFormatSseData = new(
diff --git a/test/Cnblogs.DashScope.Sdk.UnitTests/Utils/Cases.cs b/test/Cnblogs.DashScope.Sdk.UnitTests/Utils/Cases.cs
index ef89ef4..79daec6 100644
--- a/test/Cnblogs.DashScope.Sdk.UnitTests/Utils/Cases.cs
+++ b/test/Cnblogs.DashScope.Sdk.UnitTests/Utils/Cases.cs
@@ -10,6 +10,6 @@ internal class Cases
public const string Uuid = "33da8e6b-1309-9a44-be83-352165959608";
public const string ImageUrl = "https://www.cnblogs.com/image.png";
- public static readonly List<ChatMessage> TextMessages =
- [ChatMessage.System("you are a helpful assistant"), ChatMessage.User("hello")];
+ public static readonly List<TextChatMessage> TextMessages =
+ [TextChatMessage.System("you are a helpful assistant"), TextChatMessage.User("hello")];
}
diff --git a/test/Cnblogs.DashScope.Sdk.UnitTests/Utils/EquivalentUtils.cs b/test/Cnblogs.DashScope.Sdk.UnitTests/Utils/EquivalentUtils.cs
new file mode 100644
index 0000000..70f6e54
--- /dev/null
+++ b/test/Cnblogs.DashScope.Sdk.UnitTests/Utils/EquivalentUtils.cs
@@ -0,0 +1,20 @@
+using FluentAssertions;
+
+namespace Cnblogs.DashScope.Sdk.UnitTests.Utils;
+
+public static class EquivalentUtils
+{
+ internal static bool IsEquivalent<T>(this T left, T right)
+ {
+ try
+ {
+ left.Should().BeEquivalentTo(right);
+ }
+ catch (Exception)
+ {
+ return false;
+ }
+
+ return true;
+ }
+}
diff --git a/test/Cnblogs.DashScope.Sdk.UnitTests/Utils/Snapshots.cs b/test/Cnblogs.DashScope.Sdk.UnitTests/Utils/Snapshots.cs
index 8ed0fb0..b5e2483 100644
--- a/test/Cnblogs.DashScope.Sdk.UnitTests/Utils/Snapshots.cs
+++ b/test/Cnblogs.DashScope.Sdk.UnitTests/Utils/Snapshots.cs
@@ -191,7 +191,7 @@ public static class MessageFormat
{
Model = "qwen-max",
Input =
- new TextGenerationInput { Messages = [ChatMessage.User("请问 1+1 是多少?")] },
+ new TextGenerationInput { Messages = [TextChatMessage.User("请问 1+1 是多少?")] },
Parameters = new TextGenerationParameters
{
ResultFormat = "message",
@@ -215,7 +215,51 @@ public static class MessageFormat
new TextGenerationChoice
{
FinishReason = "stop",
- Message = ChatMessage.Assistant(
+ Message = TextChatMessage.Assistant(
+ "1+1 等于 2。这是最基本的数学加法之一,在十进制计数体系中,任何两个相同的数字相加都等于该数字的二倍。")
+ }
+ ]
+ },
+ RequestId = "e764bfe3-c0b7-97a0-ae57-cd99e1580960",
+ Usage = new TextGenerationTokenUsage
+ {
+ TotalTokens = 47,
+ OutputTokens = 39,
+ InputTokens = 8
+ }
+ });
+
+ public static readonly RequestSnapshot,
+ ModelResponse>
+ SingleChatClientMessage = new(
+ "single-generation-message",
+ new ModelRequest
+ {
+ Model = "qwen-max",
+ Input =
+ new TextGenerationInput { Messages = [TextChatMessage.User("请问 1+1 是多少?")] },
+ Parameters = new TextGenerationParameters
+ {
+ ResultFormat = "message",
+ Seed = 1234,
+ MaxTokens = 1500,
+ TopP = 0.8f,
+ TopK = 100,
+ RepetitionPenalty = 1.1f,
+ Temperature = 0.85f,
+ ToolChoice = ToolChoice.AutoChoice
+ }
+ },
+ new ModelResponse
+ {
+ Output = new TextGenerationOutput
+ {
+ Choices =
+ [
+ new TextGenerationChoice
+ {
+ FinishReason = "stop",
+ Message = TextChatMessage.Assistant(
"1+1 等于 2。这是最基本的数学加法之一,在十进制计数体系中,任何两个相同的数字相加都等于该数字的二倍。")
}
]
@@ -237,7 +281,7 @@ public static class MessageFormat
{
Model = "qwen-max",
Input =
- new TextGenerationInput { Messages = [ChatMessage.User("请问 1+1 是多少?用 JSON 格式输出。")] },
+ new TextGenerationInput { Messages = [TextChatMessage.User("请问 1+1 是多少?用 JSON 格式输出。")] },
Parameters = new TextGenerationParameters
{
ResultFormat = "message",
@@ -262,7 +306,7 @@ public static class MessageFormat
new TextGenerationChoice
{
FinishReason = "stop",
- Message = ChatMessage.Assistant("{\\n \\\"result\\\": 2\\n}")
+ Message = TextChatMessage.Assistant("{\n \"result\": 2\n}")
}
]
},
@@ -283,7 +327,7 @@ public static class MessageFormat
{
Model = "qwen-max",
Input =
- new TextGenerationInput { Messages = [ChatMessage.User("请问 1+1 是多少?")] },
+ new TextGenerationInput { Messages = [TextChatMessage.User("请问 1+1 是多少?")] },
Parameters = new TextGenerationParameters
{
ResultFormat = "message",
@@ -307,7 +351,53 @@ public static class MessageFormat
new TextGenerationChoice
{
FinishReason = "stop",
- Message = ChatMessage.Assistant(
+ Message = TextChatMessage.Assistant(
+ "1+1 等于 2。这是最基本的数学加法之一,在十进制计数体系中,任何情况下 1 加上另一个 1 的结果都是 2。")
+ }
+ ]
+ },
+ RequestId = "d272255f-82d7-9cc7-93c5-17ff77024349",
+ Usage = new TextGenerationTokenUsage
+ {
+ TotalTokens = 48,
+ OutputTokens = 40,
+ InputTokens = 8
+ }
+ });
+
+ public static readonly RequestSnapshot,
+ ModelResponse>
+ SingleMessageChatClientIncremental = new(
+ "single-generation-message",
+ new ModelRequest
+ {
+ Model = "qwen-max",
+ Input =
+ new TextGenerationInput { Messages = [TextChatMessage.User("请问 1+1 是多少?")] },
+ Parameters = new TextGenerationParameters
+ {
+ ResultFormat = "message",
+ Seed = 1234,
+ MaxTokens = 1500,
+ TopP = 0.8f,
+ TopK = 100,
+ RepetitionPenalty = 1.1f,
+ Temperature = 0.85f,
+ Stop = new[] { "你好" },
+ IncrementalOutput = true,
+ ToolChoice = ToolChoice.AutoChoice
+ }
+ },
+ new ModelResponse
+ {
+ Output = new TextGenerationOutput
+ {
+ Choices =
+ [
+ new TextGenerationChoice
+ {
+ FinishReason = "stop",
+ Message = TextChatMessage.Assistant(
"1+1 等于 2。这是最基本的数学加法之一,在十进制计数体系中,任何情况下 1 加上另一个 1 的结果都是 2。")
}
]
@@ -329,7 +419,7 @@ public static readonly
new ModelRequest
{
Model = "qwen-max",
- Input = new TextGenerationInput { Messages = [ChatMessage.User("杭州现在的天气如何?")] },
+ Input = new TextGenerationInput { Messages = [TextChatMessage.User("杭州现在的天气如何?")] },
Parameters = new TextGenerationParameters()
{
ResultFormat = "message",
@@ -369,13 +459,83 @@ public static readonly
new TextGenerationChoice
{
FinishReason = "stop",
- Message = ChatMessage.Assistant(
+ Message = TextChatMessage.Assistant(
+ string.Empty,
+ toolCalls:
+ [
+ new ToolCall(
+ "call_cec4c19d27624537b583af",
+ ToolTypes.Function,
+ 0,
+ new FunctionCall(
+ "get_current_weather",
+ """{"location": "浙江省杭州市"}"""))
+ ])
+ }
+ ]
+ },
+ RequestId = "67300049-c108-9987-b1c1-8e0ee2de6b5d",
+ Usage = new TextGenerationTokenUsage
+ {
+ InputTokens = 211,
+ OutputTokens = 8,
+ TotalTokens = 219
+ }
+ });
+
+ public static readonly
+ RequestSnapshot,
+ ModelResponse> SingleMessageChatClientWithTools =
+ new(
+ "single-generation-message-with-tools",
+ new ModelRequest
+ {
+ Model = "qwen-max",
+ Input = new TextGenerationInput { Messages = [TextChatMessage.User("杭州现在的天气如何?")] },
+ Parameters = new TextGenerationParameters()
+ {
+ ResultFormat = "message",
+ Seed = 1234,
+ MaxTokens = 1500,
+ TopP = 0.8f,
+ TopK = 100,
+ RepetitionPenalty = 1.1f,
+ PresencePenalty = 1.2f,
+ Temperature = 0.85f,
+ Tools =
+ [
+ new ToolDefinition(
+ "function",
+ new FunctionDefinition(
+ "get_current_weather",
+ "获取现在的天气",
+ new JsonSchemaBuilder().FromType(
+ new SchemaGeneratorConfiguration
+ {
+ PropertyNameResolver = PropertyNameResolvers.LowerSnakeCase
+ })
+ .Build()))
+ ],
+ ToolChoice = ToolChoice.FunctionChoice("get_current_weather")
+ }
+ },
+ new ModelResponse
+ {
+ Output = new TextGenerationOutput
+ {
+ Choices =
+ [
+ new TextGenerationChoice
+ {
+ FinishReason = "stop",
+ Message = TextChatMessage.Assistant(
string.Empty,
toolCalls:
[
new ToolCall(
"call_cec4c19d27624537b583af",
ToolTypes.Function,
+ 0,
new FunctionCall(
"get_current_weather",
"""{"location": "浙江省杭州市"}"""))
@@ -403,8 +563,8 @@ public static readonly
{
Messages =
[
- ChatMessage.User("请对“春天来了,大地”这句话进行续写,来表达春天的美好和作者的喜悦之情"),
- ChatMessage.Assistant("春天来了,大地", true)
+ TextChatMessage.User("请对“春天来了,大地”这句话进行续写,来表达春天的美好和作者的喜悦之情"),
+ TextChatMessage.Assistant("春天来了,大地", true)
]
},
Parameters = new TextGenerationParameters()
@@ -430,7 +590,7 @@ public static readonly
{
FinishReason = "stop",
Message =
- ChatMessage.Assistant(
+ TextChatMessage.Assistant(
"仿佛从漫长的冬眠中苏醒过来,万物复苏。嫩绿的小草悄悄地探出了头,争先恐后地想要沐浴在温暖的阳光下;五彩斑斓的花朵也不甘示弱,竞相绽放着自己最美丽的姿态,将田野、山林装扮得分外妖娆。微风轻轻吹过,带来了泥土的气息与花香混合的独特香味,让人心旷神怡。小鸟们开始忙碌起来,在枝头欢快地歌唱,似乎也在庆祝这个充满希望的新季节的到来。这一切美好景象不仅让人感受到了大自然的魅力所在,更激发了人们对生活无限热爱和向往的心情。")
}
]
@@ -455,9 +615,9 @@ public static readonly
{
Messages =
[
- ChatMessage.User("现在请你记住一个数字,42"),
- ChatMessage.Assistant("好的,我已经记住了这个数字。"),
- ChatMessage.User("请问我刚才提到的数字是多少?")
+ TextChatMessage.User("现在请你记住一个数字,42"),
+ TextChatMessage.Assistant("好的,我已经记住了这个数字。"),
+ TextChatMessage.User("请问我刚才提到的数字是多少?")
]
},
Parameters = new TextGenerationParameters
@@ -482,7 +642,7 @@ public static readonly
[
new TextGenerationChoice
{
- FinishReason = "stop", Message = ChatMessage.Assistant("您刚才提到的数字是42。")
+ FinishReason = "stop", Message = TextChatMessage.Assistant("您刚才提到的数字是42。")
}
]
},
@@ -507,9 +667,9 @@ public static readonly
{
Messages =
[
- ChatMessage.File(
+ TextChatMessage.File(
["file-fe-WTTG89tIUTd4ByqP3K48R3bn", "file-fe-l92iyRvJm9vHCCfonLckf1o2"]),
- ChatMessage.User("这两个文件是相同的吗?")
+ TextChatMessage.User("这两个文件是相同的吗?")
]
},
Parameters = new TextGenerationParameters
@@ -535,7 +695,7 @@ public static readonly
new TextGenerationChoice
{
FinishReason = "stop",
- Message = ChatMessage.Assistant(
+ Message = TextChatMessage.Assistant(
"你上传的两个文件并不相同。第一个文件`test1.txt`包含两行文本,每行都是“测试”。而第二个文件`test2.txt`只有一行文本,“测试2”。尽管它们都含有“测试”这个词,但具体内容和结构不同。")
}
]
@@ -607,6 +767,56 @@ public static class MultimodalGeneration
}
});
+ public static readonly RequestSnapshot,
+ ModelResponse> VlChatClientNoSse =
+ new(
+ "multimodal-generation-vl",
+ new ModelRequest
+ {
+ Model = "qwen-vl-plus",
+ Input = new MultimodalInput
+ {
+ Messages =
+ [
+ MultimodalMessage.User(
+ [
+ MultimodalMessageContent.ImageContent(
+ "https://dashscope.oss-cn-beijing.aliyuncs.com/images/dog_and_girl.jpeg"),
+ MultimodalMessageContent.TextContent("这个图片是哪里,请用简短的语言回答")
+ ])
+ ]
+ },
+ Parameters = new MultimodalParameters
+ {
+ Seed = 1234,
+ TopK = 100,
+ TopP = 0.81f,
+ Temperature = 1.1f,
+ RepetitionPenalty = 1.3f,
+ PresencePenalty = 1.2f,
+ MaxTokens = 120,
+ }
+ },
+ new ModelResponse
+ {
+ Output = new MultimodalOutput(
+ [
+ new MultimodalChoice(
+ "stop",
+ MultimodalMessage.Assistant(
+ [
+ MultimodalMessageContent.TextContent("海滩。")
+ ]))
+ ]),
+ RequestId = "e81aa922-be6c-9f9d-bd4f-0f43e21fd913",
+ Usage = new MultimodalTokenUsage
+ {
+ OutputTokens = 3,
+ InputTokens = 3613,
+ ImageTokens = 3577
+ }
+ });
+
public static readonly RequestSnapshot,
ModelResponse> VlSse =
new(
@@ -657,6 +867,54 @@ public static class MultimodalGeneration
}
});
+ public static readonly RequestSnapshot,
+ ModelResponse> VlChatClientSse =
+ new(
+ "multimodal-generation-vl",
+ new ModelRequest
+ {
+ Model = "qwen-vl-plus",
+ Input = new MultimodalInput
+ {
+ Messages =
+ [
+ MultimodalMessage.User(
+ [
+ MultimodalMessageContent.ImageContent(
+ "https://dashscope.oss-cn-beijing.aliyuncs.com/images/dog_and_girl.jpeg"),
+ MultimodalMessageContent.TextContent("这个图片是哪里,请用简短的语言回答")
+ ])
+ ]
+ },
+ Parameters = new MultimodalParameters
+ {
+ IncrementalOutput = true,
+ Seed = 1234,
+ TopK = 100,
+ TopP = 0.81f,
+ }
+ },
+ new ModelResponse
+ {
+ Output = new MultimodalOutput(
+ [
+ new MultimodalChoice(
+ "stop",
+ MultimodalMessage.Assistant(
+ [
+ MultimodalMessageContent.TextContent(
+ "这是一个海滩,有沙滩和海浪。在前景中坐着一个女人与她的宠物狗互动。背景中有海水、阳光及远处的海岸线。由于没有具体标识物或地标信息,我无法提供更精确的位置描述。这可能是一个公共海滩或是私人区域。重要的是要注意不要泄露任何个人隐私,并遵守当地的规定和法律法规。欣赏自然美景的同时请尊重环境和其他访客。")
+ ]))
+ ]),
+ RequestId = "13c5644d-339c-928a-a09a-e0414bfaa95c",
+ Usage = new MultimodalTokenUsage
+ {
+ OutputTokens = 85,
+ InputTokens = 1283,
+ ImageTokens = 1247
+ }
+ });
+
public static readonly RequestSnapshot,
ModelResponse>
OcrNoSse = new(
@@ -982,6 +1240,22 @@ public static class TextEmbedding
Usage = new TextEmbeddingTokenUsage(3)
});
+ public static readonly RequestSnapshot,
+ ModelResponse> EmbeddingClientNoSse = new(
+ "text-embedding",
+ new ModelRequest
+ {
+ Input = new TextEmbeddingInput { Texts = ["代码改变世界"] },
+ Model = "text-embedding-v3",
+ Parameters = new TextEmbeddingParameters { Dimension = 1024 }
+ },
+ new ModelResponse
+ {
+ Output = new TextEmbeddingOutput([new TextEmbeddingItem(0, [])]),
+ RequestId = "1773f7b2-2148-9f74-b335-b413e398a116",
+ Usage = new TextEmbeddingTokenUsage(3)
+ });
+
public static readonly
RequestSnapshot,
ModelResponse> BatchNoSse = new(
@@ -1015,7 +1289,7 @@ public static readonly
"tokenization",
new ModelRequest
{
- Input = new TextGenerationInput { Messages = [ChatMessage.User("代码改变世界")] },
+ Input = new TextGenerationInput { Messages = [TextChatMessage.User("代码改变世界")] },
Model = "qwen-max",
Parameters = new TextGenerationParameters { Seed = 1234 }
},