diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml
index 4dd60f452..ff6616c2d 100644
--- a/.github/workflows/ci.yml
+++ b/.github/workflows/ci.yml
@@ -1,12 +1,14 @@
name: CI
on:
push:
- branches-ignore:
- - 'generated'
- - 'codegen/**'
- - 'integrated/**'
- - 'stl-preview-head/**'
- - 'stl-preview-base/**'
+ branches:
+ - '**'
+ - '!integrated/**'
+ - '!stl-preview-head/**'
+ - '!stl-preview-base/**'
+ - '!generated'
+ - '!codegen/**'
+ - 'codegen/stl/**'
pull_request:
branches-ignore:
- 'stl-preview-head/**'
@@ -17,7 +19,7 @@ jobs:
timeout-minutes: 10
name: lint
runs-on: ${{ github.repository == 'stainless-sdks/anthropic-csharp' && 'depot-ubuntu-24.04' || 'ubuntu-latest' }}
- if: github.event_name == 'push' || github.event.pull_request.head.repo.fork
+ if: (github.event_name == 'push' || github.event.pull_request.head.repo.fork)
steps:
- uses: actions/checkout@v6
@@ -36,7 +38,7 @@ jobs:
timeout-minutes: 10
name: build
runs-on: ${{ github.repository == 'stainless-sdks/anthropic-csharp' && 'depot-ubuntu-24.04' || 'ubuntu-latest' }}
- if: github.event_name == 'push' || github.event.pull_request.head.repo.fork
+ if: (github.event_name == 'push' || github.event.pull_request.head.repo.fork)
steps:
- uses: actions/checkout@v6
diff --git a/.gitignore b/.gitignore
index cb0f7844e..1ce7f8483 100644
--- a/.gitignore
+++ b/.gitignore
@@ -1,4 +1,5 @@
.prism.log
+.stdy.log
bin/
obj/
.vs/
diff --git a/.release-please-manifest.json b/.release-please-manifest.json
index 79386130e..9781711eb 100644
--- a/.release-please-manifest.json
+++ b/.release-please-manifest.json
@@ -1,6 +1,6 @@
{
- "src/Anthropic": "12.9.0",
+ "src/Anthropic": "12.10.0",
"src/Anthropic.Foundry": "0.5.0",
- "src/Anthropic.Bedrock": "0.1.0",
+ "src/Anthropic.Bedrock": "0.1.1",
"src/Anthropic.Vertex": "0.1.0"
}
diff --git a/.stats.yml b/.stats.yml
index 8691776c3..6dd5a3a0d 100644
--- a/.stats.yml
+++ b/.stats.yml
@@ -1,4 +1,4 @@
configured_endpoints: 33
-openapi_spec_url: https://storage.googleapis.com/stainless-sdk-openapi-specs/anthropic%2Fanthropic-3f1132b9be2d8218a7255103c40a0fbfc2b6d65db76f160db4477f9221493561.yml
-openapi_spec_hash: 58021ab18daccd5c45a930ffd7d6ab4d
-config_hash: 6debadaa8ff30c5b5ee61176ef42a5fc
+openapi_spec_url: https://storage.googleapis.com/stainless-sdk-openapi-specs/anthropic%2Fanthropic-3088a13fc94214c1b39c66c09fe0af997aed84b412f2fda478af89a9c016f3e6.yml
+openapi_spec_hash: ebeeaa9a9bf7603f0bbcce30389e27ca
+config_hash: 6f5727994013c43f452e6ac0b4d5e92a
diff --git a/scripts/mock b/scripts/mock
index b7f3e9319..fcdc20068 100755
--- a/scripts/mock
+++ b/scripts/mock
@@ -22,9 +22,9 @@ echo "==> Starting mock server with URL ${URL}"
# Run prism mock on the given spec
if [ "$1" == "--daemon" ]; then
# Pre-install the package so the download doesn't eat into the startup timeout
- npm exec --package=@stdy/cli@0.19.3 -- steady --version
+ npm exec --package=@stdy/cli@0.20.1 -- steady --version
- npm exec --package=@stdy/cli@0.19.3 -- steady --host 127.0.0.1 -p 4010 --validator-query-array-format=comma --validator-query-object-format=brackets "$URL" &> .stdy.log &
+ npm exec --package=@stdy/cli@0.20.1 -- steady --host 127.0.0.1 -p 4010 --validator-query-array-format=comma --validator-form-array-format=comma --validator-query-object-format=brackets --validator-form-object-format=brackets "$URL" &> .stdy.log &
# Wait for server to come online via health endpoint (max 30s)
echo -n "Waiting for server"
@@ -53,5 +53,5 @@ if [ "$1" == "--daemon" ]; then
echo
else
- npm exec --package=@stdy/cli@0.19.3 -- steady --host 127.0.0.1 -p 4010 --validator-query-array-format=comma --validator-query-object-format=brackets "$URL"
+ npm exec --package=@stdy/cli@0.20.1 -- steady --host 127.0.0.1 -p 4010 --validator-query-array-format=comma --validator-form-array-format=comma --validator-query-object-format=brackets --validator-form-object-format=brackets "$URL"
fi
diff --git a/scripts/test b/scripts/test
index 101123b67..4ae0e03a4 100755
--- a/scripts/test
+++ b/scripts/test
@@ -47,7 +47,7 @@ elif ! prism_is_running ; then
echo -e "To run the server, pass in the path or url of your OpenAPI"
echo -e "spec to the prism command:"
echo
- echo -e " \$ ${YELLOW}npm exec --package=@stdy/cli@0.19.3 -- steady path/to/your.openapi.yml --host 127.0.0.1 -p 4010 --validator-query-array-format=comma --validator-query-object-format=brackets${NC}"
+ echo -e " \$ ${YELLOW}npm exec --package=@stdy/cli@0.20.1 -- steady path/to/your.openapi.yml --host 127.0.0.1 -p 4010 --validator-query-array-format=comma --validator-form-array-format=comma --validator-query-object-format=brackets --validator-form-object-format=brackets${NC}"
echo
exit 1
diff --git a/src/Anthropic.Bedrock/Anthropic.Bedrock.csproj b/src/Anthropic.Bedrock/Anthropic.Bedrock.csproj
index c237158d8..a3d1005c6 100644
--- a/src/Anthropic.Bedrock/Anthropic.Bedrock.csproj
+++ b/src/Anthropic.Bedrock/Anthropic.Bedrock.csproj
@@ -7,9 +7,13 @@
13.0
Anthropic.Bedrock
- 0.1.0
+ 0.1.1
+
+
+
+
diff --git a/src/Anthropic.Bedrock/CHANGELOG.md b/src/Anthropic.Bedrock/CHANGELOG.md
index 10095107a..35cc054f3 100644
--- a/src/Anthropic.Bedrock/CHANGELOG.md
+++ b/src/Anthropic.Bedrock/CHANGELOG.md
@@ -1,5 +1,13 @@
# Changelog
+## 0.1.1 (2026-03-31)
+
+Full Changelog: [Bedrock-v0.1.0...Bedrock-v0.1.1](https://github.com/anthropics/anthropic-sdk-csharp/compare/Bedrock-v0.1.0...Bedrock-v0.1.1)
+
+### Bug Fixes
+
+* handle oversized SSE events in Bedrock SseEventContentWrapper ([#147](https://github.com/anthropics/anthropic-sdk-csharp/issues/147)) ([dcbf8cc](https://github.com/anthropics/anthropic-sdk-csharp/commit/dcbf8ccb4dc4f1d67fe85e3ff1248517bea6af23))
+
## 0.1.0 (2026-03-16)
Full Changelog: [Bedrock-v0.0.1...Bedrock-v0.1.0](https://github.com/anthropics/anthropic-sdk-csharp/compare/Bedrock-v0.0.1...Bedrock-v0.1.0)
diff --git a/src/Anthropic.Bedrock/SseEventContentWrapper.cs b/src/Anthropic.Bedrock/SseEventContentWrapper.cs
index 0ee5df4c7..31a644a96 100644
--- a/src/Anthropic.Bedrock/SseEventContentWrapper.cs
+++ b/src/Anthropic.Bedrock/SseEventContentWrapper.cs
@@ -45,6 +45,7 @@ protected override bool TryComputeLength(out long length)
private class SseLazyEventStream : Stream
{
private readonly Stream _sourceStream;
+ private Memory _remainder = Memory.Empty;
public SseLazyEventStream(Stream source)
{
@@ -69,6 +70,15 @@ public override async ValueTask ReadAsync(
CancellationToken cancellationToken = default
)
{
+ // Return buffered remainder from a previous oversized event first
+ if (_remainder.Length > 0)
+ {
+ var toCopy = Math.Min(_remainder.Length, buffer.Length);
+ _remainder[..toCopy].CopyTo(buffer);
+ _remainder = _remainder[toCopy..];
+ return toCopy;
+ }
+
var (data, success) = await AwsEventStreamHelpers
.ReadStreamMessage(_sourceStream, cancellationToken)
.ConfigureAwait(false);
@@ -78,8 +88,13 @@ public override async ValueTask ReadAsync(
}
var encodedData = Encoding.UTF8.GetBytes(data!);
- encodedData.CopyTo(buffer);
- return encodedData.Length;
+ var bytesToCopy = Math.Min(encodedData.Length, buffer.Length);
+ encodedData.AsMemory(0, bytesToCopy).CopyTo(buffer);
+ if (bytesToCopy < encodedData.Length)
+ {
+ _remainder = encodedData.AsMemory(bytesToCopy);
+ }
+ return bytesToCopy;
}
#else
public override async Task ReadAsync(
@@ -89,6 +104,15 @@ public override async Task ReadAsync(
CancellationToken cancellationToken
)
{
+ // Return buffered remainder from a previous oversized event first
+ if (_remainder.Length > 0)
+ {
+ var toCopy = Math.Min(_remainder.Length, count);
+ _remainder[..toCopy].CopyTo(buffer.AsMemory(offset));
+ _remainder = _remainder[toCopy..];
+ return toCopy;
+ }
+
var (data, success) = await AwsEventStreamHelpers
.ReadStreamMessage(_sourceStream, cancellationToken)
.ConfigureAwait(false);
@@ -98,8 +122,13 @@ CancellationToken cancellationToken
}
var encodedData = Encoding.UTF8.GetBytes(data!);
- encodedData.CopyTo(buffer, offset);
- return encodedData.Length;
+ var bytesToCopy = Math.Min(encodedData.Length, count);
+ Array.Copy(encodedData, 0, buffer, offset, bytesToCopy);
+ if (bytesToCopy < encodedData.Length)
+ {
+ _remainder = encodedData.AsMemory(bytesToCopy);
+ }
+ return bytesToCopy;
}
#endif
diff --git a/src/Anthropic.Tests/AnthropicClientBetaExtensionsTests.cs b/src/Anthropic.Tests/AnthropicClientBetaExtensionsTests.cs
index 7226e84cc..ec29756bd 100644
--- a/src/Anthropic.Tests/AnthropicClientBetaExtensionsTests.cs
+++ b/src/Anthropic.Tests/AnthropicClientBetaExtensionsTests.cs
@@ -1,5 +1,6 @@
using System;
using System.Collections.Generic;
+using System.IO;
using System.Linq;
using System.Text.Json;
using System.Threading.Tasks;
@@ -450,7 +451,7 @@ public async Task GetResponseAsync_McpToolUseBlock_CreatesCorrectContent()
);
Assert.NotNull(mcpToolCall);
Assert.Equal("mcp_call_123", mcpToolCall.CallId);
- Assert.Equal("search", mcpToolCall.ToolName);
+ Assert.Equal("search", mcpToolCall.Name);
Assert.Equal("my-mcp-server", mcpToolCall.ServerName);
Assert.NotNull(mcpToolCall.Arguments);
Assert.True(mcpToolCall.Arguments.ContainsKey("query"));
@@ -511,305 +512,13 @@ public async Task GetResponseAsync_McpToolResultBlock_WithTextContent()
);
Assert.NotNull(mcpResult);
Assert.Equal("mcp_call_456", mcpResult.CallId);
- Assert.NotNull(mcpResult.Output);
- Assert.Single(mcpResult.Output);
- Assert.Equal("Result from MCP tool", ((TextContent)mcpResult.Output[0]).Text);
+ Assert.NotNull(mcpResult.Outputs);
+ Assert.Single(mcpResult.Outputs);
+ Assert.Equal("Result from MCP tool", ((TextContent)mcpResult.Outputs[0]).Text);
Assert.NotNull(mcpResult.RawRepresentation);
Assert.IsType(mcpResult.RawRepresentation);
}
- [Fact]
- public async Task GetResponseAsync_WithSimpleResponseFormat_ReturnsStructuredJSON()
- {
- VerbatimHttpHandler handler = new(
- expectedRequest: """
- {
- "max_tokens": 1024,
- "model": "claude-sonnet-4-5-20250929",
- "messages": [{
- "role": "user",
- "content": [{
- "type": "text",
- "text": "Tell me about Albert Einstein. Respond with his name and age at death."
- }]
- }],
- "output_config": {
- "format": {
- "type": "json_schema",
- "schema": {
- "type": "object",
- "properties": {
- "name": { "type": "string" },
- "age": { "type": "integer" }
- },
- "required": ["name", "age"],
- "additionalProperties": false
- }
- }
- }
- }
- """,
- actualResponse: """
- {
- "id": "msg_format_01",
- "type": "message",
- "role": "assistant",
- "model": "claude-sonnet-4-5-20250929",
- "content": [{
- "type": "text",
- "text": "{\"name\":\"Albert Einstein\",\"age\":76}"
- }],
- "stop_reason": "end_turn",
- "usage": {
- "input_tokens": 25,
- "output_tokens": 15
- }
- }
- """
- );
-
- IChatClient chatClient = CreateChatClient(handler, "claude-sonnet-4-5-20250929");
-
- ChatOptions options = new()
- {
- ResponseFormat = ChatResponseFormat.ForJsonSchema(
- JsonElement.Parse(
- """
- {
- "type": "object",
- "properties": {
- "name": { "type": "string" },
- "age": { "type": "integer" }
- },
- "required": ["name", "age"]
- }
- """
- ),
- "person_info"
- ),
- };
-
- ChatResponse response = await chatClient.GetResponseAsync(
- "Tell me about Albert Einstein. Respond with his name and age at death.",
- options,
- TestContext.Current.CancellationToken
- );
-
- Assert.NotNull(response);
- TextContent textContent = Assert.IsType(response.Messages[0].Contents[0]);
- Assert.Contains("Einstein", textContent.Text);
- Assert.Contains("76", textContent.Text);
- }
-
- [Fact]
- public async Task GetResponseAsync_WithNestedObjectSchema_ReturnsStructuredJSON()
- {
- VerbatimHttpHandler handler = new(
- expectedRequest: """
- {
- "max_tokens": 1024,
- "model": "claude-sonnet-4-5-20250929",
- "messages": [{
- "role": "user",
- "content": [{
- "type": "text",
- "text": "Tell me about the book '1984' by George Orwell."
- }]
- }],
- "output_config": {
- "format": {
- "type": "json_schema",
- "schema": {
- "type": "object",
- "properties": {
- "title": { "type": "string" },
- "author": {
- "type": "object",
- "properties": {
- "name": { "type": "string" },
- "birth_year": { "type": "integer" }
- },
- "required": ["name", "birth_year"],
- "additionalProperties": false
- },
- "published_year": {
- "type": "integer"
- }
- },
- "required": ["title", "author", "published_year"],
- "additionalProperties": false
- }
- }
- }
- }
- """,
- actualResponse: """
- {
- "id": "msg_format_02",
- "type": "message",
- "role": "assistant",
- "model": "claude-sonnet-4-5-20250929",
- "content": [{
- "type": "text",
- "text": "{\"title\":\"1984\",\"author\":{\"name\":\"George Orwell\",\"birth_year\":1903},\"published_year\":1949}"
- }],
- "stop_reason": "end_turn",
- "usage": {
- "input_tokens": 30,
- "output_tokens": 25
- }
- }
- """
- );
-
- IChatClient chatClient = CreateChatClient(handler, "claude-sonnet-4-5-20250929");
-
- ChatOptions options = new()
- {
- ResponseFormat = ChatResponseFormat.ForJsonSchema(
- JsonElement.Parse(
- """
- {
- "type": "object",
- "properties": {
- "title": { "type": "string" },
- "author": {
- "type": "object",
- "properties": {
- "name": { "type": "string" },
- "birth_year": { "type": "integer" }
- },
- "required": ["name", "birth_year"]
- },
- "published_year": { "type": "integer" }
- },
- "required": ["title", "author", "published_year"]
- }
- """
- ),
- "book_info"
- ),
- };
-
- ChatResponse response = await chatClient.GetResponseAsync(
- "Tell me about the book '1984' by George Orwell.",
- options,
- TestContext.Current.CancellationToken
- );
-
- Assert.NotNull(response);
- TextContent textContent = Assert.IsType(response.Messages[0].Contents[0]);
- Assert.Contains("1984", textContent.Text);
- Assert.Contains("Orwell", textContent.Text);
- Assert.Contains("1903", textContent.Text);
- Assert.Contains("1949", textContent.Text);
- }
-
- [Fact]
- public async Task GetResponseAsync_WithArraySchema_ReturnsStructuredJSON()
- {
- VerbatimHttpHandler handler = new(
- expectedRequest: """
- {
- "max_tokens": 1024,
- "model": "claude-sonnet-4-5-20250929",
- "messages": [{
- "role": "user",
- "content": [{
- "type": "text",
- "text": "List 3 common fruits: apple, orange, and banana."
- }]
- }],
- "output_config": {
- "format": {
- "type": "json_schema",
- "schema": {
- "type": "object",
- "properties": {
- "fruits": {
- "type": "array",
- "items": {
- "type": "object",
- "properties": {
- "name": { "type": "string" },
- "color": { "type": "string" },
- "is_citrus": { "type": "boolean" }
- },
- "required": ["name", "color", "is_citrus"],
- "additionalProperties": false
- }
- }
- },
- "required": ["fruits"],
- "additionalProperties": false
- }
- }
- }
- }
- """,
- actualResponse: """
- {
- "id": "msg_format_03",
- "type": "message",
- "role": "assistant",
- "model": "claude-sonnet-4-5-20250929",
- "content": [{
- "type": "text",
- "text": "{\"fruits\":[{\"name\":\"apple\",\"color\":\"red\",\"is_citrus\":false},{\"name\":\"orange\",\"color\":\"orange\",\"is_citrus\":true},{\"name\":\"banana\",\"color\":\"yellow\",\"is_citrus\":false}]}"
- }],
- "stop_reason": "end_turn",
- "usage": {
- "input_tokens": 35,
- "output_tokens": 40
- }
- }
- """
- );
-
- IChatClient chatClient = CreateChatClient(handler, "claude-sonnet-4-5-20250929");
-
- ChatOptions options = new()
- {
- ResponseFormat = ChatResponseFormat.ForJsonSchema(
- JsonElement.Parse(
- """
- {
- "type": "object",
- "properties": {
- "fruits": {
- "type": "array",
- "items": {
- "type": "object",
- "properties": {
- "name": { "type": "string" },
- "color": { "type": "string" },
- "is_citrus": { "type": "boolean" }
- },
- "required": ["name", "color", "is_citrus"]
- }
- }
- },
- "required": ["fruits"]
- }
- """
- ),
- "fruit_list"
- ),
- };
-
- ChatResponse response = await chatClient.GetResponseAsync(
- "List 3 common fruits: apple, orange, and banana.",
- options,
- TestContext.Current.CancellationToken
- );
-
- Assert.NotNull(response);
- TextContent textContent = Assert.IsType(response.Messages[0].Contents[0]);
- Assert.Contains("apple", textContent.Text);
- Assert.Contains("orange", textContent.Text);
- Assert.Contains("banana", textContent.Text);
- }
-
[Fact]
public async Task GetResponseAsync_WithMultipleBetaToolUnionsAsAITools_FlowsThroughToRequest()
{
@@ -954,71 +663,13 @@ public async Task GetResponseAsync_McpToolResultBlock_WithError()
);
Assert.NotNull(mcpResult);
Assert.Equal("mcp_call_error_1", mcpResult.CallId);
- Assert.NotNull(mcpResult.Output);
- Assert.Single(mcpResult.Output);
+ Assert.NotNull(mcpResult.Outputs);
+ Assert.Single(mcpResult.Outputs);
- ErrorContent errorContent = Assert.IsType(mcpResult.Output[0]);
+ ErrorContent errorContent = Assert.IsType(mcpResult.Outputs[0]);
Assert.Equal("Connection timeout", errorContent.Message);
}
- [Fact]
- public async Task GetResponseAsync_CodeExecutionToolResult_WithError()
- {
- VerbatimHttpHandler handler = new(
- expectedRequest: """
- {
- "max_tokens": 1024,
- "model": "claude-haiku-4-5",
- "messages": [{
- "role": "user",
- "content": [{
- "type": "text",
- "text": "Test code execution error"
- }]
- }]
- }
- """,
- actualResponse: """
- {
- "id": "msg_code_error_01",
- "type": "message",
- "role": "assistant",
- "model": "claude-haiku-4-5",
- "content": [{
- "type": "code_execution_tool_result",
- "tool_use_id": "code_exec_error_1",
- "content": {
- "type": "code_execution_tool_result_error",
- "error_code": "execution_time_exceeded"
- }
- }],
- "stop_reason": "end_turn",
- "usage": {
- "input_tokens": 10,
- "output_tokens": 5
- }
- }
- """
- );
-
- IChatClient chatClient = CreateChatClient(handler, "claude-haiku-4-5");
- ChatResponse response = await chatClient.GetResponseAsync(
- "Test code execution error",
- new(),
- TestContext.Current.CancellationToken
- );
-
- CodeInterpreterToolResultContent codeResult =
- Assert.IsType(response.Messages[0].Contents[0]);
- Assert.NotNull(codeResult);
- Assert.Equal("code_exec_error_1", codeResult.CallId);
- Assert.NotNull(codeResult.Outputs);
- Assert.Single(codeResult.Outputs);
-
- ErrorContent errorContent = Assert.IsType(codeResult.Outputs[0]);
- Assert.Equal("ExecutionTimeExceeded", errorContent.ErrorCode);
- }
-
[Fact]
public async Task GetResponseAsync_WithFunctionResultContent_HostedFileContent()
{
@@ -1111,7 +762,32 @@ public async Task GetResponseAsync_WithFunctionResultContent_HostedFileContent()
}
[Fact]
- public async Task GetResponseAsync_WithAIFunctionTool_AdditionalProperties_FlowsThrough()
+ public void AsAITool_GetService_ReturnsToolUnion()
+ {
+ BetaToolUnion toolUnion = new BetaWebSearchTool20250305()
+ {
+ AllowedDomains = ["example.com"],
+ };
+ AITool aiTool = toolUnion.AsAITool();
+ Assert.Same(toolUnion, aiTool.GetService());
+
+ Assert.Null(aiTool.GetService("key"));
+ Assert.Null(aiTool.GetService());
+
+ Assert.Contains(nameof(BetaWebSearchTool20250305), aiTool.Name);
+ }
+
+ [Fact]
+ public void AsAITool_GetService_ThrowsOnNullServiceType()
+ {
+ AITool aiTool = (
+ (BetaToolUnion)new BetaWebSearchTool20250305() { AllowedDomains = ["example.com"] }
+ ).AsAITool();
+ Assert.Throws(() => aiTool.GetService(null!, null));
+ }
+
+ [Fact]
+ public async Task GetResponseAsync_WithHostedFileContent()
{
VerbatimHttpHandler handler = new(
expectedRequest: """
@@ -1121,224 +797,29 @@ public async Task GetResponseAsync_WithAIFunctionTool_AdditionalProperties_Flows
"messages": [{
"role": "user",
"content": [{
- "type": "text",
- "text": "Use enhanced tool"
- }]
- }],
- "tools": [{
- "name": "enhanced_tool",
- "description": "A tool with additional properties",
- "input_schema": {
- "type": "object",
- "properties": {
- "query": {
- "type": "string"
- }
- },
- "required": ["query"]
- },
- "defer_loading": true,
- "strict": true,
- "input_examples": [
- {
- "query": "example query"
+ "type": "document",
+ "source": {
+ "type": "file",
+ "file_id": "file_abc123"
}
- ],
- "allowed_callers": [
- "direct"
- ]
+ }]
}]
}
""",
actualResponse: """
{
- "id": "msg_enhanced_tool_01",
+ "id": "msg_hosted_file_01",
"type": "message",
"role": "assistant",
"model": "claude-haiku-4-5",
"content": [{
"type": "text",
- "text": "Tool is ready"
+ "text": "I read the hosted file."
}],
"stop_reason": "end_turn",
"usage": {
- "input_tokens": 40,
- "output_tokens": 10
- }
- }
- """
- );
-
- IChatClient chatClient = CreateChatClient(handler, "claude-haiku-4-5");
-
- var enhancedFunction = AIFunctionFactory.Create(
- (string query) => "result",
- new AIFunctionFactoryOptions
- {
- Name = "enhanced_tool",
- Description = "A tool with additional properties",
- AdditionalProperties = new Dictionary
- {
- [nameof(BetaTool.DeferLoading)] = true,
- [nameof(BetaTool.Strict)] = true,
- [nameof(BetaTool.InputExamples)] = new List>
- {
- new() { ["query"] = JsonSerializer.SerializeToElement("example query") },
- },
- [nameof(BetaTool.AllowedCallers)] = new List<
- ApiEnum
- >
- {
- new(JsonSerializer.SerializeToElement("direct")),
- },
- },
- }
- );
-
- ChatOptions options = new() { Tools = [enhancedFunction] };
-
- ChatResponse response = await chatClient.GetResponseAsync(
- "Use enhanced tool",
- options,
- TestContext.Current.CancellationToken
- );
- Assert.NotNull(response);
- }
-
- [Fact]
- public async Task GetResponseAsync_WithAIFunctionTool_PartialAdditionalProperties()
- {
- VerbatimHttpHandler handler = new(
- expectedRequest: """
- {
- "max_tokens": 1024,
- "model": "claude-haiku-4-5",
- "messages": [{
- "role": "user",
- "content": [{
- "type": "text",
- "text": "Use strict tool"
- }]
- }],
- "tools": [{
- "name": "strict_tool",
- "description": "A tool with only strict property",
- "input_schema": {
- "type": "object",
- "properties": {
- "value": {
- "type": "integer"
- }
- },
- "required": ["value"]
- },
- "strict": true
- }]
- }
- """,
- actualResponse: """
- {
- "id": "msg_strict_tool_01",
- "type": "message",
- "role": "assistant",
- "model": "claude-haiku-4-5",
- "content": [{
- "type": "text",
- "text": "Strict mode enabled"
- }],
- "stop_reason": "end_turn",
- "usage": {
- "input_tokens": 35,
- "output_tokens": 8
- }
- }
- """
- );
-
- IChatClient chatClient = CreateChatClient(handler, "claude-haiku-4-5");
-
- var strictFunction = AIFunctionFactory.Create(
- (int value) => value * 2,
- new AIFunctionFactoryOptions
- {
- Name = "strict_tool",
- Description = "A tool with only strict property",
- AdditionalProperties = new Dictionary
- {
- [nameof(BetaTool.Strict)] = true,
- },
- }
- );
-
- ChatOptions options = new() { Tools = [strictFunction] };
-
- ChatResponse response = await chatClient.GetResponseAsync(
- "Use strict tool",
- options,
- TestContext.Current.CancellationToken
- );
- Assert.NotNull(response);
- }
-
- [Fact]
- public void AsAITool_GetService_ReturnsToolUnion()
- {
- BetaToolUnion toolUnion = new BetaWebSearchTool20250305()
- {
- AllowedDomains = ["example.com"],
- };
- AITool aiTool = toolUnion.AsAITool();
- Assert.Same(toolUnion, aiTool.GetService());
-
- Assert.Null(aiTool.GetService("key"));
- Assert.Null(aiTool.GetService());
-
- Assert.Contains(nameof(BetaWebSearchTool20250305), aiTool.Name);
- }
-
- [Fact]
- public void AsAITool_GetService_ThrowsOnNullServiceType()
- {
- AITool aiTool = (
- (BetaToolUnion)new BetaWebSearchTool20250305() { AllowedDomains = ["example.com"] }
- ).AsAITool();
- Assert.Throws(() => aiTool.GetService(null!, null));
- }
-
- [Fact]
- public async Task GetResponseAsync_WithHostedFileContent()
- {
- VerbatimHttpHandler handler = new(
- expectedRequest: """
- {
- "max_tokens": 1024,
- "model": "claude-haiku-4-5",
- "messages": [{
- "role": "user",
- "content": [{
- "type": "document",
- "source": {
- "type": "file",
- "file_id": "file_abc123"
- }
- }]
- }]
- }
- """,
- actualResponse: """
- {
- "id": "msg_hosted_file_01",
- "type": "message",
- "role": "assistant",
- "model": "claude-haiku-4-5",
- "content": [{
- "type": "text",
- "text": "I read the hosted file."
- }],
- "stop_reason": "end_turn",
- "usage": {
- "input_tokens": 20,
- "output_tokens": 6
+ "input_tokens": 20,
+ "output_tokens": 6
}
}
"""
@@ -1357,40 +838,45 @@ [new ChatMessage(ChatRole.User, [hostedFile])],
}
[Fact]
- public async Task GetResponseAsync_WithHostedCodeInterpreterTool()
+ public async Task GetResponseAsync_WithRawRepresentationFactory()
{
VerbatimHttpHandler handler = new(
expectedRequest: """
{
- "max_tokens": 1024,
+ "max_tokens": 2048,
"model": "claude-haiku-4-5",
- "messages": [{
- "role": "user",
- "content": [{
- "type": "text",
- "text": "Execute code"
- }]
- }],
- "tools": [{
- "type": "code_execution_20250825",
- "name": "code_execution"
- }]
+ "messages": [
+ {
+ "role": "user",
+ "content": [{
+ "type": "text",
+ "text": "Preconfigured message"
+ }]
+ },
+ {
+ "role": "user",
+ "content": [{
+ "type": "text",
+ "text": "New message"
+ }]
+ }
+ ]
}
""",
actualResponse: """
{
- "id": "msg_code_exec_01",
+ "id": "msg_factory_01",
"type": "message",
"role": "assistant",
"model": "claude-haiku-4-5",
"content": [{
"type": "text",
- "text": "I can execute code."
+ "text": "Response"
}],
"stop_reason": "end_turn",
"usage": {
- "input_tokens": 15,
- "output_tokens": 6
+ "input_tokens": 20,
+ "output_tokens": 5
}
}
"""
@@ -1398,10 +884,27 @@ public async Task GetResponseAsync_WithHostedCodeInterpreterTool()
IChatClient chatClient = CreateChatClient(handler, "claude-haiku-4-5");
- ChatOptions options = new() { Tools = [new HostedCodeInterpreterTool()] };
+ ChatOptions options = new()
+ {
+ RawRepresentationFactory = _ => new MessageCreateParams()
+ {
+ MaxTokens = 2048,
+ Model = "claude-haiku-4-5",
+ Messages =
+ [
+ new BetaMessageParam()
+ {
+ Role = Role.User,
+ Content = new BetaMessageParamContent(
+ [new BetaTextBlockParam() { Text = "Preconfigured message" }]
+ ),
+ },
+ ],
+ },
+ };
ChatResponse response = await chatClient.GetResponseAsync(
- "Execute code",
+ "New message",
options,
TestContext.Current.CancellationToken
);
@@ -1409,7 +912,7 @@ public async Task GetResponseAsync_WithHostedCodeInterpreterTool()
}
[Fact]
- public async Task GetResponseAsync_WithRawRepresentationFactory()
+ public async Task GetResponseAsync_WithNonEmptyMessageParams_EmptyMessages()
{
VerbatimHttpHandler handler = new(
expectedRequest: """
@@ -1423,20 +926,13 @@ public async Task GetResponseAsync_WithRawRepresentationFactory()
"type": "text",
"text": "Preconfigured message"
}]
- },
- {
- "role": "user",
- "content": [{
- "type": "text",
- "text": "New message"
- }]
}
]
}
""",
actualResponse: """
{
- "id": "msg_factory_01",
+ "id": "msg_factory_02",
"type": "message",
"role": "assistant",
"model": "claude-haiku-4-5",
@@ -1446,7 +942,7 @@ public async Task GetResponseAsync_WithRawRepresentationFactory()
}],
"stop_reason": "end_turn",
"usage": {
- "input_tokens": 20,
+ "input_tokens": 10,
"output_tokens": 5
}
}
@@ -1475,7 +971,7 @@ [new BetaTextBlockParam() { Text = "Preconfigured message" }]
};
ChatResponse response = await chatClient.GetResponseAsync(
- "New message",
+ [],
options,
TestContext.Current.CancellationToken
);
@@ -1748,15 +1244,16 @@ public async Task GetResponseAsync_McpToolResultWithTextList()
);
Assert.NotNull(mcpResult);
Assert.Equal("mcp_call_789", mcpResult.CallId);
- Assert.NotNull(mcpResult.Output);
- Assert.Equal(2, mcpResult.Output.Count);
- Assert.Equal("First result", ((TextContent)mcpResult.Output[0]).Text);
- Assert.Equal("Second result", ((TextContent)mcpResult.Output[1]).Text);
+ Assert.NotNull(mcpResult.Outputs);
+ Assert.Equal(2, mcpResult.Outputs.Count);
+ Assert.Equal("First result", ((TextContent)mcpResult.Outputs[0]).Text);
+ Assert.Equal("Second result", ((TextContent)mcpResult.Outputs[1]).Text);
}
[Fact]
- public async Task GetResponseAsync_CodeExecutionResult_WithStdout()
+ public async Task GetResponseAsync_WithHostedTools_AddsBetaHeaders()
{
+ IEnumerable? capturedBetaHeaders = null;
VerbatimHttpHandler handler = new(
expectedRequest: """
{
@@ -1766,58 +1263,69 @@ public async Task GetResponseAsync_CodeExecutionResult_WithStdout()
"role": "user",
"content": [{
"type": "text",
- "text": "Run code"
+ "text": "Use both tools"
}]
+ }],
+ "tools": [{
+ "type": "code_execution_20250825",
+ "name": "code_execution"
+ }],
+ "mcp_servers": [{
+ "name": "mcp",
+ "type": "url",
+ "url": "https://mcp.example.com/server"
}]
}
""",
actualResponse: """
{
- "id": "msg_code_stdout_01",
+ "id": "msg_both_beta_01",
"type": "message",
"role": "assistant",
"model": "claude-haiku-4-5",
"content": [{
- "type": "code_execution_tool_result",
- "tool_use_id": "code_exec_1",
- "content": {
- "type": "code_execution_result",
- "stdout": "Hello World\n42\n",
- "stderr": "",
- "return_code": 0,
- "content": []
- }
+ "type": "text",
+ "text": "I have access to both tools."
}],
"stop_reason": "end_turn",
"usage": {
- "input_tokens": 10,
- "output_tokens": 5
+ "input_tokens": 25,
+ "output_tokens": 10
}
}
"""
- );
+ )
+ {
+ OnRequestHeaders = headers =>
+ headers.TryGetValues("anthropic-beta", out capturedBetaHeaders),
+ };
IChatClient chatClient = CreateChatClient(handler, "claude-haiku-4-5");
+
+ ChatOptions options = new()
+ {
+ Tools =
+ [
+ new HostedCodeInterpreterTool(),
+ new HostedMcpServerTool("my-mcp-server", new Uri("https://mcp.example.com/server")),
+ ],
+ };
+
ChatResponse response = await chatClient.GetResponseAsync(
- "Run code",
- new(),
+ "Use both tools",
+ options,
TestContext.Current.CancellationToken
);
-
- CodeInterpreterToolResultContent codeResult =
- Assert.IsType(response.Messages[0].Contents[0]);
- Assert.NotNull(codeResult);
- Assert.Equal("code_exec_1", codeResult.CallId);
- Assert.NotNull(codeResult.Outputs);
- Assert.Single(codeResult.Outputs);
-
- TextContent textOutput = Assert.IsType(codeResult.Outputs[0]);
- Assert.Equal("Hello World\n42\n", textOutput.Text);
+ Assert.NotNull(response);
+ Assert.NotNull(capturedBetaHeaders);
+ Assert.Contains("code-execution-2025-08-25", capturedBetaHeaders);
+ Assert.Contains("mcp-client-2025-11-20", capturedBetaHeaders);
}
[Fact]
- public async Task GetResponseAsync_CodeExecutionResult_WithStderrAndNonZeroReturnCode()
+ public async Task GetResponseAsync_WithHostedToolsAndExistingBetas_PreservesAndDeduplicatesBetas()
{
+ IEnumerable? capturedBetaHeaders = null;
VerbatimHttpHandler handler = new(
expectedRequest: """
{
@@ -1827,27 +1335,29 @@ public async Task GetResponseAsync_CodeExecutionResult_WithStderrAndNonZeroRetur
"role": "user",
"content": [{
"type": "text",
- "text": "Run failing code"
+ "text": "Test"
}]
+ }],
+ "tools": [{
+ "type": "code_execution_20250825",
+ "name": "code_execution"
+ }],
+ "mcp_servers": [{
+ "name": "mcp",
+ "type": "url",
+ "url": "https://mcp.example.com/server"
}]
}
""",
actualResponse: """
{
- "id": "msg_code_stderr_01",
+ "id": "msg_preserve_beta_01",
"type": "message",
"role": "assistant",
"model": "claude-haiku-4-5",
"content": [{
- "type": "code_execution_tool_result",
- "tool_use_id": "code_exec_2",
- "content": {
- "type": "code_execution_result",
- "stdout": "",
- "stderr": "Division by zero error",
- "return_code": 1,
- "content": []
- }
+ "type": "text",
+ "text": "Response"
}],
"stop_reason": "end_turn",
"usage": {
@@ -1856,28 +1366,47 @@ public async Task GetResponseAsync_CodeExecutionResult_WithStderrAndNonZeroRetur
}
}
"""
- );
+ )
+ {
+ OnRequestHeaders = headers =>
+ headers.TryGetValues("anthropic-beta", out capturedBetaHeaders),
+ };
IChatClient chatClient = CreateChatClient(handler, "claude-haiku-4-5");
+
+ ChatOptions options = new()
+ {
+ RawRepresentationFactory = _ => new MessageCreateParams()
+ {
+ MaxTokens = 1024,
+ Model = "claude-haiku-4-5",
+ Messages = [],
+ Betas = ["custom-beta-feature", "code-execution-2025-08-25"],
+ },
+ Tools =
+ [
+ new HostedCodeInterpreterTool(),
+ new HostedMcpServerTool("my-mcp-server", new Uri("https://mcp.example.com/server")),
+ ],
+ };
+
ChatResponse response = await chatClient.GetResponseAsync(
- "Run failing code",
- new(),
+ "Test",
+ options,
TestContext.Current.CancellationToken
);
-
- CodeInterpreterToolResultContent codeResult =
- Assert.IsType(response.Messages[0].Contents[0]);
- Assert.NotNull(codeResult.Outputs);
- Assert.Single(codeResult.Outputs);
-
- ErrorContent errorOutput = Assert.IsType(codeResult.Outputs[0]);
- Assert.Equal("Division by zero error", errorOutput.Message);
- Assert.Equal("1", errorOutput.ErrorCode);
+ Assert.NotNull(response);
+ Assert.NotNull(capturedBetaHeaders);
+ Assert.Equal(3, capturedBetaHeaders.Count());
+ Assert.Contains("custom-beta-feature", capturedBetaHeaders);
+ Assert.Contains("code-execution-2025-08-25", capturedBetaHeaders);
+ Assert.Contains("mcp-client-2025-11-20", capturedBetaHeaders);
}
[Fact]
- public async Task GetResponseAsync_CodeExecutionResult_WithFileOutputs()
+ public async Task GetResponseAsync_IncludesMeaiUserAgentHeader()
{
+ string[]? capturedUserAgentValues = null;
VerbatimHttpHandler handler = new(
expectedRequest: """
{
@@ -1887,33 +1416,20 @@ public async Task GetResponseAsync_CodeExecutionResult_WithFileOutputs()
"role": "user",
"content": [{
"type": "text",
- "text": "Create file"
+ "text": "Test"
}]
}]
}
""",
actualResponse: """
{
- "id": "msg_code_files_01",
+ "id": "msg_meai_header_01",
"type": "message",
"role": "assistant",
"model": "claude-haiku-4-5",
"content": [{
- "type": "code_execution_tool_result",
- "tool_use_id": "code_exec_3",
- "content": {
- "type": "code_execution_result",
- "stdout": "File created",
- "stderr": "",
- "return_code": 0,
- "content": [{
- "type": "code_execution_output",
- "file_id": "file_output_123"
- }, {
- "type": "code_execution_output",
- "file_id": "file_output_456"
- }]
- }
+ "type": "text",
+ "text": "Response"
}],
"stop_reason": "end_turn",
"usage": {
@@ -1922,33 +1438,37 @@ public async Task GetResponseAsync_CodeExecutionResult_WithFileOutputs()
}
}
"""
- );
+ )
+ {
+ OnRequestHeaders = headers =>
+ {
+ // Verify there's exactly one User-Agent header entry
+ Assert.Single(headers, h => h.Key == "User-Agent");
+ if (headers.TryGetValues("User-Agent", out var values))
+ {
+ capturedUserAgentValues = [.. values];
+ }
+ },
+ };
IChatClient chatClient = CreateChatClient(handler, "claude-haiku-4-5");
+
ChatResponse response = await chatClient.GetResponseAsync(
- "Create file",
+ "Test",
new(),
TestContext.Current.CancellationToken
);
- CodeInterpreterToolResultContent codeResult =
- Assert.IsType(response.Messages[0].Contents[0]);
- Assert.NotNull(codeResult.Outputs);
- Assert.Equal(3, codeResult.Outputs.Count);
-
- TextContent textOutput = Assert.IsType(codeResult.Outputs[0]);
- Assert.Equal("File created", textOutput.Text);
-
- HostedFileContent fileOutput1 = Assert.IsType(codeResult.Outputs[1]);
- Assert.Equal("file_output_123", fileOutput1.FileId);
-
- HostedFileContent fileOutput2 = Assert.IsType(codeResult.Outputs[2]);
- Assert.Equal("file_output_456", fileOutput2.FileId);
+ Assert.NotNull(response);
+ Assert.NotNull(capturedUserAgentValues);
+ Assert.Contains(capturedUserAgentValues, v => v.Contains("MEAI"));
+ Assert.Contains(capturedUserAgentValues, v => v.Contains("AnthropicClient"));
}
[Fact]
- public async Task GetResponseAsync_BashCodeExecutionResult_WithStdout()
+ public async Task GetStreamingResponseAsync_IncludesMeaiUserAgentHeader()
{
+ string[]? capturedUserAgentValues = null;
VerbatimHttpHandler handler = new(
expectedRequest: """
{
@@ -1958,58 +1478,67 @@ public async Task GetResponseAsync_BashCodeExecutionResult_WithStdout()
"role": "user",
"content": [{
"type": "text",
- "text": "Run bash"
+ "text": "Test streaming"
}]
- }]
+ }],
+ "stream": true
}
""",
actualResponse: """
+ event: message_start
+ data: {"type":"message_start","message":{"id":"msg_stream_meai_01","type":"message","role":"assistant","model":"claude-haiku-4-5","content":[],"stop_reason":null,"stop_sequence":null,"usage":{"input_tokens":10,"output_tokens":0}}}
+
+ event: content_block_start
+ data: {"type":"content_block_start","index":0,"content_block":{"type":"text","text":""}}
+
+ event: content_block_delta
+ data: {"type":"content_block_delta","index":0,"delta":{"type":"text_delta","text":"Response"}}
+
+ event: content_block_stop
+ data: {"type":"content_block_stop","index":0}
+
+ event: message_delta
+ data: {"type":"message_delta","delta":{"stop_reason":"end_turn","stop_sequence":null},"usage":{"output_tokens":5}}
+
+ event: message_stop
+ data: {"type":"message_stop"}
+
+ """
+ )
+ {
+ OnRequestHeaders = headers =>
{
- "id": "msg_bash_stdout_01",
- "type": "message",
- "role": "assistant",
- "model": "claude-haiku-4-5",
- "content": [{
- "type": "bash_code_execution_tool_result",
- "tool_use_id": "bash_exec_1",
- "content": {
- "type": "bash_code_execution_result",
- "stdout": "Hello from bash\n5\n",
- "stderr": "",
- "return_code": 0,
- "content": []
- }
- }],
- "stop_reason": "end_turn",
- "usage": {
- "input_tokens": 10,
- "output_tokens": 5
+ // Verify there's exactly one User-Agent header entry
+ Assert.Single(headers, h => h.Key == "User-Agent");
+ if (headers.TryGetValues("User-Agent", out var values))
+ {
+ capturedUserAgentValues = [.. values];
}
- }
- """
- );
+ },
+ };
IChatClient chatClient = CreateChatClient(handler, "claude-haiku-4-5");
- ChatResponse response = await chatClient.GetResponseAsync(
- "Run bash",
- new(),
- TestContext.Current.CancellationToken
- );
- CodeInterpreterToolResultContent codeResult =
- Assert.IsType(response.Messages[0].Contents[0]);
- Assert.NotNull(codeResult);
- Assert.Equal("bash_exec_1", codeResult.CallId);
- Assert.NotNull(codeResult.Outputs);
- Assert.Single(codeResult.Outputs);
+ await foreach (
+ var update in chatClient.GetStreamingResponseAsync(
+ "Test streaming",
+ new(),
+ TestContext.Current.CancellationToken
+ )
+ )
+ {
+ // Consume the stream
+ }
- TextContent textOutput = Assert.IsType(codeResult.Outputs[0]);
- Assert.Equal("Hello from bash\n5\n", textOutput.Text);
+ Assert.NotNull(capturedUserAgentValues);
+ Assert.Contains(capturedUserAgentValues, v => v.Contains("MEAI"));
+ Assert.Contains(capturedUserAgentValues, v => v.Contains("AnthropicClient"));
}
[Fact]
- public async Task GetResponseAsync_BashCodeExecutionResult_WithStderrAndNonZeroReturnCode()
+ public async Task GetResponseAsync_MeaiUserAgentHeader_HasCorrectFormat()
{
+ string[]? capturedUserAgentValues = null;
VerbatimHttpHandler handler = new(
expectedRequest: """
{
@@ -2019,27 +1548,20 @@ public async Task GetResponseAsync_BashCodeExecutionResult_WithStderrAndNonZeroR
"role": "user",
"content": [{
"type": "text",
- "text": "Run failing bash"
+ "text": "Test"
}]
}]
}
""",
actualResponse: """
{
- "id": "msg_bash_stderr_01",
+ "id": "msg_meai_format_01",
"type": "message",
"role": "assistant",
"model": "claude-haiku-4-5",
"content": [{
- "type": "bash_code_execution_tool_result",
- "tool_use_id": "bash_exec_2",
- "content": {
- "type": "bash_code_execution_result",
- "stdout": "",
- "stderr": "bash: command not found: nonexistent",
- "return_code": 127,
- "content": []
- }
+ "type": "text",
+ "text": "Response"
}],
"stop_reason": "end_turn",
"usage": {
@@ -2048,28 +1570,44 @@ public async Task GetResponseAsync_BashCodeExecutionResult_WithStderrAndNonZeroR
}
}
"""
- );
+ )
+ {
+ OnRequestHeaders = headers =>
+ {
+ // Verify there's exactly one User-Agent header entry
+ Assert.Single(headers, h => h.Key == "User-Agent");
+ if (headers.TryGetValues("User-Agent", out var values))
+ {
+ capturedUserAgentValues = [.. values];
+ }
+ },
+ };
IChatClient chatClient = CreateChatClient(handler, "claude-haiku-4-5");
+
ChatResponse response = await chatClient.GetResponseAsync(
- "Run failing bash",
+ "Test",
new(),
TestContext.Current.CancellationToken
);
- CodeInterpreterToolResultContent codeResult =
- Assert.IsType(response.Messages[0].Contents[0]);
- Assert.NotNull(codeResult.Outputs);
- Assert.Single(codeResult.Outputs);
-
- ErrorContent errorOutput = Assert.IsType(codeResult.Outputs[0]);
- Assert.Equal("bash: command not found: nonexistent", errorOutput.Message);
- Assert.Equal("127", errorOutput.ErrorCode);
+ Assert.NotNull(response);
+ Assert.NotNull(capturedUserAgentValues);
+ // Verify the MEAI user-agent is present and has correct format (MEAI or MEAI/version)
+ Assert.Contains(
+ capturedUserAgentValues,
+ v => v.StartsWith("MEAI", StringComparison.Ordinal)
+ );
+ Assert.Contains(capturedUserAgentValues, v => v.Contains("AnthropicClient"));
}
[Fact]
- public async Task GetResponseAsync_BashCodeExecutionResult_WithFileOutputs()
+ public async Task GetResponseAsync_MeaiUserAgentHeader_PresentAlongsideDefaultHeaders()
{
+ bool hasAnthropicVersion = false;
+ bool hasMeaiUserAgent = false;
+ bool hasDefaultUserAgent = false;
+
VerbatimHttpHandler handler = new(
expectedRequest: """
{
@@ -2079,33 +1617,20 @@ public async Task GetResponseAsync_BashCodeExecutionResult_WithFileOutputs()
"role": "user",
"content": [{
"type": "text",
- "text": "Create files with bash"
+ "text": "Test"
}]
}]
}
""",
actualResponse: """
{
- "id": "msg_bash_files_01",
+ "id": "msg_headers_01",
"type": "message",
"role": "assistant",
"model": "claude-haiku-4-5",
"content": [{
- "type": "bash_code_execution_tool_result",
- "tool_use_id": "bash_exec_3",
- "content": {
- "type": "bash_code_execution_result",
- "stdout": "Files created successfully",
- "stderr": "",
- "return_code": 0,
- "content": [{
- "type": "bash_code_execution_output",
- "file_id": "file_bash_123"
- }, {
- "type": "bash_code_execution_output",
- "file_id": "file_bash_456"
- }]
- }
+ "type": "text",
+ "text": "Response"
}],
"stop_reason": "end_turn",
"usage": {
@@ -2114,106 +1639,107 @@ public async Task GetResponseAsync_BashCodeExecutionResult_WithFileOutputs()
}
}
"""
- );
+ )
+ {
+ OnRequestHeaders = headers =>
+ {
+ // Verify there's exactly one User-Agent header entry
+ Assert.Single(headers, h => h.Key == "User-Agent");
+ hasAnthropicVersion = headers.Contains("anthropic-version");
+ if (headers.TryGetValues("User-Agent", out var values))
+ {
+ var valuesArray = values.ToArray();
+ hasMeaiUserAgent = valuesArray.Any(v => v.Contains("MEAI"));
+ hasDefaultUserAgent = valuesArray.Any(v => v.Contains("AnthropicClient"));
+ }
+ },
+ };
IChatClient chatClient = CreateChatClient(handler, "claude-haiku-4-5");
+
ChatResponse response = await chatClient.GetResponseAsync(
- "Create files with bash",
+ "Test",
new(),
TestContext.Current.CancellationToken
);
- CodeInterpreterToolResultContent codeResult =
- Assert.IsType(response.Messages[0].Contents[0]);
- Assert.NotNull(codeResult.Outputs);
- Assert.Equal(3, codeResult.Outputs.Count);
-
- TextContent textOutput = Assert.IsType(codeResult.Outputs[0]);
- Assert.Equal("Files created successfully", textOutput.Text);
-
- HostedFileContent fileOutput1 = Assert.IsType(codeResult.Outputs[1]);
- Assert.Equal("file_bash_123", fileOutput1.FileId);
-
- HostedFileContent fileOutput2 = Assert.IsType(codeResult.Outputs[2]);
- Assert.Equal("file_bash_456", fileOutput2.FileId);
+ Assert.NotNull(response);
+ Assert.True(hasAnthropicVersion, "anthropic-version header should be present");
+ Assert.True(hasMeaiUserAgent, "MEAI user-agent header should be present");
+ Assert.True(
+ hasDefaultUserAgent,
+ "Default AnthropicClient user-agent header should be present"
+ );
}
[Fact]
- public async Task GetResponseAsync_WithHostedTools_AddsBetaHeaders()
+ public async Task GetResponseAsync_WithReasoningEffort_IgnoredWhenThinkingAlreadyConfigured()
{
- IEnumerable? capturedBetaHeaders = null;
+ // When RawRepresentationFactory pre-configures Thinking, the Reasoning option should be ignored.
VerbatimHttpHandler handler = new(
expectedRequest: """
{
- "max_tokens": 1024,
+ "max_tokens": 50000,
"model": "claude-haiku-4-5",
"messages": [{
"role": "user",
"content": [{
"type": "text",
- "text": "Use both tools"
+ "text": "Think carefully"
}]
}],
- "tools": [{
- "type": "code_execution_20250825",
- "name": "code_execution"
- }],
- "mcp_servers": [{
- "name": "mcp",
- "type": "url",
- "url": "https://mcp.example.com/server"
- }]
+ "thinking": {
+ "type": "enabled",
+ "budget_tokens": 5000
+ }
}
""",
actualResponse: """
{
- "id": "msg_both_beta_01",
+ "id": "msg_reasoning_preconfigured",
"type": "message",
"role": "assistant",
"model": "claude-haiku-4-5",
"content": [{
"type": "text",
- "text": "I have access to both tools."
+ "text": "Response"
}],
"stop_reason": "end_turn",
"usage": {
- "input_tokens": 25,
- "output_tokens": 10
+ "input_tokens": 10,
+ "output_tokens": 15
}
}
"""
- )
- {
- OnRequestHeaders = headers =>
- headers.TryGetValues("anthropic-beta", out capturedBetaHeaders),
- };
+ );
IChatClient chatClient = CreateChatClient(handler, "claude-haiku-4-5");
ChatOptions options = new()
{
- Tools =
- [
- new HostedCodeInterpreterTool(),
- new HostedMcpServerTool("my-mcp-server", new Uri("https://mcp.example.com/server")),
- ],
+ // RawRepresentationFactory sets Thinking to enabled with 5000 budget.
+ // Reasoning.Effort should be ignored since Thinking is already configured.
+ RawRepresentationFactory = _ => new MessageCreateParams()
+ {
+ MaxTokens = 50000,
+ Model = "claude-haiku-4-5",
+ Messages = [],
+ Thinking = new BetaThinkingConfigParam(new BetaThinkingConfigEnabled(5000)),
+ },
+ Reasoning = new() { Effort = ReasoningEffort.ExtraHigh },
};
ChatResponse response = await chatClient.GetResponseAsync(
- "Use both tools",
+ "Think carefully",
options,
TestContext.Current.CancellationToken
);
Assert.NotNull(response);
- Assert.NotNull(capturedBetaHeaders);
- Assert.Contains("code-execution-2025-08-25", capturedBetaHeaders);
- Assert.Contains("mcp-client-2025-11-20", capturedBetaHeaders);
}
[Fact]
- public async Task GetResponseAsync_WithHostedToolsAndExistingBetas_PreservesAndDeduplicatesBetas()
+ public async Task GetResponseAsync_WithAIFunctionTool_AllowedCallers_FlowsThrough()
{
- IEnumerable? capturedBetaHeaders = null;
VerbatimHttpHandler handler = new(
expectedRequest: """
{
@@ -2223,340 +1749,474 @@ public async Task GetResponseAsync_WithHostedToolsAndExistingBetas_PreservesAndD
"role": "user",
"content": [{
"type": "text",
- "text": "Test"
+ "text": "Use tool"
}]
}],
"tools": [{
- "type": "code_execution_20250825",
- "name": "code_execution"
- }],
- "mcp_servers": [{
- "name": "mcp",
- "type": "url",
- "url": "https://mcp.example.com/server"
+ "name": "callers_tool",
+ "description": "A tool with allowed callers",
+ "input_schema": {
+ "type": "object",
+ "properties": {
+ "value": {
+ "type": "integer"
+ }
+ },
+ "required": ["value"],
+ "additionalProperties": false
+ },
+ "allowed_callers": [
+ "direct"
+ ]
}]
}
""",
actualResponse: """
{
- "id": "msg_preserve_beta_01",
+ "id": "msg_callers_01",
"type": "message",
"role": "assistant",
"model": "claude-haiku-4-5",
"content": [{
"type": "text",
- "text": "Response"
+ "text": "Done"
}],
"stop_reason": "end_turn",
"usage": {
- "input_tokens": 10,
+ "input_tokens": 30,
"output_tokens": 5
}
}
"""
- )
- {
- OnRequestHeaders = headers =>
- headers.TryGetValues("anthropic-beta", out capturedBetaHeaders),
- };
+ );
IChatClient chatClient = CreateChatClient(handler, "claude-haiku-4-5");
- ChatOptions options = new()
- {
- RawRepresentationFactory = _ => new MessageCreateParams()
+ var function = AIFunctionFactory.Create(
+ (int value) => value,
+ new AIFunctionFactoryOptions
{
- MaxTokens = 1024,
- Model = "claude-haiku-4-5",
- Messages = [],
- Betas = ["custom-beta-feature", "code-execution-2025-08-25"],
- },
- Tools =
- [
- new HostedCodeInterpreterTool(),
- new HostedMcpServerTool("my-mcp-server", new Uri("https://mcp.example.com/server")),
- ],
- };
+ Name = "callers_tool",
+ Description = "A tool with allowed callers",
+ AdditionalProperties = new Dictionary
+ {
+ [nameof(BetaTool.AllowedCallers)] = new List<
+ ApiEnum
+ >
+ {
+ new(JsonSerializer.SerializeToElement("direct")),
+ },
+ },
+ }
+ );
+
+ ChatOptions options = new() { Tools = [function] };
ChatResponse response = await chatClient.GetResponseAsync(
- "Test",
+ "Use tool",
options,
TestContext.Current.CancellationToken
);
Assert.NotNull(response);
- Assert.NotNull(capturedBetaHeaders);
- Assert.Equal(3, capturedBetaHeaders.Count());
- Assert.Contains("custom-beta-feature", capturedBetaHeaders);
- Assert.Contains("code-execution-2025-08-25", capturedBetaHeaders);
- Assert.Contains("mcp-client-2025-11-20", capturedBetaHeaders);
}
[Fact]
- public async Task GetResponseAsync_IncludesMeaiUserAgentHeader()
+ public void AsIHostedFileClient_ReturnsInstance()
{
- string[]? capturedUserAgentValues = null;
- VerbatimHttpHandler handler = new(
- expectedRequest: """
- {
- "max_tokens": 1024,
- "model": "claude-haiku-4-5",
- "messages": [{
- "role": "user",
- "content": [{
- "type": "text",
- "text": "Test"
- }]
- }]
- }
- """,
- actualResponse: """
- {
- "id": "msg_meai_header_01",
- "type": "message",
- "role": "assistant",
- "model": "claude-haiku-4-5",
- "content": [{
- "type": "text",
- "text": "Response"
- }],
- "stop_reason": "end_turn",
- "usage": {
- "input_tokens": 10,
- "output_tokens": 5
- }
- }
- """
- )
+ var client = new AnthropicClient { ApiKey = "test-key" }.Beta;
+ IHostedFileClient fileClient = client.AsIHostedFileClient();
+ Assert.NotNull(fileClient);
+ }
+
+ [Fact]
+ public void AsIHostedFileClient_ReturnsHostedFileClientMetadata()
+ {
+ var client = new AnthropicClient { ApiKey = "test-key" }.Beta;
+ IHostedFileClient fileClient = client.AsIHostedFileClient();
+ var metadata = fileClient.GetService();
+ Assert.NotNull(metadata);
+ Assert.Equal("anthropic", metadata.ProviderName);
+ }
+
+ [Fact]
+ public void AsIHostedFileClient_ThrowsOnNull()
+ {
+ Anthropic.Services.IBetaService betaService = null!;
+ Anthropic.Services.Beta.IFileService fileService = null!;
+
+ Assert.Throws(() => betaService.AsIHostedFileClient());
+ Assert.Throws(() => fileService.AsIHostedFileClient());
+ }
+
+ [Fact]
+ public async Task IHostedFileClient_UploadAsync_NullContent_Throws()
+ {
+ using IHostedFileClient fileClient = new AnthropicClient
{
- OnRequestHeaders = headers =>
- {
- // Verify there's exactly one User-Agent header entry
- Assert.Single(headers, h => h.Key == "User-Agent");
- if (headers.TryGetValues("User-Agent", out var values))
- {
- capturedUserAgentValues = [.. values];
- }
- },
- };
+ ApiKey = "test-key",
+ }.Beta.AsIHostedFileClient();
+ await Assert.ThrowsAsync(
+ "content",
+ () =>
+ fileClient.UploadAsync(
+ null!,
+ cancellationToken: TestContext.Current.CancellationToken
+ )
+ );
+ }
- IChatClient chatClient = CreateChatClient(handler, "claude-haiku-4-5");
+ [Theory]
+ [InlineData(null)]
+ [InlineData("")]
+ public async Task IHostedFileClient_DownloadAsync_InvalidFileId_Throws(string? fileId)
+ {
+ using IHostedFileClient fileClient = new AnthropicClient
+ {
+ ApiKey = "test-key",
+ }.Beta.AsIHostedFileClient();
+ ArgumentException ex = await Assert.ThrowsAnyAsync(() =>
+ fileClient.DownloadAsync(
+ fileId!,
+ cancellationToken: TestContext.Current.CancellationToken
+ )
+ );
+ Assert.Equal("fileId", ex.ParamName);
+ }
- ChatResponse response = await chatClient.GetResponseAsync(
- "Test",
- new(),
- TestContext.Current.CancellationToken
+ [Theory]
+ [InlineData(null)]
+ [InlineData("")]
+ public async Task IHostedFileClient_GetFileInfoAsync_InvalidFileId_Throws(string? fileId)
+ {
+ using IHostedFileClient fileClient = new AnthropicClient
+ {
+ ApiKey = "test-key",
+ }.Beta.AsIHostedFileClient();
+ ArgumentException ex = await Assert.ThrowsAnyAsync(() =>
+ fileClient.GetFileInfoAsync(
+ fileId!,
+ cancellationToken: TestContext.Current.CancellationToken
+ )
);
+ Assert.Equal("fileId", ex.ParamName);
+ }
- Assert.NotNull(response);
- Assert.NotNull(capturedUserAgentValues);
- Assert.Contains(capturedUserAgentValues, v => v.Contains("MEAI"));
- Assert.Contains(capturedUserAgentValues, v => v.Contains("AnthropicClient"));
+ [Theory]
+ [InlineData(null)]
+ [InlineData("")]
+ public async Task IHostedFileClient_DeleteAsync_InvalidFileId_Throws(string? fileId)
+ {
+ using IHostedFileClient fileClient = new AnthropicClient
+ {
+ ApiKey = "test-key",
+ }.Beta.AsIHostedFileClient();
+ ArgumentException ex = await Assert.ThrowsAnyAsync(() =>
+ fileClient.DeleteAsync(
+ fileId!,
+ cancellationToken: TestContext.Current.CancellationToken
+ )
+ );
+ Assert.Equal("fileId", ex.ParamName);
}
[Fact]
- public async Task GetStreamingResponseAsync_IncludesMeaiUserAgentHeader()
+ public void IHostedFileClient_GetService_NullServiceType_Throws()
+ {
+ using IHostedFileClient fileClient = new AnthropicClient
+ {
+ ApiKey = "test-key",
+ }.Beta.AsIHostedFileClient();
+ Assert.Throws("serviceType", () => fileClient.GetService(null!));
+ }
+
+ [Fact]
+ public void IHostedFileClient_GetService_NonNullServiceKey_ReturnsNull()
+ {
+ using IHostedFileClient fileClient = new AnthropicClient
+ {
+ ApiKey = "test-key",
+ }.Beta.AsIHostedFileClient();
+ Assert.Null(fileClient.GetService(typeof(HostedFileClientMetadata), "key"));
+ }
+
+ [Fact]
+ public void IHostedFileClient_GetService_ReturnsSelf()
+ {
+ using IHostedFileClient fileClient = new AnthropicClient
+ {
+ ApiKey = "test-key",
+ }.Beta.AsIHostedFileClient();
+ Assert.Same(fileClient, fileClient.GetService());
+ }
+
+ [Fact]
+ public void IHostedFileClient_GetService_UnknownType_ReturnsNull()
+ {
+ using IHostedFileClient fileClient = new AnthropicClient
+ {
+ ApiKey = "test-key",
+ }.Beta.AsIHostedFileClient();
+ Assert.Null(fileClient.GetService(typeof(string)));
+ }
+
+ [Fact]
+ public void IHostedFileClient_GetService_Metadata_HasProviderUri()
+ {
+ using IHostedFileClient fileClient = new AnthropicClient
+ {
+ ApiKey = "test-key",
+ }.Beta.AsIHostedFileClient();
+ var metadata = fileClient.GetService();
+ Assert.NotNull(metadata);
+ Assert.NotNull(metadata.ProviderUri);
+ }
+
+ [Fact]
+ public async Task IHostedFileClient_GetFileInfoAsync_ReturnsHostedFileContent()
{
- string[]? capturedUserAgentValues = null;
VerbatimHttpHandler handler = new(
- expectedRequest: """
+ "",
+ """
{
- "max_tokens": 1024,
- "model": "claude-haiku-4-5",
- "messages": [{
- "role": "user",
- "content": [{
- "type": "text",
- "text": "Test streaming"
- }]
- }],
- "stream": true
+ "id": "file_abc123",
+ "created_at": "2024-01-01T00:00:00+00:00",
+ "filename": "test.txt",
+ "mime_type": "text/plain",
+ "size_bytes": 100,
+ "type": "file"
}
- """,
- actualResponse: """
- event: message_start
- data: {"type":"message_start","message":{"id":"msg_stream_meai_01","type":"message","role":"assistant","model":"claude-haiku-4-5","content":[],"stop_reason":null,"stop_sequence":null,"usage":{"input_tokens":10,"output_tokens":0}}}
+ """
+ );
- event: content_block_start
- data: {"type":"content_block_start","index":0,"content_block":{"type":"text","text":""}}
+ using IHostedFileClient fileClient = CreateAnthropicClient(handler)
+ .Beta.AsIHostedFileClient();
+ HostedFileContent? result = await fileClient.GetFileInfoAsync(
+ "file_abc123",
+ cancellationToken: TestContext.Current.CancellationToken
+ );
- event: content_block_delta
- data: {"type":"content_block_delta","index":0,"delta":{"type":"text_delta","text":"Response"}}
+ Assert.NotNull(result);
+ Assert.Equal("file_abc123", result.FileId);
+ Assert.Equal("text/plain", result.MediaType);
+ Assert.Equal("test.txt", result.Name);
+ Assert.Equal(100, result.SizeInBytes);
+ Assert.NotNull(result.CreatedAt);
+ Assert.IsType(result.RawRepresentation);
+ }
- event: content_block_stop
- data: {"type":"content_block_stop","index":0}
+ [Fact]
+ public async Task IHostedFileClient_DeleteAsync_ReturnsTrue()
+ {
+ VerbatimHttpHandler handler = new(
+ "",
+ """
+ {
+ "id": "file_abc123",
+ "type": "file_deleted"
+ }
+ """
+ );
- event: message_delta
- data: {"type":"message_delta","delta":{"stop_reason":"end_turn","stop_sequence":null},"usage":{"output_tokens":5}}
+ using IHostedFileClient fileClient = CreateAnthropicClient(handler)
+ .Beta.AsIHostedFileClient();
+ bool result = await fileClient.DeleteAsync(
+ "file_abc123",
+ cancellationToken: TestContext.Current.CancellationToken
+ );
- event: message_stop
- data: {"type":"message_stop"}
+ Assert.True(result);
+ }
+ [Fact]
+ public async Task IHostedFileClient_UploadAsync_ReturnsHostedFileContent()
+ {
+ VerbatimHttpHandler handler = new(
+ "",
"""
- )
- {
- OnRequestHeaders = headers =>
{
- // Verify there's exactly one User-Agent header entry
- Assert.Single(headers, h => h.Key == "User-Agent");
- if (headers.TryGetValues("User-Agent", out var values))
- {
- capturedUserAgentValues = [.. values];
- }
- },
- };
+ "id": "file_new123",
+ "created_at": "2024-06-15T10:30:00+00:00",
+ "filename": "report.pdf",
+ "mime_type": "application/pdf",
+ "size_bytes": 5000,
+ "type": "file"
+ }
+ """
+ );
- IChatClient chatClient = CreateChatClient(handler, "claude-haiku-4-5");
+ using IHostedFileClient fileClient = CreateAnthropicClient(handler)
+ .Beta.AsIHostedFileClient();
+ using var stream = new MemoryStream([1, 2, 3]);
- await foreach (
- var update in chatClient.GetStreamingResponseAsync(
- "Test streaming",
- new(),
- TestContext.Current.CancellationToken
- )
- )
- {
- // Consume the stream
- }
+ HostedFileContent result = await fileClient.UploadAsync(
+ stream,
+ "application/pdf",
+ "report.pdf",
+ cancellationToken: TestContext.Current.CancellationToken
+ );
- Assert.NotNull(capturedUserAgentValues);
- Assert.Contains(capturedUserAgentValues, v => v.Contains("MEAI"));
- Assert.Contains(capturedUserAgentValues, v => v.Contains("AnthropicClient"));
+ Assert.Equal("file_new123", result.FileId);
+ Assert.Equal("application/pdf", result.MediaType);
+ Assert.Equal("report.pdf", result.Name);
+ Assert.Equal(5000, result.SizeInBytes);
}
[Fact]
- public async Task GetResponseAsync_MeaiUserAgentHeader_HasCorrectFormat()
+ public async Task IHostedFileClient_UploadAsync_NullMediaType_InfersFromFileName()
{
- string[]? capturedUserAgentValues = null;
VerbatimHttpHandler handler = new(
- expectedRequest: """
+ "",
+ """
{
- "max_tokens": 1024,
- "model": "claude-haiku-4-5",
- "messages": [{
- "role": "user",
- "content": [{
- "type": "text",
- "text": "Test"
- }]
- }]
+ "id": "file_inferred",
+ "created_at": "2024-06-15T10:30:00+00:00",
+ "filename": "data.csv",
+ "mime_type": "text/csv",
+ "size_bytes": 500,
+ "type": "file"
}
- """,
- actualResponse: """
+ """
+ );
+
+ using IHostedFileClient fileClient = CreateAnthropicClient(handler)
+ .Beta.AsIHostedFileClient();
+ using var stream = new MemoryStream([1, 2, 3]);
+
+ HostedFileContent result = await fileClient.UploadAsync(
+ stream,
+ null,
+ "data.csv",
+ cancellationToken: TestContext.Current.CancellationToken
+ );
+
+ Assert.Equal("file_inferred", result.FileId);
+ }
+
+ [Fact]
+ public async Task IHostedFileClient_UploadAsync_NullFileName_GeneratesFromMediaType()
+ {
+ VerbatimHttpHandler handler = new(
+ "",
+ """
{
- "id": "msg_meai_format_01",
- "type": "message",
- "role": "assistant",
- "model": "claude-haiku-4-5",
- "content": [{
- "type": "text",
- "text": "Response"
- }],
- "stop_reason": "end_turn",
- "usage": {
- "input_tokens": 10,
- "output_tokens": 5
- }
+ "id": "file_gen",
+ "created_at": "2024-06-15T10:30:00+00:00",
+ "filename": "generated.pdf",
+ "mime_type": "application/pdf",
+ "size_bytes": 100,
+ "type": "file"
}
"""
- )
- {
- OnRequestHeaders = headers =>
- {
- // Verify there's exactly one User-Agent header entry
- Assert.Single(headers, h => h.Key == "User-Agent");
- if (headers.TryGetValues("User-Agent", out var values))
- {
- capturedUserAgentValues = [.. values];
- }
- },
- };
+ );
- IChatClient chatClient = CreateChatClient(handler, "claude-haiku-4-5");
+ using IHostedFileClient fileClient = CreateAnthropicClient(handler)
+ .Beta.AsIHostedFileClient();
+ using var stream = new MemoryStream([1, 2, 3]);
- ChatResponse response = await chatClient.GetResponseAsync(
- "Test",
- new(),
- TestContext.Current.CancellationToken
+ HostedFileContent result = await fileClient.UploadAsync(
+ stream,
+ "application/pdf",
+ null,
+ cancellationToken: TestContext.Current.CancellationToken
);
- Assert.NotNull(response);
- Assert.NotNull(capturedUserAgentValues);
- // Verify the MEAI user-agent is present and has correct format (MEAI or MEAI/version)
- Assert.Contains(
- capturedUserAgentValues,
- v => v.StartsWith("MEAI", StringComparison.Ordinal)
+ Assert.Equal("file_gen", result.FileId);
+ }
+
+ [Fact]
+ public async Task IHostedFileClient_DownloadAsync_ReturnsReadableStream()
+ {
+ VerbatimHttpHandler handler = new("", "test file content");
+
+ using IHostedFileClient fileClient = CreateAnthropicClient(handler)
+ .Beta.AsIHostedFileClient();
+ using HostedFileDownloadStream downloadStream = await fileClient.DownloadAsync(
+ "file_abc123",
+ cancellationToken: TestContext.Current.CancellationToken
);
- Assert.Contains(capturedUserAgentValues, v => v.Contains("AnthropicClient"));
+
+ Assert.NotNull(downloadStream);
+ Assert.True(downloadStream.CanRead);
+ Assert.False(downloadStream.CanWrite);
+
+ using var reader = new StreamReader(downloadStream);
+ string content = await reader.ReadToEndAsync(
+#if NET
+ TestContext.Current.CancellationToken
+#endif
+ );
+ Assert.Contains("test file content", content);
}
[Fact]
- public async Task GetResponseAsync_MeaiUserAgentHeader_PresentAlongsideDefaultHeaders()
+ public async Task IHostedFileClient_DownloadAsync_StreamWriteThrows()
{
- bool hasAnthropicVersion = false;
- bool hasMeaiUserAgent = false;
- bool hasDefaultUserAgent = false;
+ VerbatimHttpHandler handler = new("", "content");
+
+ using IHostedFileClient fileClient = CreateAnthropicClient(handler)
+ .Beta.AsIHostedFileClient();
+ using HostedFileDownloadStream stream = await fileClient.DownloadAsync(
+ "file_abc123",
+ cancellationToken: TestContext.Current.CancellationToken
+ );
+ Assert.Throws(() => stream.Write(new byte[1], 0, 1));
+ Assert.Throws(() => stream.SetLength(0));
+ }
+
+ [Fact]
+ public async Task IHostedFileClient_ListFilesAsync_ReturnsFiles()
+ {
VerbatimHttpHandler handler = new(
- expectedRequest: """
- {
- "max_tokens": 1024,
- "model": "claude-haiku-4-5",
- "messages": [{
- "role": "user",
- "content": [{
- "type": "text",
- "text": "Test"
- }]
- }]
- }
- """,
- actualResponse: """
+ "",
+ """
{
- "id": "msg_headers_01",
- "type": "message",
- "role": "assistant",
- "model": "claude-haiku-4-5",
- "content": [{
- "type": "text",
- "text": "Response"
- }],
- "stop_reason": "end_turn",
- "usage": {
- "input_tokens": 10,
- "output_tokens": 5
- }
+ "data": [
+ {
+ "id": "file_1",
+ "created_at": "2024-01-01T00:00:00+00:00",
+ "filename": "a.txt",
+ "mime_type": "text/plain",
+ "size_bytes": 10,
+ "type": "file"
+ },
+ {
+ "id": "file_2",
+ "created_at": "2024-01-02T00:00:00+00:00",
+ "filename": "b.pdf",
+ "mime_type": "application/pdf",
+ "size_bytes": 2000,
+ "type": "file"
+ }
+ ],
+ "first_id": "file_1",
+ "has_more": false,
+ "last_id": null
}
"""
- )
- {
- OnRequestHeaders = headers =>
- {
- // Verify there's exactly one User-Agent header entry
- Assert.Single(headers, h => h.Key == "User-Agent");
- hasAnthropicVersion = headers.Contains("anthropic-version");
- if (headers.TryGetValues("User-Agent", out var values))
- {
- var valuesArray = values.ToArray();
- hasMeaiUserAgent = valuesArray.Any(v => v.Contains("MEAI"));
- hasDefaultUserAgent = valuesArray.Any(v => v.Contains("AnthropicClient"));
- }
- },
- };
+ );
- IChatClient chatClient = CreateChatClient(handler, "claude-haiku-4-5");
+ using IHostedFileClient fileClient = CreateAnthropicClient(handler)
+ .Beta.AsIHostedFileClient();
- ChatResponse response = await chatClient.GetResponseAsync(
- "Test",
- new(),
- TestContext.Current.CancellationToken
- );
+ List files = new();
+ await foreach (
+ HostedFileContent file in fileClient.ListFilesAsync(
+ cancellationToken: TestContext.Current.CancellationToken
+ )
+ )
+ {
+ files.Add(file);
+ }
- Assert.NotNull(response);
- Assert.True(hasAnthropicVersion, "anthropic-version header should be present");
- Assert.True(hasMeaiUserAgent, "MEAI user-agent header should be present");
- Assert.True(
- hasDefaultUserAgent,
- "Default AnthropicClient user-agent header should be present"
- );
+ Assert.Equal(2, files.Count);
+ Assert.Equal("file_1", files[0].FileId);
+ Assert.Equal("a.txt", files[0].Name);
+ Assert.Equal("text/plain", files[0].MediaType);
+ Assert.Equal(10, files[0].SizeInBytes);
+ Assert.Equal("file_2", files[1].FileId);
+ Assert.Equal("b.pdf", files[1].Name);
+ Assert.Equal("application/pdf", files[1].MediaType);
+ Assert.Equal(2000, files[1].SizeInBytes);
}
}
diff --git a/src/Anthropic.Tests/AnthropicClientExtensionsTests.cs b/src/Anthropic.Tests/AnthropicClientExtensionsTests.cs
index bb9c50a14..cf5610586 100644
--- a/src/Anthropic.Tests/AnthropicClientExtensionsTests.cs
+++ b/src/Anthropic.Tests/AnthropicClientExtensionsTests.cs
@@ -1,8 +1,10 @@
using System;
using System.Collections.Generic;
using System.Linq;
+using System.Text.Json;
using System.Threading.Tasks;
using Anthropic;
+using Anthropic.Core;
using Anthropic.Models.Messages;
#pragma warning disable IDE0130 // Namespace does not match folder structure
@@ -336,6 +338,73 @@ [new TextBlockParam() { Text = "Preconfigured message" }]
Assert.NotNull(response);
}
+ [Fact]
+ public async Task GetResponseAsync_WithNonEmptyMessageParams_EmptyMessages()
+ {
+ VerbatimHttpHandler handler = new(
+ expectedRequest: """
+ {
+ "max_tokens": 2048,
+ "model": "claude-haiku-4-5",
+ "messages": [
+ {
+ "role": "user",
+ "content": [{
+ "type": "text",
+ "text": "Preconfigured message"
+ }]
+ }
+ ]
+ }
+ """,
+ actualResponse: """
+ {
+ "id": "msg_factory_02",
+ "type": "message",
+ "role": "assistant",
+ "model": "claude-haiku-4-5",
+ "content": [{
+ "type": "text",
+ "text": "Response"
+ }],
+ "stop_reason": "end_turn",
+ "usage": {
+ "input_tokens": 10,
+ "output_tokens": 5
+ }
+ }
+ """
+ );
+
+ IChatClient chatClient = CreateChatClient(handler, "claude-haiku-4-5");
+
+ ChatOptions options = new()
+ {
+ RawRepresentationFactory = _ => new MessageCreateParams()
+ {
+ MaxTokens = 2048,
+ Model = "claude-haiku-4-5",
+ Messages =
+ [
+ new MessageParam()
+ {
+ Role = Role.User,
+ Content = new MessageParamContent(
+ [new TextBlockParam() { Text = "Preconfigured message" }]
+ ),
+ },
+ ],
+ },
+ };
+
+ ChatResponse response = await chatClient.GetResponseAsync(
+ [],
+ options,
+ TestContext.Current.CancellationToken
+ );
+ Assert.NotNull(response);
+ }
+
[Fact]
public async Task GetResponseAsync_WithRawRepresentationFactory_SystemMessagesMerged()
{
@@ -816,4 +885,150 @@ public async Task GetResponseAsync_MeaiUserAgentHeader_PresentAlongsideDefaultHe
"Default AnthropicClient user-agent header should be present"
);
}
+
+ [Fact]
+ public async Task GetResponseAsync_WithReasoningEffort_IgnoredWhenThinkingAlreadyConfigured()
+ {
+ // When RawRepresentationFactory pre-configures Thinking, the Reasoning option should be ignored.
+ VerbatimHttpHandler handler = new(
+ expectedRequest: """
+ {
+ "max_tokens": 50000,
+ "model": "claude-haiku-4-5",
+ "messages": [{
+ "role": "user",
+ "content": [{
+ "type": "text",
+ "text": "Think carefully"
+ }]
+ }],
+ "thinking": {
+ "type": "enabled",
+ "budget_tokens": 5000
+ }
+ }
+ """,
+ actualResponse: """
+ {
+ "id": "msg_reasoning_preconfigured",
+ "type": "message",
+ "role": "assistant",
+ "model": "claude-haiku-4-5",
+ "content": [{
+ "type": "text",
+ "text": "Response"
+ }],
+ "stop_reason": "end_turn",
+ "usage": {
+ "input_tokens": 10,
+ "output_tokens": 15
+ }
+ }
+ """
+ );
+
+ IChatClient chatClient = CreateChatClient(handler, "claude-haiku-4-5");
+
+ ChatOptions options = new()
+ {
+ // RawRepresentationFactory sets Thinking to enabled with 5000 budget.
+ // Reasoning.Effort should be ignored since Thinking is already configured.
+ RawRepresentationFactory = _ => new MessageCreateParams()
+ {
+ MaxTokens = 50000,
+ Model = "claude-haiku-4-5",
+ Messages = [],
+ Thinking = new ThinkingConfigParam(new ThinkingConfigEnabled(5000)),
+ },
+ Reasoning = new() { Effort = ReasoningEffort.ExtraHigh },
+ };
+
+ ChatResponse response = await chatClient.GetResponseAsync(
+ "Think carefully",
+ options,
+ TestContext.Current.CancellationToken
+ );
+ Assert.NotNull(response);
+ }
+
+ [Fact]
+ public async Task GetResponseAsync_WithAIFunctionTool_AllowedCallers_FlowsThrough()
+ {
+ VerbatimHttpHandler handler = new(
+ expectedRequest: """
+ {
+ "max_tokens": 1024,
+ "model": "claude-haiku-4-5",
+ "messages": [{
+ "role": "user",
+ "content": [{
+ "type": "text",
+ "text": "Use tool"
+ }]
+ }],
+ "tools": [{
+ "name": "callers_tool",
+ "description": "A tool with allowed callers",
+ "input_schema": {
+ "type": "object",
+ "properties": {
+ "value": {
+ "type": "integer"
+ }
+ },
+ "required": ["value"],
+ "additionalProperties": false
+ },
+ "allowed_callers": [
+ "direct"
+ ]
+ }]
+ }
+ """,
+ actualResponse: """
+ {
+ "id": "msg_callers_01",
+ "type": "message",
+ "role": "assistant",
+ "model": "claude-haiku-4-5",
+ "content": [{
+ "type": "text",
+ "text": "Done"
+ }],
+ "stop_reason": "end_turn",
+ "usage": {
+ "input_tokens": 30,
+ "output_tokens": 5
+ }
+ }
+ """
+ );
+
+ IChatClient chatClient = CreateChatClient(handler, "claude-haiku-4-5");
+
+ var function = AIFunctionFactory.Create(
+ (int value) => value,
+ new AIFunctionFactoryOptions
+ {
+ Name = "callers_tool",
+ Description = "A tool with allowed callers",
+ AdditionalProperties = new Dictionary
+ {
+ [nameof(Tool.AllowedCallers)] = new List>
+ {
+ new(JsonSerializer.SerializeToElement("direct")),
+ },
+ },
+ }
+ );
+
+ ChatOptions options = new() { Tools = [function] };
+
+ ChatResponse response = await chatClient.GetResponseAsync(
+ "Use tool",
+ options,
+ TestContext.Current.CancellationToken
+ );
+ Assert.NotNull(response);
+ }
}
diff --git a/src/Anthropic.Tests/AnthropicClientExtensionsTestsBase.cs b/src/Anthropic.Tests/AnthropicClientExtensionsTestsBase.cs
index 5149aff65..940b376f6 100644
--- a/src/Anthropic.Tests/AnthropicClientExtensionsTestsBase.cs
+++ b/src/Anthropic.Tests/AnthropicClientExtensionsTestsBase.cs
@@ -9,7 +9,10 @@
using System.Threading;
using System.Threading.Tasks;
using Anthropic;
+using Anthropic.Core;
+using Anthropic.Models.Messages;
+#pragma warning disable MEAI001 // [Experimental] APIs in Microsoft.Extensions.AI
#pragma warning disable IDE0130 // Namespace does not match folder structure
namespace Microsoft.Extensions.AI.Tests;
@@ -903,7 +906,8 @@ public async Task GetResponseAsync_WithToolCalls()
"location": { "type": "string", "description": "The city and state" },
"unit": { "type": "string", "description": "Temperature unit" }
},
- "required": ["location", "unit"]
+ "required": ["location", "unit"],
+ "additionalProperties": false
}
}]
}
@@ -996,7 +1000,7 @@ public async Task GetResponseAsync_WithParameterlessToolCall()
"input_schema": {
"type": "object",
"properties": {},
- "required": []
+ "additionalProperties": false
}
}]
}
@@ -1452,7 +1456,8 @@ public async Task GetResponseAsync_WithToolModeAuto()
"properties": {
"location": { "type": "string", "description": "The location" }
},
- "required": ["location"]
+ "required": ["location"],
+ "additionalProperties": false
}
}]
}
@@ -1530,7 +1535,8 @@ public async Task GetResponseAsync_WithToolModeRequired()
"properties": {
"location": { "type": "string", "description": "The location" }
},
- "required": ["location"]
+ "required": ["location"],
+ "additionalProperties": false
}
}]
}
@@ -1615,7 +1621,8 @@ public async Task GetResponseAsync_WithToolModeNone()
"properties": {
"location": { "type": "string", "description": "The location" }
},
- "required": ["location"]
+ "required": ["location"],
+ "additionalProperties": false
}
}]
}
@@ -1690,7 +1697,8 @@ public async Task GetResponseAsync_WithMultipleToolCalls()
"properties": {
"location": { "type": "string" }
},
- "required": ["location"]
+ "required": ["location"],
+ "additionalProperties": false
}
}]
}
@@ -2709,54 +2717,48 @@ public async Task GetResponseAsync_WithNullFinishReason()
Assert.Null(response.FinishReason);
}
- [Fact]
- public async Task GetResponseAsync_SendsTextReasoningAsThinkingBlock()
+ [Theory]
+ [InlineData(ReasoningEffort.Low, 1024)]
+ [InlineData(ReasoningEffort.Medium, 8192)]
+ [InlineData(ReasoningEffort.High, 16384)]
+ [InlineData(ReasoningEffort.ExtraHigh, 32768)]
+ public async Task GetResponseAsync_WithReasoningEffort_SetsThinkingEnabled(
+ ReasoningEffort effort,
+ int expectedBudgetTokens
+ )
{
VerbatimHttpHandler handler = new(
- expectedRequest: """
+ expectedRequest: $$"""
{
"model": "claude-haiku-4-5",
- "messages": [
- {
- "role": "user",
- "content": [{
- "type": "text",
- "text": "Think about this"
- }]
- },
- {
- "role": "assistant",
- "content": [{
- "type": "thinking",
- "thinking": "My detailed reasoning...",
- "signature": "sig_123"
- }]
- },
- {
- "role": "user",
- "content": [{
- "type": "text",
- "text": "What did you conclude?"
- }]
- }
- ],
- "max_tokens": 1024
+ "messages": [{
+ "role": "user",
+ "content": [{
+ "type": "text",
+ "text": "Think carefully"
+ }]
+ }],
+ "max_tokens": 100000,
+ "thinking": {
+ "type": "enabled",
+ "budget_tokens": {{expectedBudgetTokens}}
+ }
}
""",
actualResponse: """
{
- "id": "msg_reasoning_sent_01",
+ "id": "msg_reasoning_01",
"type": "message",
"role": "assistant",
"model": "claude-haiku-4-5",
"content": [{
"type": "text",
- "text": "Response after thinking"
+ "text": "Here is my response"
}],
"stop_reason": "end_turn",
"usage": {
- "input_tokens": 30,
- "output_tokens": 10
+ "input_tokens": 10,
+ "output_tokens": 20
}
}
"""
@@ -2764,71 +2766,54 @@ public async Task GetResponseAsync_SendsTextReasoningAsThinkingBlock()
IChatClient chatClient = CreateChatClient(handler, "claude-haiku-4-5");
- List messages =
- [
- new(ChatRole.User, "Think about this"),
- new(
- ChatRole.Assistant,
- [new TextReasoningContent("My detailed reasoning...") { ProtectedData = "sig_123" }]
- ),
- new(ChatRole.User, "What did you conclude?"),
- ];
+ ChatOptions options = new()
+ {
+ MaxOutputTokens = 100000,
+ Reasoning = new() { Effort = effort },
+ };
ChatResponse response = await chatClient.GetResponseAsync(
- messages,
- new(),
+ "Think carefully",
+ options,
TestContext.Current.CancellationToken
);
Assert.NotNull(response);
}
[Fact]
- public async Task GetResponseAsync_SendsRedactedTextReasoningAsRedactedThinkingBlock()
+ public async Task GetResponseAsync_WithReasoningEffortNone_SetsThinkingDisabled()
{
VerbatimHttpHandler handler = new(
expectedRequest: """
{
"model": "claude-haiku-4-5",
- "messages": [
- {
- "role": "user",
- "content": [{
- "type": "text",
- "text": "Previous question"
- }]
- },
- {
- "role": "assistant",
- "content": [{
- "type": "redacted_thinking",
- "data": "encrypted_data_xyz"
- }]
- },
- {
- "role": "user",
- "content": [{
- "type": "text",
- "text": "Follow up question"
- }]
- }
- ],
- "max_tokens": 1024
+ "messages": [{
+ "role": "user",
+ "content": [{
+ "type": "text",
+ "text": "Simple question"
+ }]
+ }],
+ "max_tokens": 1024,
+ "thinking": {
+ "type": "disabled"
+ }
}
""",
actualResponse: """
{
- "id": "msg_redacted_sent_01",
+ "id": "msg_reasoning_02",
"type": "message",
"role": "assistant",
"model": "claude-haiku-4-5",
"content": [{
"type": "text",
- "text": "Response after redacted thinking"
+ "text": "Quick answer"
}],
"stop_reason": "end_turn",
"usage": {
- "input_tokens": 30,
- "output_tokens": 10
+ "input_tokens": 10,
+ "output_tokens": 5
}
}
"""
@@ -2836,53 +2821,42 @@ public async Task GetResponseAsync_SendsRedactedTextReasoningAsRedactedThinkingB
IChatClient chatClient = CreateChatClient(handler, "claude-haiku-4-5");
- List messages =
- [
- new(ChatRole.User, "Previous question"),
- new(
- ChatRole.Assistant,
- [new TextReasoningContent(string.Empty) { ProtectedData = "encrypted_data_xyz" }]
- ),
- new(ChatRole.User, "Follow up question"),
- ];
+ ChatOptions options = new() { Reasoning = new() { Effort = ReasoningEffort.None } };
ChatResponse response = await chatClient.GetResponseAsync(
- messages,
- new(),
+ "Simple question",
+ options,
TestContext.Current.CancellationToken
);
Assert.NotNull(response);
}
[Fact]
- public async Task GetResponseAsync_SkipsEmptyTextReasoningContent()
+ public async Task GetResponseAsync_WithReasoningEffort_ClampsBudgetToExplicitMaxTokens()
{
+ // High effort maps to 16384, but caller explicitly set max_tokens to 5000,
+ // so budget should clamp to 4999.
VerbatimHttpHandler handler = new(
expectedRequest: """
{
- "max_tokens": 1024,
"model": "claude-haiku-4-5",
- "messages": [
- {
- "role": "user",
- "content": [{
- "type": "text",
- "text": "Question"
- }]
- },
- {
- "role": "user",
- "content": [{
- "type": "text",
- "text": "Follow up"
- }]
- }
- ]
+ "messages": [{
+ "role": "user",
+ "content": [{
+ "type": "text",
+ "text": "Think carefully"
+ }]
+ }],
+ "max_tokens": 5000,
+ "thinking": {
+ "type": "enabled",
+ "budget_tokens": 4999
+ }
}
""",
actualResponse: """
{
- "id": "msg_skip_empty_01",
+ "id": "msg_reasoning_03",
"type": "message",
"role": "assistant",
"model": "claude-haiku-4-5",
@@ -2892,8 +2866,8 @@ public async Task GetResponseAsync_SkipsEmptyTextReasoningContent()
}],
"stop_reason": "end_turn",
"usage": {
- "input_tokens": 20,
- "output_tokens": 5
+ "input_tokens": 10,
+ "output_tokens": 15
}
}
"""
@@ -2901,245 +2875,187 @@ public async Task GetResponseAsync_SkipsEmptyTextReasoningContent()
IChatClient chatClient = CreateChatClient(handler, "claude-haiku-4-5");
- List messages =
- [
- new(ChatRole.User, "Question"),
- new(ChatRole.Assistant, [new TextReasoningContent(null)]),
- new(ChatRole.User, "Follow up"),
- ];
+ ChatOptions options = new()
+ {
+ MaxOutputTokens = 5000,
+ Reasoning = new() { Effort = ReasoningEffort.High },
+ };
ChatResponse response = await chatClient.GetResponseAsync(
- messages,
- new(),
+ "Think carefully",
+ options,
TestContext.Current.CancellationToken
);
Assert.NotNull(response);
}
[Fact]
- public async Task GetStreamingResponseAsync_WithThinkingBlockInStream()
+ public async Task GetResponseAsync_WithReasoningEffort_SkipsThinkingWhenExplicitMaxTokensTooSmall()
{
+ // Medium effort maps to 8192, but caller explicitly set max_tokens to 1024,
+ // so after clamping budget would be 1023 which is < 1024 minimum. Thinking is skipped.
VerbatimHttpHandler handler = new(
expectedRequest: """
{
- "max_tokens": 1024,
"model": "claude-haiku-4-5",
"messages": [{
"role": "user",
"content": [{
"type": "text",
- "text": "Analyze this problem"
+ "text": "Think carefully"
}]
}],
- "stream": true
+ "max_tokens": 1024
}
""",
actualResponse: """
- event: message_start
- data: {"type":"message_start","message":{"id":"msg_thinking_stream_01","type":"message","role":"assistant","model":"claude-haiku-4-5","content":[],"stop_reason":null,"stop_sequence":null,"usage":{"input_tokens":10,"output_tokens":0}}}
-
- event: content_block_start
- data: {"type":"content_block_start","index":0,"content_block":{"type":"thinking","thinking":"","signature":"sig_123"}}
-
- event: content_block_delta
- data: {"type":"content_block_delta","index":0,"delta":{"type":"thinking_delta","thinking":"Let me analyze this..."}}
-
- event: content_block_stop
- data: {"type":"content_block_stop","index":0}
-
- event: content_block_start
- data: {"type":"content_block_start","index":1,"content_block":{"type":"text","text":""}}
-
- event: content_block_delta
- data: {"type":"content_block_delta","index":1,"delta":{"type":"text_delta","text":"Based on my analysis"}}
-
- event: content_block_stop
- data: {"type":"content_block_stop","index":1}
-
- event: message_delta
- data: {"type":"message_delta","delta":{"stop_reason":"end_turn","stop_sequence":null},"usage":{"output_tokens":10}}
-
- event: message_stop
- data: {"type":"message_stop"}
-
+ {
+ "id": "msg_reasoning_04",
+ "type": "message",
+ "role": "assistant",
+ "model": "claude-haiku-4-5",
+ "content": [{
+ "type": "text",
+ "text": "Response"
+ }],
+ "stop_reason": "end_turn",
+ "usage": {
+ "input_tokens": 10,
+ "output_tokens": 15
+ }
+ }
"""
);
IChatClient chatClient = CreateChatClient(handler, "claude-haiku-4-5");
- List updates = [];
- await foreach (
- var update in chatClient.GetStreamingResponseAsync(
- "Analyze this problem",
- new(),
- TestContext.Current.CancellationToken
- )
- )
+ ChatOptions options = new()
{
- updates.Add(update);
- }
-
- Assert.NotEmpty(updates);
- var reasoningUpdates = updates
- .Where(u => u.Contents.Any(c => c is TextReasoningContent))
- .ToList();
- Assert.NotEmpty(reasoningUpdates);
+ MaxOutputTokens = 1024,
+ Reasoning = new() { Effort = ReasoningEffort.Medium },
+ };
- var reasoningContent = reasoningUpdates
- .SelectMany(u => u.Contents.OfType())
- .FirstOrDefault();
- Assert.NotNull(reasoningContent);
+ ChatResponse response = await chatClient.GetResponseAsync(
+ "Think carefully",
+ options,
+ TestContext.Current.CancellationToken
+ );
+ Assert.NotNull(response);
}
[Fact]
- public async Task GetStreamingResponseAsync_WithRedactedThinkingBlockInStream()
+ public async Task GetResponseAsync_WithReasoningEffort_AutoIncreasesMaxTokensFromDefault()
{
+ // Medium effort maps to 8192. Default max_tokens is 1024, so max_tokens should
+ // auto-increase to budget (8192) + default (1024) = 9216.
VerbatimHttpHandler handler = new(
expectedRequest: """
{
- "max_tokens": 1024,
"model": "claude-haiku-4-5",
"messages": [{
"role": "user",
"content": [{
"type": "text",
- "text": "Test redacted thinking"
+ "text": "Think carefully"
}]
}],
- "stream": true
+ "max_tokens": 9216,
+ "thinking": {
+ "type": "enabled",
+ "budget_tokens": 8192
+ }
}
""",
actualResponse: """
- event: message_start
- data: {"type":"message_start","message":{"id":"msg_redacted_stream_01","type":"message","role":"assistant","model":"claude-haiku-4-5","content":[],"stop_reason":null,"stop_sequence":null,"usage":{"input_tokens":10,"output_tokens":0}}}
-
- event: content_block_start
- data: {"type":"content_block_start","index":0,"content_block":{"type":"redacted_thinking","data":"encrypted_xyz"}}
+ {
+ "id": "msg_reasoning_05",
+ "type": "message",
+ "role": "assistant",
+ "model": "claude-haiku-4-5",
+ "content": [{
+ "type": "text",
+ "text": "Response"
+ }],
+ "stop_reason": "end_turn",
+ "usage": {
+ "input_tokens": 10,
+ "output_tokens": 15
+ }
+ }
+ """
+ );
- event: content_block_stop
- data: {"type":"content_block_stop","index":0}
+ IChatClient chatClient = CreateChatClient(handler, "claude-haiku-4-5");
- event: content_block_start
- data: {"type":"content_block_start","index":1,"content_block":{"type":"text","text":""}}
+ ChatOptions options = new() { Reasoning = new() { Effort = ReasoningEffort.Medium } };
- event: content_block_delta
- data: {"type":"content_block_delta","index":1,"delta":{"type":"text_delta","text":"Response"}}
-
- event: content_block_stop
- data: {"type":"content_block_stop","index":1}
-
- event: message_delta
- data: {"type":"message_delta","delta":{"stop_reason":"end_turn","stop_sequence":null},"usage":{"output_tokens":5}}
-
- event: message_stop
- data: {"type":"message_stop"}
-
- """
+ ChatResponse response = await chatClient.GetResponseAsync(
+ "Think carefully",
+ options,
+ TestContext.Current.CancellationToken
);
-
- IChatClient chatClient = CreateChatClient(handler, "claude-haiku-4-5");
-
- List updates = [];
- await foreach (
- var update in chatClient.GetStreamingResponseAsync(
- "Test redacted thinking",
- new(),
- TestContext.Current.CancellationToken
- )
- )
- {
- updates.Add(update);
- }
-
- Assert.NotEmpty(updates);
- var reasoningUpdates = updates
- .Where(u => u.Contents.Any(c => c is TextReasoningContent))
- .ToList();
- Assert.NotEmpty(reasoningUpdates);
-
- var reasoningContent = reasoningUpdates
- .SelectMany(u => u.Contents.OfType())
- .FirstOrDefault();
- Assert.NotNull(reasoningContent);
- Assert.Equal(string.Empty, reasoningContent.Text);
- Assert.Equal("encrypted_xyz", reasoningContent.ProtectedData);
+ Assert.NotNull(response);
}
[Fact]
- public async Task GetStreamingResponseAsync_WithSignatureDeltaInStream()
+ public async Task GetResponseAsync_WithReasoningEffortLow_AutoIncreasesFromDefaultMaxTokens()
{
+ // Low effort maps to 1024. Default max_tokens is also 1024, so 1024 <= 1024
+ // triggers auto-increase to budget (1024) + default (1024) = 2048.
VerbatimHttpHandler handler = new(
expectedRequest: """
{
- "max_tokens": 1024,
"model": "claude-haiku-4-5",
"messages": [{
"role": "user",
"content": [{
"type": "text",
- "text": "Test signature delta"
+ "text": "Think a little"
}]
}],
- "stream": true
+ "max_tokens": 2048,
+ "thinking": {
+ "type": "enabled",
+ "budget_tokens": 1024
+ }
}
""",
actualResponse: """
- event: message_start
- data: {"type":"message_start","message":{"id":"msg_sig_delta_01","type":"message","role":"assistant","model":"claude-haiku-4-5","content":[],"stop_reason":null,"stop_sequence":null,"usage":{"input_tokens":10,"output_tokens":0}}}
-
- event: content_block_start
- data: {"type":"content_block_start","index":0,"content_block":{"type":"thinking","thinking":"","signature":""}}
-
- event: content_block_delta
- data: {"type":"content_block_delta","index":0,"delta":{"type":"thinking_delta","thinking":"Analyzing..."}}
-
- event: content_block_delta
- data: {"type":"content_block_delta","index":0,"delta":{"type":"signature_delta","signature":"sig_part1"}}
-
- event: content_block_delta
- data: {"type":"content_block_delta","index":0,"delta":{"type":"signature_delta","signature":"sig_part2"}}
-
- event: content_block_stop
- data: {"type":"content_block_stop","index":0}
-
- event: message_delta
- data: {"type":"message_delta","delta":{"stop_reason":"end_turn","stop_sequence":null},"usage":{"output_tokens":5}}
-
- event: message_stop
- data: {"type":"message_stop"}
-
+ {
+ "id": "msg_reasoning_06",
+ "type": "message",
+ "role": "assistant",
+ "model": "claude-haiku-4-5",
+ "content": [{
+ "type": "text",
+ "text": "Response"
+ }],
+ "stop_reason": "end_turn",
+ "usage": {
+ "input_tokens": 10,
+ "output_tokens": 15
+ }
+ }
"""
);
IChatClient chatClient = CreateChatClient(handler, "claude-haiku-4-5");
- List updates = [];
- await foreach (
- var update in chatClient.GetStreamingResponseAsync(
- "Test signature delta",
- new(),
- TestContext.Current.CancellationToken
- )
- )
- {
- updates.Add(update);
- }
-
- Assert.NotEmpty(updates);
- var reasoningUpdates = updates
- .Where(u => u.Contents.Any(c => c is TextReasoningContent))
- .ToList();
- Assert.NotEmpty(reasoningUpdates);
+ ChatOptions options = new() { Reasoning = new() { Effort = ReasoningEffort.Low } };
- var allReasoningContent = reasoningUpdates
- .SelectMany(u => u.Contents.OfType())
- .ToList();
- Assert.NotEmpty(allReasoningContent);
+ ChatResponse response = await chatClient.GetResponseAsync(
+ "Think a little",
+ options,
+ TestContext.Current.CancellationToken
+ );
+ Assert.NotNull(response);
}
[Fact]
- public async Task GetResponseAsync_WithThinkingBlockInResponse()
+ public async Task GetResponseAsync_WithReasoningEffort_ExactFitMaxTokensOneBeyondBudget()
{
+ // Low effort maps to 1024. MaxOutputTokens is 1025, so 1025 > 1024 means
+ // no auto-increase needed — budget fits exactly.
VerbatimHttpHandler handler = new(
expectedRequest: """
{
@@ -3148,29 +3064,29 @@ public async Task GetResponseAsync_WithThinkingBlockInResponse()
"role": "user",
"content": [{
"type": "text",
- "text": "What is the answer?"
+ "text": "Think a little"
}]
}],
- "max_tokens": 1024
+ "max_tokens": 1025,
+ "thinking": {
+ "type": "enabled",
+ "budget_tokens": 1024
+ }
}
""",
actualResponse: """
{
- "id": "msg_thinking_resp_01",
+ "id": "msg_reasoning_07",
"type": "message",
"role": "assistant",
"model": "claude-haiku-4-5",
"content": [{
- "type": "thinking",
- "thinking": "Let me think through this step by step...",
- "signature": "sig_abc123"
- }, {
"type": "text",
- "text": "Based on my analysis, the answer is 42."
+ "text": "Response"
}],
"stop_reason": "end_turn",
"usage": {
- "input_tokens": 20,
+ "input_tokens": 10,
"output_tokens": 15
}
}
@@ -3179,31 +3095,25 @@ public async Task GetResponseAsync_WithThinkingBlockInResponse()
IChatClient chatClient = CreateChatClient(handler, "claude-haiku-4-5");
+ ChatOptions options = new()
+ {
+ MaxOutputTokens = 1025,
+ Reasoning = new() { Effort = ReasoningEffort.Low },
+ };
+
ChatResponse response = await chatClient.GetResponseAsync(
- "What is the answer?",
- new(),
+ "Think a little",
+ options,
TestContext.Current.CancellationToken
);
Assert.NotNull(response);
-
- Assert.Equal(2, response.Messages[0].Contents.Count);
-
- var reasoningContent = response
- .Messages[0]
- .Contents.OfType()
- .FirstOrDefault();
- Assert.NotNull(reasoningContent);
- Assert.Equal("Let me think through this step by step...", reasoningContent.Text);
- Assert.Equal("sig_abc123", reasoningContent.ProtectedData);
-
- var textContent = response.Messages[0].Contents.OfType().FirstOrDefault();
- Assert.NotNull(textContent);
- Assert.Equal("Based on my analysis, the answer is 42.", textContent.Text);
}
[Fact]
- public async Task GetResponseAsync_WithRedactedThinkingBlockInResponse()
+ public async Task GetResponseAsync_WithReasoningEffort_NoAutoIncreaseWhenDefaultMaxTokensSufficient()
{
+ // Low effort maps to 1024. Custom default max_tokens is 5000, so 5000 > 1024 means
+ // no auto-increase is needed.
VerbatimHttpHandler handler = new(
expectedRequest: """
{
@@ -3212,60 +3122,53 @@ public async Task GetResponseAsync_WithRedactedThinkingBlockInResponse()
"role": "user",
"content": [{
"type": "text",
- "text": "Tell me your conclusion"
+ "text": "Think a little"
}]
}],
- "max_tokens": 1024
+ "max_tokens": 5000,
+ "thinking": {
+ "type": "enabled",
+ "budget_tokens": 1024
+ }
}
""",
actualResponse: """
{
- "id": "msg_redacted_resp_01",
+ "id": "msg_reasoning_08",
"type": "message",
"role": "assistant",
"model": "claude-haiku-4-5",
"content": [{
- "type": "redacted_thinking",
- "data": "encrypted_thinking_data_xyz"
- }, {
"type": "text",
- "text": "Here is my conclusion."
+ "text": "Response"
}],
"stop_reason": "end_turn",
"usage": {
- "input_tokens": 20,
- "output_tokens": 10
+ "input_tokens": 10,
+ "output_tokens": 15
}
}
"""
);
- IChatClient chatClient = CreateChatClient(handler, "claude-haiku-4-5");
+ IChatClient chatClient = CreateChatClient(
+ handler,
+ "claude-haiku-4-5",
+ defaultMaxOutputTokens: 5000
+ );
+
+ ChatOptions options = new() { Reasoning = new() { Effort = ReasoningEffort.Low } };
ChatResponse response = await chatClient.GetResponseAsync(
- "Tell me your conclusion",
- new(),
+ "Think a little",
+ options,
TestContext.Current.CancellationToken
);
Assert.NotNull(response);
-
- Assert.Equal(2, response.Messages[0].Contents.Count);
-
- var reasoningContent = response
- .Messages[0]
- .Contents.OfType()
- .FirstOrDefault();
- Assert.NotNull(reasoningContent);
- Assert.Equal(string.Empty, reasoningContent.Text);
- Assert.Equal("encrypted_thinking_data_xyz", reasoningContent.ProtectedData);
-
- var textContent = response.Messages[0].Contents.OfType().FirstOrDefault();
- Assert.NotNull(textContent);
- Assert.Equal("Here is my conclusion.", textContent.Text);
}
[Fact]
- public async Task GetResponseAsync_WithToolUseBlockInResponse()
+ public async Task GetResponseAsync_WithReasoningOutputNone_SetsThinkingDisplayOmitted()
{
VerbatimHttpHandler handler = new(
expectedRequest: """
@@ -3275,33 +3178,35 @@ public async Task GetResponseAsync_WithToolUseBlockInResponse()
"role": "user",
"content": [{
"type": "text",
- "text": "What is 6 times 7?"
+ "text": "Think carefully"
}]
}],
- "max_tokens": 1024
+ "max_tokens": 100000,
+ "thinking": {
+ "type": "enabled",
+ "budget_tokens": 16384,
+ "display": "omitted"
+ }
}
""",
actualResponse: """
{
- "id": "msg_tooluse_resp_01",
+ "id": "msg_reasoning_display_01",
"type": "message",
"role": "assistant",
"model": "claude-haiku-4-5",
"content": [{
- "type": "tool_use",
- "id": "toolu_detailed_01",
- "name": "calculate",
- "input": {
- "operation": "multiply",
- "a": 6,
- "b": 7
- },
- "caller": {"type": "direct"}
+ "type": "thinking",
+ "thinking": "",
+ "signature": "sig_abc123"
+ }, {
+ "type": "text",
+ "text": "Here is my response"
}],
- "stop_reason": "tool_use",
+ "stop_reason": "end_turn",
"usage": {
- "input_tokens": 30,
- "output_tokens": 15
+ "input_tokens": 10,
+ "output_tokens": 20
}
}
"""
@@ -3309,42 +3214,28 @@ public async Task GetResponseAsync_WithToolUseBlockInResponse()
IChatClient chatClient = CreateChatClient(handler, "claude-haiku-4-5");
- var calcFunction = AIFunctionFactory.Create(
- (string operation, int a, int b) =>
- {
- return operation == "multiply" ? a * b : 0;
- },
- "calculate"
- );
-
- ChatOptions options = new() { Tools = [calcFunction] };
+ ChatOptions options = new()
+ {
+ MaxOutputTokens = 100000,
+ Reasoning = new() { Effort = ReasoningEffort.High, Output = ReasoningOutput.None },
+ };
ChatResponse response = await chatClient.GetResponseAsync(
- "What is 6 times 7?",
- new(),
+ "Think carefully",
+ options,
TestContext.Current.CancellationToken
);
Assert.NotNull(response);
-
- Assert.Equal(ChatFinishReason.ToolCalls, response.FinishReason);
-
- var functionCall = response
- .Messages[0]
- .Contents.OfType()
- .FirstOrDefault();
- Assert.NotNull(functionCall);
- Assert.Equal("calculate", functionCall.Name);
- Assert.Equal("toolu_detailed_01", functionCall.CallId);
- Assert.NotNull(functionCall.Arguments);
- Assert.True(functionCall.Arguments.ContainsKey("operation"));
- Assert.Equal("multiply", functionCall.Arguments["operation"]?.ToString());
- Assert.True(functionCall.Arguments.ContainsKey("a"));
- Assert.True(functionCall.Arguments.ContainsKey("b"));
}
- [Fact]
- public async Task GetResponseAsync_WithHostedWebSearchToolOptionTriggersConversion()
+ [Theory]
+ [InlineData(ReasoningOutput.Summary)]
+ [InlineData(ReasoningOutput.Full)]
+ public async Task GetResponseAsync_WithReasoningOutputSummaryOrFull_DoesNotSetDisplay(
+ ReasoningOutput output
+ )
{
+ // Summary and Full should not set display, letting the server default to "summarized".
VerbatimHttpHandler handler = new(
expectedRequest: """
{
@@ -3353,43 +3244,45 @@ public async Task GetResponseAsync_WithHostedWebSearchToolOptionTriggersConversi
"role": "user",
"content": [{
"type": "text",
- "text": "Find recent news about AI"
+ "text": "Think carefully"
}]
}],
- "max_tokens": 1024,
- "tools": [
- {
- "name": "web_search",
- "type": "web_search_20250305"
- }
- ]
+ "max_tokens": 100000,
+ "thinking": {
+ "type": "enabled",
+ "budget_tokens": 16384
+ }
}
""",
actualResponse: """
{
- "id": "msg_websearch_opt_01",
+ "id": "msg_reasoning_display_02",
"type": "message",
"role": "assistant",
"model": "claude-haiku-4-5",
"content": [{
"type": "text",
- "text": "I'll search for that information."
+ "text": "Here is my response"
}],
"stop_reason": "end_turn",
"usage": {
- "input_tokens": 20,
- "output_tokens": 10
- }
+ "input_tokens": 10,
+ "output_tokens": 20
+ }
}
"""
);
IChatClient chatClient = CreateChatClient(handler, "claude-haiku-4-5");
- ChatOptions options = new() { Tools = [new HostedWebSearchTool()] };
+ ChatOptions options = new()
+ {
+ MaxOutputTokens = 100000,
+ Reasoning = new() { Effort = ReasoningEffort.High, Output = output },
+ };
ChatResponse response = await chatClient.GetResponseAsync(
- "Find recent news about AI",
+ "Think carefully",
options,
TestContext.Current.CancellationToken
);
@@ -3397,8 +3290,10 @@ public async Task GetResponseAsync_WithHostedWebSearchToolOptionTriggersConversi
}
[Fact]
- public async Task GetResponseAsync_WithTextBlockWithoutCitations()
+ public async Task GetResponseAsync_WithReasoningOutputNone_OmittedThinkingYieldsRedactedReasoningContent()
{
+ // When display is omitted, the response contains thinking blocks with empty thinking
+ // but a valid signature. These should map to TextReasoningContent with ProtectedData.
VerbatimHttpHandler handler = new(
expectedRequest: """
{
@@ -3407,26 +3302,35 @@ public async Task GetResponseAsync_WithTextBlockWithoutCitations()
"role": "user",
"content": [{
"type": "text",
- "text": "Tell me about AI"
+ "text": "Think carefully"
}]
}],
- "max_tokens": 1024
+ "max_tokens": 100000,
+ "thinking": {
+ "type": "enabled",
+ "budget_tokens": 16384,
+ "display": "omitted"
+ }
}
""",
actualResponse: """
{
- "id": "msg_no_citations_01",
+ "id": "msg_reasoning_display_03",
"type": "message",
"role": "assistant",
"model": "claude-haiku-4-5",
"content": [{
+ "type": "thinking",
+ "thinking": "",
+ "signature": "sig_omitted_abc123"
+ }, {
"type": "text",
- "text": "AI is transforming the world."
+ "text": "Here is my response"
}],
"stop_reason": "end_turn",
"usage": {
- "input_tokens": 20,
- "output_tokens": 10
+ "input_tokens": 10,
+ "output_tokens": 20
}
}
"""
@@ -3434,57 +3338,76 @@ public async Task GetResponseAsync_WithTextBlockWithoutCitations()
IChatClient chatClient = CreateChatClient(handler, "claude-haiku-4-5");
+ ChatOptions options = new()
+ {
+ MaxOutputTokens = 100000,
+ Reasoning = new() { Effort = ReasoningEffort.High, Output = ReasoningOutput.None },
+ };
+
ChatResponse response = await chatClient.GetResponseAsync(
- "Tell me about AI",
- new(),
+ "Think carefully",
+ options,
TestContext.Current.CancellationToken
);
Assert.NotNull(response);
- var textContent = response.Messages[0].Contents.OfType().FirstOrDefault();
- Assert.NotNull(textContent);
- Assert.Equal("AI is transforming the world.", textContent.Text);
- Assert.True(textContent.Annotations is null || !textContent.Annotations.Any());
+ // The response should contain a TextReasoningContent with the signature
+ // and empty text (since display was omitted).
+ TextReasoningContent thinkingContent = Assert.IsType(
+ response.Messages.SelectMany(m => m.Contents).OfType().Single()
+ );
+ Assert.Equal(string.Empty, thinkingContent.Text);
+ Assert.Equal("sig_omitted_abc123", thinkingContent.ProtectedData);
}
[Fact]
- public async Task GetResponseAsync_WithWebSearchCitationsInTextBlock()
+ public async Task GetResponseAsync_SendsTextReasoningAsThinkingBlock()
{
VerbatimHttpHandler handler = new(
expectedRequest: """
{
"model": "claude-haiku-4-5",
- "messages": [{
- "role": "user",
- "content": [{
- "type": "text",
- "text": "Tell me about recent AI developments with sources"
- }]
- }],
+ "messages": [
+ {
+ "role": "user",
+ "content": [{
+ "type": "text",
+ "text": "Think about this"
+ }]
+ },
+ {
+ "role": "assistant",
+ "content": [{
+ "type": "thinking",
+ "thinking": "My detailed reasoning...",
+ "signature": "sig_123"
+ }]
+ },
+ {
+ "role": "user",
+ "content": [{
+ "type": "text",
+ "text": "What did you conclude?"
+ }]
+ }
+ ],
"max_tokens": 1024
}
""",
actualResponse: """
{
- "id": "msg_with_citations_01",
+ "id": "msg_reasoning_sent_01",
"type": "message",
"role": "assistant",
"model": "claude-haiku-4-5",
"content": [{
"type": "text",
- "text": "According to recent research [1], artificial intelligence is rapidly advancing.",
- "citations": [{
- "type": "web_search_result_location",
- "cited_text": "artificial intelligence is rapidly advancing",
- "title": "AI Research 2024",
- "url": "https://example.com/ai-research",
- "encrypted_index": "enc_idx_123"
- }]
+ "text": "Response after thinking"
}],
"stop_reason": "end_turn",
"usage": {
- "input_tokens": 25,
- "output_tokens": 18
+ "input_tokens": 30,
+ "output_tokens": 10
}
}
"""
@@ -3492,68 +3415,71 @@ public async Task GetResponseAsync_WithWebSearchCitationsInTextBlock()
IChatClient chatClient = CreateChatClient(handler, "claude-haiku-4-5");
+ List messages =
+ [
+ new(ChatRole.User, "Think about this"),
+ new(
+ ChatRole.Assistant,
+ [new TextReasoningContent("My detailed reasoning...") { ProtectedData = "sig_123" }]
+ ),
+ new(ChatRole.User, "What did you conclude?"),
+ ];
+
ChatResponse response = await chatClient.GetResponseAsync(
- "Tell me about recent AI developments with sources",
+ messages,
new(),
TestContext.Current.CancellationToken
);
Assert.NotNull(response);
-
- var textContent = response.Messages[0].Contents.OfType().FirstOrDefault();
- Assert.NotNull(textContent);
- Assert.Contains("artificial intelligence", textContent.Text);
- Assert.NotNull(textContent.Annotations);
- Assert.NotEmpty(textContent.Annotations);
-
- var citation = textContent.Annotations.OfType().FirstOrDefault();
- Assert.NotNull(citation);
- Assert.Equal("AI Research 2024", citation.Title);
- Assert.Equal("artificial intelligence is rapidly advancing", citation.Snippet);
- Assert.NotNull(citation.Url);
- Assert.Equal("https://example.com/ai-research", citation.Url.ToString());
- Assert.Null(citation.AnnotatedRegions);
}
[Fact]
- public async Task GetResponseAsync_WithContentBlockLocationCitationsInTextBlock()
+ public async Task GetResponseAsync_SendsRedactedTextReasoningAsRedactedThinkingBlock()
{
VerbatimHttpHandler handler = new(
expectedRequest: """
{
"model": "claude-haiku-4-5",
- "messages": [{
- "role": "user",
- "content": [{
- "type": "text",
- "text": "What does the document say about ML?"
- }]
- }],
+ "messages": [
+ {
+ "role": "user",
+ "content": [{
+ "type": "text",
+ "text": "Previous question"
+ }]
+ },
+ {
+ "role": "assistant",
+ "content": [{
+ "type": "redacted_thinking",
+ "data": "encrypted_data_xyz"
+ }]
+ },
+ {
+ "role": "user",
+ "content": [{
+ "type": "text",
+ "text": "Follow up question"
+ }]
+ }
+ ],
"max_tokens": 1024
}
""",
actualResponse: """
{
- "id": "msg_doc_citations_01",
+ "id": "msg_redacted_sent_01",
"type": "message",
"role": "assistant",
"model": "claude-haiku-4-5",
"content": [{
"type": "text",
- "text": "As stated in the document [1], machine learning requires large datasets.",
- "citations": [{
- "type": "content_block_location",
- "cited_text": "machine learning requires large datasets",
- "document_title": "ML Fundamentals",
- "file_id": "file_abc123",
- "document_index": 0,
- "start_block_index": 15,
- "end_block_index": 45
- }]
+ "text": "Response after redacted thinking"
}],
"stop_reason": "end_turn",
"usage": {
"input_tokens": 30,
- "output_tokens": 20
+ "output_tokens": 10
}
}
"""
@@ -3561,47 +3487,53 @@ public async Task GetResponseAsync_WithContentBlockLocationCitationsInTextBlock(
IChatClient chatClient = CreateChatClient(handler, "claude-haiku-4-5");
+ List messages =
+ [
+ new(ChatRole.User, "Previous question"),
+ new(
+ ChatRole.Assistant,
+ [new TextReasoningContent(string.Empty) { ProtectedData = "encrypted_data_xyz" }]
+ ),
+ new(ChatRole.User, "Follow up question"),
+ ];
+
ChatResponse response = await chatClient.GetResponseAsync(
- "What does the document say about ML?",
+ messages,
new(),
TestContext.Current.CancellationToken
);
Assert.NotNull(response);
-
- var textContent = response.Messages[0].Contents.OfType().FirstOrDefault();
- Assert.NotNull(textContent);
- Assert.NotNull(textContent.Annotations);
- Assert.NotEmpty(textContent.Annotations);
-
- var citation = textContent.Annotations.OfType().FirstOrDefault();
- Assert.NotNull(citation);
-
- Assert.Equal("machine learning requires large datasets", citation.Snippet);
- Assert.Equal("file_abc123", citation.FileId);
- Assert.Null(citation.Url);
- Assert.Null(citation.AnnotatedRegions);
}
[Fact]
- public async Task GetResponseAsync_FinishReasonNullHandling()
+ public async Task GetResponseAsync_SkipsEmptyTextReasoningContent()
{
VerbatimHttpHandler handler = new(
expectedRequest: """
{
"max_tokens": 1024,
"model": "claude-haiku-4-5",
- "messages": [{
- "role": "user",
- "content": [{
- "type": "text",
- "text": "Test"
- }]
- }]
+ "messages": [
+ {
+ "role": "user",
+ "content": [{
+ "type": "text",
+ "text": "Question"
+ }]
+ },
+ {
+ "role": "user",
+ "content": [{
+ "type": "text",
+ "text": "Follow up"
+ }]
+ }
+ ]
}
""",
actualResponse: """
{
- "id": "msg_no_finish_01",
+ "id": "msg_skip_empty_01",
"type": "message",
"role": "assistant",
"model": "claude-haiku-4-5",
@@ -3609,9 +3541,9 @@ public async Task GetResponseAsync_FinishReasonNullHandling()
"type": "text",
"text": "Response"
}],
- "stop_reason": null,
+ "stop_reason": "end_turn",
"usage": {
- "input_tokens": 10,
+ "input_tokens": 20,
"output_tokens": 5
}
}
@@ -3620,18 +3552,23 @@ public async Task GetResponseAsync_FinishReasonNullHandling()
IChatClient chatClient = CreateChatClient(handler, "claude-haiku-4-5");
+ List messages =
+ [
+ new(ChatRole.User, "Question"),
+ new(ChatRole.Assistant, [new TextReasoningContent(null)]),
+ new(ChatRole.User, "Follow up"),
+ ];
+
ChatResponse response = await chatClient.GetResponseAsync(
- "Test",
+ messages,
new(),
TestContext.Current.CancellationToken
);
Assert.NotNull(response);
-
- Assert.Null(response.FinishReason);
}
[Fact]
- public async Task GetStreamingResponseAsync_AccumulatesUsageFromMultipleMessageStartEvents()
+ public async Task GetStreamingResponseAsync_WithThinkingBlockInStream()
{
VerbatimHttpHandler handler = new(
expectedRequest: """
@@ -3642,7 +3579,7 @@ public async Task GetStreamingResponseAsync_AccumulatesUsageFromMultipleMessageS
"role": "user",
"content": [{
"type": "text",
- "text": "Test multiple message starts"
+ "text": "Analyze this problem"
}]
}],
"stream": true
@@ -3650,22 +3587,28 @@ public async Task GetStreamingResponseAsync_AccumulatesUsageFromMultipleMessageS
""",
actualResponse: """
event: message_start
- data: {"type":"message_start","message":{"id":"msg_multi_start_01","type":"message","role":"assistant","model":"claude-haiku-4-5","content":[],"stop_reason":null,"stop_sequence":null,"usage":{"input_tokens":10,"output_tokens":0}}}
-
- event: message_start
- data: {"type":"message_start","message":{"id":"msg_multi_start_01","type":"message","role":"assistant","model":"claude-haiku-4-5","content":[],"stop_reason":null,"stop_sequence":null,"usage":{"input_tokens":5,"output_tokens":0}}}
+ data: {"type":"message_start","message":{"id":"msg_thinking_stream_01","type":"message","role":"assistant","model":"claude-haiku-4-5","content":[],"stop_reason":null,"stop_sequence":null,"usage":{"input_tokens":10,"output_tokens":0}}}
event: content_block_start
- data: {"type":"content_block_start","index":0,"content_block":{"type":"text","text":""}}
+ data: {"type":"content_block_start","index":0,"content_block":{"type":"thinking","thinking":"","signature":"sig_123"}}
event: content_block_delta
- data: {"type":"content_block_delta","index":0,"delta":{"type":"text_delta","text":"Response"}}
+ data: {"type":"content_block_delta","index":0,"delta":{"type":"thinking_delta","thinking":"Let me analyze this..."}}
event: content_block_stop
data: {"type":"content_block_stop","index":0}
+ event: content_block_start
+ data: {"type":"content_block_start","index":1,"content_block":{"type":"text","text":""}}
+
+ event: content_block_delta
+ data: {"type":"content_block_delta","index":1,"delta":{"type":"text_delta","text":"Based on my analysis"}}
+
+ event: content_block_stop
+ data: {"type":"content_block_stop","index":1}
+
event: message_delta
- data: {"type":"message_delta","delta":{"stop_reason":"end_turn","stop_sequence":null},"usage":{"input_tokens":15,"output_tokens":2}}
+ data: {"type":"message_delta","delta":{"stop_reason":"end_turn","stop_sequence":null},"usage":{"output_tokens":10}}
event: message_stop
data: {"type":"message_stop"}
@@ -3678,7 +3621,7 @@ public async Task GetStreamingResponseAsync_AccumulatesUsageFromMultipleMessageS
List updates = [];
await foreach (
var update in chatClient.GetStreamingResponseAsync(
- "Test multiple message starts",
+ "Analyze this problem",
new(),
TestContext.Current.CancellationToken
)
@@ -3688,20 +3631,19 @@ var update in chatClient.GetStreamingResponseAsync(
}
Assert.NotEmpty(updates);
- var usageUpdates = updates.Where(u => u.Contents.Any(c => c is UsageContent)).ToList();
- Assert.NotEmpty(usageUpdates);
+ var reasoningUpdates = updates
+ .Where(u => u.Contents.Any(c => c is TextReasoningContent))
+ .ToList();
+ Assert.NotEmpty(reasoningUpdates);
- var usageContent = usageUpdates
- .SelectMany(u => u.Contents.OfType())
+ var reasoningContent = reasoningUpdates
+ .SelectMany(u => u.Contents.OfType())
.FirstOrDefault();
- Assert.NotNull(usageContent);
-
- Assert.Equal(15, usageContent.Details.InputTokenCount);
- Assert.Equal(2, usageContent.Details.OutputTokenCount);
+ Assert.NotNull(reasoningContent);
}
[Fact]
- public async Task GetStreamingResponseAsync_UsageFromDeltaOverridesStartEvent()
+ public async Task GetStreamingResponseAsync_WithRedactedThinkingBlockInStream()
{
VerbatimHttpHandler handler = new(
expectedRequest: """
@@ -3712,7 +3654,7 @@ public async Task GetStreamingResponseAsync_UsageFromDeltaOverridesStartEvent()
"role": "user",
"content": [{
"type": "text",
- "text": "hello"
+ "text": "Test redacted thinking"
}]
}],
"stream": true
@@ -3720,19 +3662,25 @@ public async Task GetStreamingResponseAsync_UsageFromDeltaOverridesStartEvent()
""",
actualResponse: """
event: message_start
- data: {"type":"message_start","message":{"id":"msg_01","type":"message","role":"assistant","model":"claude-haiku-4-5","content":[],"stop_reason":null,"stop_sequence":null,"usage":{"input_tokens":8,"cache_creation_input_tokens":0,"cache_read_input_tokens":0,"output_tokens":8}}}
+ data: {"type":"message_start","message":{"id":"msg_redacted_stream_01","type":"message","role":"assistant","model":"claude-haiku-4-5","content":[],"stop_reason":null,"stop_sequence":null,"usage":{"input_tokens":10,"output_tokens":0}}}
event: content_block_start
- data: {"type":"content_block_start","index":0,"content_block":{"type":"text","text":""}}
+ data: {"type":"content_block_start","index":0,"content_block":{"type":"redacted_thinking","data":"encrypted_xyz"}}
+
+ event: content_block_stop
+ data: {"type":"content_block_stop","index":0}
+
+ event: content_block_start
+ data: {"type":"content_block_start","index":1,"content_block":{"type":"text","text":""}}
event: content_block_delta
- data: {"type":"content_block_delta","index":0,"delta":{"type":"text_delta","text":"Hello!"}}
+ data: {"type":"content_block_delta","index":1,"delta":{"type":"text_delta","text":"Response"}}
event: content_block_stop
- data: {"type":"content_block_stop","index":0}
+ data: {"type":"content_block_stop","index":1}
event: message_delta
- data: {"type":"message_delta","delta":{"stop_reason":"end_turn","stop_sequence":null},"usage":{"input_tokens":8,"cache_creation_input_tokens":0,"cache_read_input_tokens":0,"output_tokens":12}}
+ data: {"type":"message_delta","delta":{"stop_reason":"end_turn","stop_sequence":null},"usage":{"output_tokens":5}}
event: message_stop
data: {"type":"message_stop"}
@@ -3745,7 +3693,7 @@ public async Task GetStreamingResponseAsync_UsageFromDeltaOverridesStartEvent()
List updates = [];
await foreach (
var update in chatClient.GetStreamingResponseAsync(
- "hello",
+ "Test redacted thinking",
new(),
TestContext.Current.CancellationToken
)
@@ -3755,184 +3703,126 @@ var update in chatClient.GetStreamingResponseAsync(
}
Assert.NotEmpty(updates);
- var usageUpdates = updates.Where(u => u.Contents.Any(c => c is UsageContent)).ToList();
- Assert.NotEmpty(usageUpdates);
+ var reasoningUpdates = updates
+ .Where(u => u.Contents.Any(c => c is TextReasoningContent))
+ .ToList();
+ Assert.NotEmpty(reasoningUpdates);
- var usageContent = usageUpdates
- .SelectMany(u => u.Contents.OfType())
+ var reasoningContent = reasoningUpdates
+ .SelectMany(u => u.Contents.OfType())
.FirstOrDefault();
- Assert.NotNull(usageContent);
-
- Assert.Equal(8, usageContent.Details.InputTokenCount);
- Assert.Equal(12, usageContent.Details.OutputTokenCount);
- Assert.Equal(20, usageContent.Details.TotalTokenCount);
+ Assert.NotNull(reasoningContent);
+ Assert.Equal(string.Empty, reasoningContent.Text);
+ Assert.Equal("encrypted_xyz", reasoningContent.ProtectedData);
}
[Fact]
- public async Task GetResponseAsync_FunctionResult_WithSingleTextContent()
+ public async Task GetStreamingResponseAsync_WithSignatureDeltaInStream()
{
VerbatimHttpHandler handler = new(
expectedRequest: """
{
"max_tokens": 1024,
"model": "claude-haiku-4-5",
- "messages": [
- {
- "role": "user",
- "content": [{
- "type": "text",
- "text": "What's the weather in Seattle?"
- }]
- },
- {
- "role": "assistant",
- "content": [{
- "type": "tool_use",
- "id": "toolu_012ji4C9Dx9qiGwDPfWSjRVC",
- "name": "get_weather",
- "input": {
- "location": "Seattle"
- }
- }]
- },
- {
- "role": "user",
- "content": [{
- "type": "tool_result",
- "tool_use_id": "toolu_012ji4C9Dx9qiGwDPfWSjRVC",
- "is_error": false,
- "content": [{
- "type": "text",
- "text": "The weather in Seattle is sunny and 72�F"
- }]
- }]
- }
- ]
+ "messages": [{
+ "role": "user",
+ "content": [{
+ "type": "text",
+ "text": "Test signature delta"
+ }]
+ }],
+ "stream": true
}
""",
actualResponse: """
- {
- "id": "msg_01DzfU3ta5z9nrJo6EGamXqV",
- "type": "message",
- "role": "assistant",
- "model": "claude-haiku-4-5-20251001",
- "content": [{
- "type": "text",
- "text": "The weather in Seattle is currently **sunny** with a temperature of **72�F** (about 22�C). Great weather for outdoor activities!"
- }],
- "stop_reason": "end_turn",
- "usage": {
- "input_tokens": 91,
- "output_tokens": 34
- }
- }
+ event: message_start
+ data: {"type":"message_start","message":{"id":"msg_sig_delta_01","type":"message","role":"assistant","model":"claude-haiku-4-5","content":[],"stop_reason":null,"stop_sequence":null,"usage":{"input_tokens":10,"output_tokens":0}}}
+
+ event: content_block_start
+ data: {"type":"content_block_start","index":0,"content_block":{"type":"thinking","thinking":"","signature":""}}
+
+ event: content_block_delta
+ data: {"type":"content_block_delta","index":0,"delta":{"type":"thinking_delta","thinking":"Analyzing..."}}
+
+ event: content_block_delta
+ data: {"type":"content_block_delta","index":0,"delta":{"type":"signature_delta","signature":"sig_part1"}}
+
+ event: content_block_delta
+ data: {"type":"content_block_delta","index":0,"delta":{"type":"signature_delta","signature":"sig_part2"}}
+
+ event: content_block_stop
+ data: {"type":"content_block_stop","index":0}
+
+ event: message_delta
+ data: {"type":"message_delta","delta":{"stop_reason":"end_turn","stop_sequence":null},"usage":{"output_tokens":5}}
+
+ event: message_stop
+ data: {"type":"message_stop"}
+
"""
);
IChatClient chatClient = CreateChatClient(handler, "claude-haiku-4-5");
- List messages =
- [
- new(ChatRole.User, "What's the weather in Seattle?"),
- new(
- ChatRole.Assistant,
- [
- new FunctionCallContent(
- "toolu_012ji4C9Dx9qiGwDPfWSjRVC",
- "get_weather",
- new Dictionary { ["location"] = "Seattle" }
- ),
- ]
- ),
- new(
- ChatRole.User,
- [
- new FunctionResultContent(
- "toolu_012ji4C9Dx9qiGwDPfWSjRVC",
- new TextContent("The weather in Seattle is sunny and 72�F")
- ),
- ]
- ),
- ];
+ List updates = [];
+ await foreach (
+ var update in chatClient.GetStreamingResponseAsync(
+ "Test signature delta",
+ new(),
+ TestContext.Current.CancellationToken
+ )
+ )
+ {
+ updates.Add(update);
+ }
- ChatResponse response = await chatClient.GetResponseAsync(
- messages,
- new(),
- TestContext.Current.CancellationToken
- );
+ Assert.NotEmpty(updates);
+ var reasoningUpdates = updates
+ .Where(u => u.Contents.Any(c => c is TextReasoningContent))
+ .ToList();
+ Assert.NotEmpty(reasoningUpdates);
- Assert.NotNull(response);
- TextContent textContent = Assert.IsType(response.Messages[0].Contents[0]);
- Assert.Contains("sunny", textContent.Text, StringComparison.OrdinalIgnoreCase);
- Assert.Contains("72", textContent.Text);
+ var allReasoningContent = reasoningUpdates
+ .SelectMany(u => u.Contents.OfType())
+ .ToList();
+ Assert.NotEmpty(allReasoningContent);
}
[Fact]
- public async Task GetResponseAsync_FunctionResult_WithMultipleTextContents()
+ public async Task GetResponseAsync_WithThinkingBlockInResponse()
{
VerbatimHttpHandler handler = new(
expectedRequest: """
{
- "max_tokens": 1024,
"model": "claude-haiku-4-5",
- "messages": [
- {
- "role": "user",
- "content": [{
- "type": "text",
- "text": "Get me news about AI"
- }]
- },
- {
- "role": "assistant",
- "content": [{
- "type": "tool_use",
- "id": "toolu_01TQkFntpAPUXLijpPu5Q1dT",
- "name": "get_news",
- "input": {
- "topic": "AI"
- }
- }]
- },
- {
- "role": "user",
- "content": [{
- "type": "tool_result",
- "tool_use_id": "toolu_01TQkFntpAPUXLijpPu5Q1dT",
- "is_error": false,
- "content": [
- {
- "type": "text",
- "text": "Breaking: AI advances"
- },
- {
- "type": "text",
- "text": "Research shows improvements"
- },
- {
- "type": "text",
- "text": "Industry adoption grows"
- }
- ]
- }]
- }
- ]
+ "messages": [{
+ "role": "user",
+ "content": [{
+ "type": "text",
+ "text": "What is the answer?"
+ }]
+ }],
+ "max_tokens": 1024
}
""",
actualResponse: """
{
- "id": "msg_01G8oMUpScZWsMe5JsNuLkgJ",
+ "id": "msg_thinking_resp_01",
"type": "message",
"role": "assistant",
- "model": "claude-haiku-4-5-20251001",
+ "model": "claude-haiku-4-5",
"content": [{
+ "type": "thinking",
+ "thinking": "Let me think through this step by step...",
+ "signature": "sig_abc123"
+ }, {
"type": "text",
- "text": "Here's the latest AI news:\\n\\n**Breaking: AI Advances**\\n- Researchers are demonstrating significant improvements in AI capabilities across various domains\\n\\n**Research Shows Improvements**\\n- Ongoing studies continue to push the boundaries of what AI systems can accomplish\\n\\n**Industry Adoption Grows**\\n- Companies across sectors are increasingly implementing AI solutions into their operations"
+ "text": "Based on my analysis, the answer is 42."
}],
"stop_reason": "end_turn",
"usage": {
- "input_tokens": 95,
- "output_tokens": 100
+ "input_tokens": 20,
+ "output_tokens": 15
}
}
"""
@@ -3940,107 +3830,62 @@ public async Task GetResponseAsync_FunctionResult_WithMultipleTextContents()
IChatClient chatClient = CreateChatClient(handler, "claude-haiku-4-5");
- List messages =
- [
- new(ChatRole.User, "Get me news about AI"),
- new(
- ChatRole.Assistant,
- [
- new FunctionCallContent(
- "toolu_01TQkFntpAPUXLijpPu5Q1dT",
- "get_news",
- new Dictionary { ["topic"] = "AI" }
- ),
- ]
- ),
- new(
- ChatRole.User,
- [
- new FunctionResultContent(
- "toolu_01TQkFntpAPUXLijpPu5Q1dT",
- new AIContent[]
- {
- new TextContent("Breaking: AI advances"),
- new TextContent("Research shows improvements"),
- new TextContent("Industry adoption grows"),
- }
- ),
- ]
- ),
- ];
-
ChatResponse response = await chatClient.GetResponseAsync(
- messages,
+ "What is the answer?",
new(),
TestContext.Current.CancellationToken
);
-
Assert.NotNull(response);
- TextContent textContent = Assert.IsType(response.Messages[0].Contents[0]);
- Assert.Contains("AI", textContent.Text);
- Assert.Contains("advances", textContent.Text, StringComparison.OrdinalIgnoreCase);
+
+ Assert.Equal(2, response.Messages[0].Contents.Count);
+
+ var reasoningContent = response
+ .Messages[0]
+ .Contents.OfType()
+ .FirstOrDefault();
+ Assert.NotNull(reasoningContent);
+ Assert.Equal("Let me think through this step by step...", reasoningContent.Text);
+ Assert.Equal("sig_abc123", reasoningContent.ProtectedData);
+
+ var textContent = response.Messages[0].Contents.OfType().FirstOrDefault();
+ Assert.NotNull(textContent);
+ Assert.Equal("Based on my analysis, the answer is 42.", textContent.Text);
}
[Fact]
- public async Task GetResponseAsync_FunctionResult_WithImageDataContent()
+ public async Task GetResponseAsync_WithRedactedThinkingBlockInResponse()
{
VerbatimHttpHandler handler = new(
expectedRequest: """
{
- "max_tokens": 1024,
"model": "claude-haiku-4-5",
- "messages": [
- {
- "role": "user",
- "content": [{
- "type": "text",
- "text": "Generate a bar chart"
- }]
- },
- {
- "role": "assistant",
- "content": [{
- "type": "tool_use",
- "id": "toolu_01RFvjHBAxq1z9kgH7vtVioW",
- "name": "generate_chart",
- "input": {
- "type": "bar"
- }
- }]
- },
- {
- "role": "user",
- "content": [{
- "type": "tool_result",
- "tool_use_id": "toolu_01RFvjHBAxq1z9kgH7vtVioW",
- "is_error": false,
- "content": [{
- "type": "image",
- "source": {
- "type": "base64",
- "media_type": "image/png",
- "data": "iVBORw0KGgoAAAANSUhEUgAAAAEAAAABCAYAAAAfFcSJAAAADUlEQVR42mNk+M9QDwADhgGAWjR9awAAAABJRU5ErkJggg=="
- }
- }]
- }]
- }
- ]
+ "messages": [{
+ "role": "user",
+ "content": [{
+ "type": "text",
+ "text": "Tell me your conclusion"
+ }]
+ }],
+ "max_tokens": 1024
}
""",
actualResponse: """
{
- "id": "msg_01JVBwA4cipSnmopX4ywyZ36",
+ "id": "msg_redacted_resp_01",
"type": "message",
"role": "assistant",
- "model": "claude-haiku-4-5-20251001",
+ "model": "claude-haiku-4-5",
"content": [{
+ "type": "redacted_thinking",
+ "data": "encrypted_thinking_data_xyz"
+ }, {
"type": "text",
- "text": "I've generated a simple bar chart for you! \\n\\nSince you didn't specify particular data, here's a basic example. If you'd like me to create a bar chart with specific data, categories, or a particular theme, please let me know."
+ "text": "Here is my conclusion."
}],
"stop_reason": "end_turn",
"usage": {
- "input_tokens": 92,
- "output_tokens": 50
+ "input_tokens": 20,
+ "output_tokens": 10
}
}
"""
@@ -4048,105 +3893,66 @@ public async Task GetResponseAsync_FunctionResult_WithImageDataContent()
IChatClient chatClient = CreateChatClient(handler, "claude-haiku-4-5");
- byte[] pngData = Convert.FromBase64String(
- "iVBORw0KGgoAAAANSUhEUgAAAAEAAAABCAYAAAAfFcSJAAAADUlEQVR42mNk+M9QDwADhgGAWjR9awAAAABJRU5ErkJggg=="
- );
-
- List messages =
- [
- new(ChatRole.User, "Generate a bar chart"),
- new(
- ChatRole.Assistant,
- [
- new FunctionCallContent(
- "toolu_01RFvjHBAxq1z9kgH7vtVioW",
- "generate_chart",
- new Dictionary { ["type"] = "bar" }
- ),
- ]
- ),
- new(
- ChatRole.User,
- [
- new FunctionResultContent(
- "toolu_01RFvjHBAxq1z9kgH7vtVioW",
- new DataContent(pngData, "image/png")
- ),
- ]
- ),
- ];
-
ChatResponse response = await chatClient.GetResponseAsync(
- messages,
+ "Tell me your conclusion",
new(),
TestContext.Current.CancellationToken
);
-
Assert.NotNull(response);
- TextContent textContent = Assert.IsType(response.Messages[0].Contents[0]);
- Assert.Contains("chart", textContent.Text, StringComparison.OrdinalIgnoreCase);
+
+ Assert.Equal(2, response.Messages[0].Contents.Count);
+
+ var reasoningContent = response
+ .Messages[0]
+ .Contents.OfType()
+ .FirstOrDefault();
+ Assert.NotNull(reasoningContent);
+ Assert.Equal(string.Empty, reasoningContent.Text);
+ Assert.Equal("encrypted_thinking_data_xyz", reasoningContent.ProtectedData);
+
+ var textContent = response.Messages[0].Contents.OfType().FirstOrDefault();
+ Assert.NotNull(textContent);
+ Assert.Equal("Here is my conclusion.", textContent.Text);
}
[Fact]
- public async Task GetResponseAsync_FunctionResult_WithPdfDataContent()
+ public async Task GetResponseAsync_WithToolUseBlockInResponse()
{
VerbatimHttpHandler handler = new(
expectedRequest: """
{
- "max_tokens": 1024,
"model": "claude-haiku-4-5",
- "messages": [
- {
- "role": "user",
- "content": [{
- "type": "text",
- "text": "Generate a sales report"
- }]
- },
- {
- "role": "assistant",
- "content": [{
- "type": "tool_use",
- "id": "toolu_01Xp2XKeM6KcpCrGKbh96biN",
- "name": "generate_report",
- "input": {
- "type": "sales"
- }
- }]
- },
- {
- "role": "user",
- "content": [{
- "type": "tool_result",
- "tool_use_id": "toolu_01Xp2XKeM6KcpCrGKbh96biN",
- "is_error": false,
- "content": [{
- "type": "document",
- "source": {
- "type": "base64",
- "media_type": "application/pdf",
- "data": "JVBERi0xLjQKMSAwIG9iajw8L1R5cGUvQ2F0YWxvZy9QYWdlcyAyIDAgUj4+ZW5kb2JqIDIgMCBvYmo8PC9UeXBlL1BhZ2VzL0tpZHNbMyAwIFJdL0NvdW50IDE+PmVuZG9iaiAzIDAgb2JqPDwvVHlwZS9QYWdlL01lZGlhQm94WzAgMCA2MTIgNzkyXS9QYXJlbnQgMiAwIFIvUmVzb3VyY2VzPDw+Pj4+ZW5kb2JqCnhyZWYKMCA0CjAwMDAwMDAwMDAgNjU1MzUgZgowMDAwMDAwMDA5IDAwMDAwIG4KMDAwMDAwMDA1MiAwMDAwMCBuCjAwMDAwMDAxMDEgMDAwMDAgbgp0cmFpbGVyPDwvU2l6ZSA0L1Jvb3QgMSAwIFI+PgpzdGFydHhyZWYKMTc4CiUlRU9G"
- }
- }]
- }]
- }
- ]
+ "messages": [{
+ "role": "user",
+ "content": [{
+ "type": "text",
+ "text": "What is 6 times 7?"
+ }]
+ }],
+ "max_tokens": 1024
}
""",
actualResponse: """
{
- "id": "msg_01WhQmBXmH4zHd1fB2VYGRWW",
+ "id": "msg_tooluse_resp_01",
"type": "message",
"role": "assistant",
- "model": "claude-haiku-4-5-20251001",
+ "model": "claude-haiku-4-5",
"content": [{
- "type": "text",
- "text": "I attempted to generate a sales report, but the generated document appears to be blank. Let me provide you with a sample **Sales Report** instead with key metrics and insights."
+ "type": "tool_use",
+ "id": "toolu_detailed_01",
+ "name": "calculate",
+ "input": {
+ "operation": "multiply",
+ "a": 6,
+ "b": 7
+ },
+ "caller": {"type": "direct"}
}],
- "stop_reason": "end_turn",
+ "stop_reason": "tool_use",
"usage": {
- "input_tokens": 1653,
- "output_tokens": 50
+ "input_tokens": 30,
+ "output_tokens": 15
}
}
"""
@@ -4154,111 +3960,76 @@ public async Task GetResponseAsync_FunctionResult_WithPdfDataContent()
IChatClient chatClient = CreateChatClient(handler, "claude-haiku-4-5");
- string pdfContent =
- "%PDF-1.4\n1 0 obj<>endobj 2 0 obj<>endobj 3 0 obj<>>>endobj\nxref\n0 4\n0000000000 65535 f\n0000000009 00000 n\n0000000052 00000 n\n0000000101 00000 n\ntrailer<>\nstartxref\n178\n%%EOF";
- byte[] pdfData = Encoding.UTF8.GetBytes(pdfContent);
+ var calcFunction = AIFunctionFactory.Create(
+ (string operation, int a, int b) =>
+ {
+ return operation == "multiply" ? a * b : 0;
+ },
+ "calculate"
+ );
- List messages =
- [
- new(ChatRole.User, "Generate a sales report"),
- new(
- ChatRole.Assistant,
- [
- new FunctionCallContent(
- "toolu_01Xp2XKeM6KcpCrGKbh96biN",
- "generate_report",
- new Dictionary { ["type"] = "sales" }
- ),
- ]
- ),
- new(
- ChatRole.User,
- [
- new FunctionResultContent(
- "toolu_01Xp2XKeM6KcpCrGKbh96biN",
- new DataContent(pdfData, "application/pdf")
- ),
- ]
- ),
- ];
+ ChatOptions options = new() { Tools = [calcFunction] };
ChatResponse response = await chatClient.GetResponseAsync(
- messages,
+ "What is 6 times 7?",
new(),
TestContext.Current.CancellationToken
);
-
Assert.NotNull(response);
- TextContent textContent = Assert.IsType(response.Messages[0].Contents[0]);
- Assert.Contains("report", textContent.Text, StringComparison.OrdinalIgnoreCase);
- }
- [Fact]
- public async Task GetResponseAsync_FunctionResult_WithTextPlainDataContent()
- {
+ Assert.Equal(ChatFinishReason.ToolCalls, response.FinishReason);
+
+ var functionCall = response
+ .Messages[0]
+ .Contents.OfType()
+ .FirstOrDefault();
+ Assert.NotNull(functionCall);
+ Assert.Equal("calculate", functionCall.Name);
+ Assert.Equal("toolu_detailed_01", functionCall.CallId);
+ Assert.NotNull(functionCall.Arguments);
+ Assert.True(functionCall.Arguments.ContainsKey("operation"));
+ Assert.Equal("multiply", functionCall.Arguments["operation"]?.ToString());
+ Assert.True(functionCall.Arguments.ContainsKey("a"));
+ Assert.True(functionCall.Arguments.ContainsKey("b"));
+ }
+
+ [Fact]
+ public async Task GetResponseAsync_WithHostedWebSearchToolOptionTriggersConversion()
+ {
VerbatimHttpHandler handler = new(
expectedRequest: """
{
- "max_tokens": 1024,
"model": "claude-haiku-4-5",
- "messages": [
- {
- "role": "user",
- "content": [{
- "type": "text",
- "text": "Get the system logs"
- }]
- },
- {
- "role": "assistant",
- "content": [
- {
- "type": "text",
- "text": "I'll retrieve the system logs for you."
- },
- {
- "type": "tool_use",
- "id": "toolu_01JqNHMtbwFQExUwDMWy3wHe",
- "name": "get_logs",
- "input": {
- "type": "system"
- }
- }
- ]
- },
+ "messages": [{
+ "role": "user",
+ "content": [{
+ "type": "text",
+ "text": "Find recent news about AI"
+ }]
+ }],
+ "max_tokens": 1024,
+ "tools": [
{
- "role": "user",
- "content": [{
- "type": "tool_result",
- "tool_use_id": "toolu_01JqNHMtbwFQExUwDMWy3wHe",
- "is_error": false,
- "content": [{
- "type": "document",
- "source": {
- "type": "text",
- "media_type": "text/plain",
- "data": "Log Entry 1: System started\nLog Entry 2: Processing data\nLog Entry 3: Task completed"
- }
- }]
- }]
+ "name": "web_search",
+ "type": "web_search_20250305"
}
]
}
""",
actualResponse: """
{
- "id": "msg_01RxuuTbpsvFyNpim6uoXujV",
+ "id": "msg_websearch_opt_01",
"type": "message",
"role": "assistant",
- "model": "claude-haiku-4-5-20251001",
+ "model": "claude-haiku-4-5",
"content": [{
"type": "text",
- "text": "Here are the system logs:\\n\\n**System Logs:**\\n1. System started\\n2. Processing data\\n3. Task completed\\n\\nThese are the current entries in the system log."
+ "text": "I'll search for that information."
}],
"stop_reason": "end_turn",
"usage": {
- "input_tokens": 148,
- "output_tokens": 50
+ "input_tokens": 20,
+ "output_tokens": 10
}
}
"""
@@ -4266,113 +4037,47 @@ public async Task GetResponseAsync_FunctionResult_WithTextPlainDataContent()
IChatClient chatClient = CreateChatClient(handler, "claude-haiku-4-5");
- string logContent =
- "Log Entry 1: System started\nLog Entry 2: Processing data\nLog Entry 3: Task completed";
- byte[] logData = Encoding.UTF8.GetBytes(logContent);
-
- List messages =
- [
- new(ChatRole.User, "Get the system logs"),
- new(
- ChatRole.Assistant,
- [
- new TextContent("I'll retrieve the system logs for you."),
- new FunctionCallContent(
- "toolu_01JqNHMtbwFQExUwDMWy3wHe",
- "get_logs",
- new Dictionary { ["type"] = "system" }
- ),
- ]
- ),
- new(
- ChatRole.User,
- [
- new FunctionResultContent(
- "toolu_01JqNHMtbwFQExUwDMWy3wHe",
- new DataContent(logData, "text/plain")
- ),
- ]
- ),
- ];
+ ChatOptions options = new() { Tools = [new HostedWebSearchTool()] };
ChatResponse response = await chatClient.GetResponseAsync(
- messages,
- new(),
+ "Find recent news about AI",
+ options,
TestContext.Current.CancellationToken
);
-
Assert.NotNull(response);
- TextContent textContent = Assert.IsType(response.Messages[0].Contents[0]);
- Assert.Contains("System started", textContent.Text);
- Assert.Contains("Task completed", textContent.Text);
}
[Fact]
- public async Task GetResponseAsync_FunctionResult_WithMixedContent()
+ public async Task GetResponseAsync_WithTextBlockWithoutCitations()
{
VerbatimHttpHandler handler = new(
expectedRequest: """
{
- "max_tokens": 1024,
"model": "claude-haiku-4-5",
- "messages": [
- {
- "role": "user",
- "content": [{
- "type": "text",
- "text": "Analyze the sales data"
- }]
- },
- {
- "role": "assistant",
- "content": [{
- "type": "tool_use",
- "id": "toolu_01ABC123",
- "name": "analyze_data",
- "input": {
- "dataset": "sales"
- }
- }]
- },
- {
- "role": "user",
- "content": [{
- "type": "tool_result",
- "tool_use_id": "toolu_01ABC123",
- "is_error": false,
- "content": [
- {
- "type": "text",
- "text": "Analysis: Mean=42.5, Median=40"
- },
- {
- "type": "image",
- "source": {
- "type": "base64",
- "media_type": "image/png",
- "data": "iVBORw0KGgoAAAANSUhEUgAAAAEAAAABCAYAAAAfFcSJAAAADUlEQVR42mNk+M9QDwADhgGAWjR9awAAAABJRU5ErkJggg=="
- }
- }
- ]
- }]
- }
- ]
+ "messages": [{
+ "role": "user",
+ "content": [{
+ "type": "text",
+ "text": "Tell me about AI"
+ }]
+ }],
+ "max_tokens": 1024
}
""",
actualResponse: """
{
- "id": "msg_01MixedContent",
+ "id": "msg_no_citations_01",
"type": "message",
"role": "assistant",
- "model": "claude-haiku-4-5-20251001",
+ "model": "claude-haiku-4-5",
"content": [{
"type": "text",
- "text": "Based on the analysis, your sales data shows a mean of 42.5 and median of 40. The chart visualization helps illustrate the distribution."
+ "text": "AI is transforming the world."
}],
"stop_reason": "end_turn",
"usage": {
- "input_tokens": 120,
- "output_tokens": 35
+ "input_tokens": 20,
+ "output_tokens": 10
}
}
"""
@@ -4380,107 +4085,3383 @@ public async Task GetResponseAsync_FunctionResult_WithMixedContent()
IChatClient chatClient = CreateChatClient(handler, "claude-haiku-4-5");
- byte[] chartData = Convert.FromBase64String(
- "iVBORw0KGgoAAAANSUhEUgAAAAEAAAABCAYAAAAfFcSJAAAADUlEQVR42mNk+M9QDwADhgGAWjR9awAAAABJRU5ErkJggg=="
- );
-
- List messages =
- [
- new(ChatRole.User, "Analyze the sales data"),
- new(
- ChatRole.Assistant,
- [
- new FunctionCallContent(
- "toolu_01ABC123",
- "analyze_data",
- new Dictionary { ["dataset"] = "sales" }
- ),
- ]
- ),
- new(
- ChatRole.User,
- [
- new FunctionResultContent(
- "toolu_01ABC123",
- new AIContent[]
- {
- new TextContent("Analysis: Mean=42.5, Median=40"),
- new DataContent(chartData, "image/png"),
- }
- ),
- ]
- ),
- ];
-
ChatResponse response = await chatClient.GetResponseAsync(
- messages,
+ "Tell me about AI",
new(),
TestContext.Current.CancellationToken
);
-
Assert.NotNull(response);
- TextContent textContent = Assert.IsType(response.Messages[0].Contents[0]);
- Assert.Contains("42.5", textContent.Text);
- Assert.Contains("40", textContent.Text);
+
+ var textContent = response.Messages[0].Contents.OfType<TextContent>().FirstOrDefault();
+ Assert.NotNull(textContent);
+ Assert.Equal("AI is transforming the world.", textContent.Text);
+ Assert.True(textContent.Annotations is null || !textContent.Annotations.Any());
}
[Fact]
- public async Task GetResponseAsync_WithFunctionResultContent_UriContent_Image()
+ public async Task GetResponseAsync_WithWebSearchCitationsInTextBlock()
{
VerbatimHttpHandler handler = new(
expectedRequest: """
{
- "max_tokens": 1024,
"model": "claude-haiku-4-5",
- "messages": [
- {
- "role": "user",
- "content": [{
- "type": "text",
- "text": "Get image URL"
- }]
- },
- {
- "role": "assistant",
- "content": [{
- "type": "tool_use",
- "id": "tool_uri_img",
- "name": "url_tool",
- "input": {}
- }]
- },
- {
- "role": "user",
- "content": [{
- "type": "tool_result",
- "tool_use_id": "tool_uri_img",
- "is_error": false,
- "content": [{
- "type": "image",
- "source": {
- "type": "url",
- "url": "https://example.com/image.png"
- }
- }]
+ "messages": [{
+ "role": "user",
+ "content": [{
+ "type": "text",
+ "text": "Tell me about recent AI developments with sources"
+ }]
+ }],
+ "max_tokens": 1024
+ }
+ """,
+ actualResponse: """
+ {
+ "id": "msg_with_citations_01",
+ "type": "message",
+ "role": "assistant",
+ "model": "claude-haiku-4-5",
+ "content": [{
+ "type": "text",
+ "text": "According to recent research [1], artificial intelligence is rapidly advancing.",
+ "citations": [{
+ "type": "web_search_result_location",
+ "cited_text": "artificial intelligence is rapidly advancing",
+ "title": "AI Research 2024",
+ "url": "https://example.com/ai-research",
+ "encrypted_index": "enc_idx_123"
+ }]
+ }],
+ "stop_reason": "end_turn",
+ "usage": {
+ "input_tokens": 25,
+ "output_tokens": 18
+ }
+ }
+ """
+ );
+
+ IChatClient chatClient = CreateChatClient(handler, "claude-haiku-4-5");
+
+ ChatResponse response = await chatClient.GetResponseAsync(
+ "Tell me about recent AI developments with sources",
+ new(),
+ TestContext.Current.CancellationToken
+ );
+ Assert.NotNull(response);
+
+ var textContent = response.Messages[0].Contents.OfType<TextContent>().FirstOrDefault();
+ Assert.NotNull(textContent);
+ Assert.Contains("artificial intelligence", textContent.Text);
+ Assert.NotNull(textContent.Annotations);
+ Assert.NotEmpty(textContent.Annotations);
+
+ var citation = textContent.Annotations.OfType<CitationAnnotation>().FirstOrDefault();
+ Assert.NotNull(citation);
+ Assert.Equal("AI Research 2024", citation.Title);
+ Assert.Equal("artificial intelligence is rapidly advancing", citation.Snippet);
+ Assert.NotNull(citation.Url);
+ Assert.Equal("https://example.com/ai-research", citation.Url.ToString());
+ Assert.Null(citation.AnnotatedRegions);
+ }
+
+ [Fact]
+ public async Task GetResponseAsync_WithContentBlockLocationCitationsInTextBlock()
+ {
+ VerbatimHttpHandler handler = new(
+ expectedRequest: """
+ {
+ "model": "claude-haiku-4-5",
+ "messages": [{
+ "role": "user",
+ "content": [{
+ "type": "text",
+ "text": "What does the document say about ML?"
+ }]
+ }],
+ "max_tokens": 1024
+ }
+ """,
+ actualResponse: """
+ {
+ "id": "msg_doc_citations_01",
+ "type": "message",
+ "role": "assistant",
+ "model": "claude-haiku-4-5",
+ "content": [{
+ "type": "text",
+ "text": "As stated in the document [1], machine learning requires large datasets.",
+ "citations": [{
+ "type": "content_block_location",
+ "cited_text": "machine learning requires large datasets",
+ "document_title": "ML Fundamentals",
+ "file_id": "file_abc123",
+ "document_index": 0,
+ "start_block_index": 15,
+ "end_block_index": 45
+ }]
+ }],
+ "stop_reason": "end_turn",
+ "usage": {
+ "input_tokens": 30,
+ "output_tokens": 20
+ }
+ }
+ """
+ );
+
+ IChatClient chatClient = CreateChatClient(handler, "claude-haiku-4-5");
+
+ ChatResponse response = await chatClient.GetResponseAsync(
+ "What does the document say about ML?",
+ new(),
+ TestContext.Current.CancellationToken
+ );
+ Assert.NotNull(response);
+
+ var textContent = response.Messages[0].Contents.OfType<TextContent>().FirstOrDefault();
+ Assert.NotNull(textContent);
+ Assert.NotNull(textContent.Annotations);
+ Assert.NotEmpty(textContent.Annotations);
+
+ var citation = textContent.Annotations.OfType<CitationAnnotation>().FirstOrDefault();
+ Assert.NotNull(citation);
+
+ Assert.Equal("machine learning requires large datasets", citation.Snippet);
+ Assert.Equal("file_abc123", citation.FileId);
+ Assert.Null(citation.Url);
+ Assert.Null(citation.AnnotatedRegions);
+ }
+
+ [Fact]
+ public async Task GetResponseAsync_ServerToolUseBlock_WebSearch_MapsToWebSearchToolCallContent()
+ {
+ VerbatimHttpHandler handler = new(
+ expectedRequest: """
+ {
+ "model": "claude-haiku-4-5",
+ "messages": [{
+ "role": "user",
+ "content": [{
+ "type": "text",
+ "text": "Search for AI news"
+ }]
+ }],
+ "max_tokens": 1024,
+ "tools": [{
+ "name": "web_search",
+ "type": "web_search_20250305"
+ }]
+ }
+ """,
+ actualResponse: """
+ {
+ "id": "msg_ws_01",
+ "type": "message",
+ "role": "assistant",
+ "model": "claude-haiku-4-5",
+ "content": [
+ {
+ "type": "server_tool_use",
+ "id": "srvtoolu_ws_01",
+ "name": "web_search",
+ "caller": { "type": "direct" },
+ "input": { "query": "latest AI news 2026" }
+ },
+ {
+ "type": "web_search_tool_result",
+ "tool_use_id": "srvtoolu_ws_01",
+ "caller": { "type": "direct" },
+ "content": [
+ {
+ "type": "web_search_result",
+ "title": "AI Breakthroughs in 2026",
+ "url": "https://example.com/ai-news",
+ "encrypted_content": "enc_abc123",
+ "page_age": "2 days ago"
+ },
+ {
+ "type": "web_search_result",
+ "title": "Latest AI Research",
+ "url": "https://example.com/ai-research",
+ "encrypted_content": "enc_def456",
+ "page_age": "1 week ago"
+ }
+ ]
+ },
+ {
+ "type": "text",
+ "text": "Here are some recent AI news articles."
+ }
+ ],
+ "stop_reason": "end_turn",
+ "usage": {
+ "input_tokens": 30,
+ "output_tokens": 25
+ }
+ }
+ """
+ );
+
+ IChatClient chatClient = CreateChatClient(handler, "claude-haiku-4-5");
+
+ ChatResponse response = await chatClient.GetResponseAsync(
+ "Search for AI news",
+ new() { Tools = [new HostedWebSearchTool()] },
+ TestContext.Current.CancellationToken
+ );
+ Assert.NotNull(response);
+
+ var contents = response.Messages[0].Contents;
+
+ // Verify WebSearchToolCallContent
+ var wsCall = Assert.IsType<WebSearchToolCallContent>(contents[0]);
+ Assert.Equal("srvtoolu_ws_01", wsCall.CallId);
+ Assert.NotNull(wsCall.Queries);
+ Assert.Single(wsCall.Queries);
+ Assert.Equal("latest AI news 2026", wsCall.Queries[0]);
+
+ // Verify WebSearchToolResultContent
+ var wsResult = Assert.IsType<WebSearchToolResultContent>(contents[1]);
+ Assert.Equal("srvtoolu_ws_01", wsResult.CallId);
+ Assert.NotNull(wsResult.Results);
+ Assert.Equal(2, wsResult.Results.Count);
+
+ var firstResult = Assert.IsType(wsResult.Results[0]);
+ Assert.Equal(new Uri("https://example.com/ai-news"), firstResult.Uri);
+
+ var secondResult = Assert.IsType(wsResult.Results[1]);
+ Assert.Equal(new Uri("https://example.com/ai-research"), secondResult.Uri);
+
+ // Verify text content
+ var text = Assert.IsType<TextContent>(contents[2]);
+ Assert.Equal("Here are some recent AI news articles.", text.Text);
+ }
+
+ [Fact]
+ public async Task GetResponseAsync_ServerToolUseBlock_CodeExecution_MapsToCodeInterpreterToolCallContent()
+ {
+ VerbatimHttpHandler handler = new(
+ expectedRequest: """
+ {
+ "model": "claude-haiku-4-5",
+ "messages": [{
+ "role": "user",
+ "content": [{
+ "type": "text",
+ "text": "Compute 2**10"
+ }]
+ }],
+ "max_tokens": 1024,
+ "tools": [{
+ "type": "code_execution_20250825",
+ "name": "code_execution"
+ }]
+ }
+ """,
+ actualResponse: """
+ {
+ "id": "msg_ce_01",
+ "type": "message",
+ "role": "assistant",
+ "model": "claude-haiku-4-5",
+ "content": [
+ {
+ "type": "server_tool_use",
+ "id": "srvtoolu_ce_01",
+ "name": "code_execution",
+ "caller": { "type": "direct" },
+ "input": { "code": "print(2**10)" }
+ },
+ {
+ "type": "code_execution_tool_result",
+ "tool_use_id": "srvtoolu_ce_01",
+ "content": {
+ "type": "code_execution_result",
+ "content": [],
+ "stdout": "1024\n",
+ "stderr": "",
+ "return_code": 0
+ }
+ },
+ {
+ "type": "text",
+ "text": "The result is 1024."
+ }
+ ],
+ "stop_reason": "end_turn",
+ "usage": {
+ "input_tokens": 25,
+ "output_tokens": 20
+ }
+ }
+ """
+ );
+
+ IChatClient chatClient = CreateChatClient(handler, "claude-haiku-4-5");
+
+ ChatResponse response = await chatClient.GetResponseAsync(
+ "Compute 2**10",
+ new() { Tools = [new HostedCodeInterpreterTool()] },
+ TestContext.Current.CancellationToken
+ );
+ Assert.NotNull(response);
+
+ var contents = response.Messages[0].Contents;
+
+ // Verify CodeInterpreterToolCallContent
+ var ciCall = Assert.IsType<CodeInterpreterToolCallContent>(contents[0]);
+ Assert.Equal("srvtoolu_ce_01", ciCall.CallId);
+ Assert.NotNull(ciCall.Inputs);
+ Assert.Single(ciCall.Inputs);
+ var codeInput = Assert.IsType<DataContent>(ciCall.Inputs[0]);
+ Assert.Equal("text/x-python", codeInput.MediaType);
+ Assert.Equal("print(2**10)", Encoding.UTF8.GetString(codeInput.Data.ToArray()));
+
+ // Verify CodeInterpreterToolResultContent
+ var ciResult = Assert.IsType<CodeInterpreterToolResultContent>(contents[1]);
+ Assert.Equal("srvtoolu_ce_01", ciResult.CallId);
+ Assert.NotNull(ciResult.Outputs);
+ var stdoutOutput = Assert.IsType<TextContent>(ciResult.Outputs[0]);
+ Assert.Equal("1024\n", stdoutOutput.Text);
+
+ // Verify text content
+ var text = Assert.IsType<TextContent>(contents[2]);
+ Assert.Equal("The result is 1024.", text.Text);
+ }
+
+ [Fact]
+ public async Task GetResponseAsync_WebSearchToolResult_WithError_MapsToErrorContent()
+ {
+ VerbatimHttpHandler handler = new(
+ expectedRequest: """
+ {
+ "model": "claude-haiku-4-5",
+ "messages": [{
+ "role": "user",
+ "content": [{
+ "type": "text",
+ "text": "Search for something"
+ }]
+ }],
+ "max_tokens": 1024,
+ "tools": [{
+ "name": "web_search",
+ "type": "web_search_20250305"
+ }]
+ }
+ """,
+ actualResponse: """
+ {
+ "id": "msg_ws_err_01",
+ "type": "message",
+ "role": "assistant",
+ "model": "claude-haiku-4-5",
+ "content": [
+ {
+ "type": "server_tool_use",
+ "id": "srvtoolu_ws_err_01",
+ "name": "web_search",
+ "caller": { "type": "direct" },
+ "input": { "query": "test query" }
+ },
+ {
+ "type": "web_search_tool_result",
+ "tool_use_id": "srvtoolu_ws_err_01",
+ "caller": { "type": "direct" },
+ "content": {
+ "type": "web_search_tool_result_error",
+ "error_code": "max_uses_exceeded"
+ }
+ },
+ {
+ "type": "text",
+ "text": "Search encountered an error."
+ }
+ ],
+ "stop_reason": "end_turn",
+ "usage": {
+ "input_tokens": 20,
+ "output_tokens": 15
+ }
+ }
+ """
+ );
+
+ IChatClient chatClient = CreateChatClient(handler, "claude-haiku-4-5");
+
+ ChatResponse response = await chatClient.GetResponseAsync(
+ "Search for something",
+ new() { Tools = [new HostedWebSearchTool()] },
+ TestContext.Current.CancellationToken
+ );
+ Assert.NotNull(response);
+
+ var contents = response.Messages[0].Contents;
+
+ var wsResult = Assert.IsType<WebSearchToolResultContent>(contents[1]);
+ Assert.Equal("srvtoolu_ws_err_01", wsResult.CallId);
+ Assert.NotNull(wsResult.Results);
+ Assert.Single(wsResult.Results);
+ var errorResult = Assert.IsType<ErrorContent>(wsResult.Results[0]);
+ Assert.Equal("MaxUsesExceeded", errorResult.ErrorCode);
+ }
+
+ [Fact]
+ public async Task GetResponseAsync_FinishReasonNullHandling()
+ {
+ VerbatimHttpHandler handler = new(
+ expectedRequest: """
+ {
+ "max_tokens": 1024,
+ "model": "claude-haiku-4-5",
+ "messages": [{
+ "role": "user",
+ "content": [{
+ "type": "text",
+ "text": "Test"
+ }]
+ }]
+ }
+ """,
+ actualResponse: """
+ {
+ "id": "msg_no_finish_01",
+ "type": "message",
+ "role": "assistant",
+ "model": "claude-haiku-4-5",
+ "content": [{
+ "type": "text",
+ "text": "Response"
+ }],
+ "stop_reason": null,
+ "usage": {
+ "input_tokens": 10,
+ "output_tokens": 5
+ }
+ }
+ """
+ );
+
+ IChatClient chatClient = CreateChatClient(handler, "claude-haiku-4-5");
+
+ ChatResponse response = await chatClient.GetResponseAsync(
+ "Test",
+ new(),
+ TestContext.Current.CancellationToken
+ );
+ Assert.NotNull(response);
+
+ Assert.Null(response.FinishReason);
+ }
+
+ [Fact]
+ public async Task GetStreamingResponseAsync_AccumulatesUsageFromMultipleMessageStartEvents()
+ {
+ VerbatimHttpHandler handler = new(
+ expectedRequest: """
+ {
+ "max_tokens": 1024,
+ "model": "claude-haiku-4-5",
+ "messages": [{
+ "role": "user",
+ "content": [{
+ "type": "text",
+ "text": "Test multiple message starts"
+ }]
+ }],
+ "stream": true
+ }
+ """,
+ actualResponse: """
+ event: message_start
+ data: {"type":"message_start","message":{"id":"msg_multi_start_01","type":"message","role":"assistant","model":"claude-haiku-4-5","content":[],"stop_reason":null,"stop_sequence":null,"usage":{"input_tokens":10,"output_tokens":0}}}
+
+ event: message_start
+ data: {"type":"message_start","message":{"id":"msg_multi_start_01","type":"message","role":"assistant","model":"claude-haiku-4-5","content":[],"stop_reason":null,"stop_sequence":null,"usage":{"input_tokens":5,"output_tokens":0}}}
+
+ event: content_block_start
+ data: {"type":"content_block_start","index":0,"content_block":{"type":"text","text":""}}
+
+ event: content_block_delta
+ data: {"type":"content_block_delta","index":0,"delta":{"type":"text_delta","text":"Response"}}
+
+ event: content_block_stop
+ data: {"type":"content_block_stop","index":0}
+
+ event: message_delta
+ data: {"type":"message_delta","delta":{"stop_reason":"end_turn","stop_sequence":null},"usage":{"input_tokens":15,"output_tokens":2}}
+
+ event: message_stop
+ data: {"type":"message_stop"}
+
+ """
+ );
+
+ IChatClient chatClient = CreateChatClient(handler, "claude-haiku-4-5");
+
+ List<ChatResponseUpdate> updates = [];
+ await foreach (
+ var update in chatClient.GetStreamingResponseAsync(
+ "Test multiple message starts",
+ new(),
+ TestContext.Current.CancellationToken
+ )
+ )
+ {
+ updates.Add(update);
+ }
+
+ Assert.NotEmpty(updates);
+ var usageUpdates = updates.Where(u => u.Contents.Any(c => c is UsageContent)).ToList();
+ Assert.NotEmpty(usageUpdates);
+
+ var usageContent = usageUpdates
+ .SelectMany(u => u.Contents.OfType<UsageContent>())
+ .FirstOrDefault();
+ Assert.NotNull(usageContent);
+
+ Assert.Equal(15, usageContent.Details.InputTokenCount);
+ Assert.Equal(2, usageContent.Details.OutputTokenCount);
+ }
+
+ [Fact]
+ public async Task GetStreamingResponseAsync_UsageFromDeltaOverridesStartEvent()
+ {
+ VerbatimHttpHandler handler = new(
+ expectedRequest: """
+ {
+ "max_tokens": 1024,
+ "model": "claude-haiku-4-5",
+ "messages": [{
+ "role": "user",
+ "content": [{
+ "type": "text",
+ "text": "hello"
+ }]
+ }],
+ "stream": true
+ }
+ """,
+ actualResponse: """
+ event: message_start
+ data: {"type":"message_start","message":{"id":"msg_01","type":"message","role":"assistant","model":"claude-haiku-4-5","content":[],"stop_reason":null,"stop_sequence":null,"usage":{"input_tokens":8,"cache_creation_input_tokens":0,"cache_read_input_tokens":0,"output_tokens":8}}}
+
+ event: content_block_start
+ data: {"type":"content_block_start","index":0,"content_block":{"type":"text","text":""}}
+
+ event: content_block_delta
+ data: {"type":"content_block_delta","index":0,"delta":{"type":"text_delta","text":"Hello!"}}
+
+ event: content_block_stop
+ data: {"type":"content_block_stop","index":0}
+
+ event: message_delta
+ data: {"type":"message_delta","delta":{"stop_reason":"end_turn","stop_sequence":null},"usage":{"input_tokens":8,"cache_creation_input_tokens":0,"cache_read_input_tokens":0,"output_tokens":12}}
+
+ event: message_stop
+ data: {"type":"message_stop"}
+
+ """
+ );
+
+ IChatClient chatClient = CreateChatClient(handler, "claude-haiku-4-5");
+
+ List<ChatResponseUpdate> updates = [];
+ await foreach (
+ var update in chatClient.GetStreamingResponseAsync(
+ "hello",
+ new(),
+ TestContext.Current.CancellationToken
+ )
+ )
+ {
+ updates.Add(update);
+ }
+
+ Assert.NotEmpty(updates);
+ var usageUpdates = updates.Where(u => u.Contents.Any(c => c is UsageContent)).ToList();
+ Assert.NotEmpty(usageUpdates);
+
+ var usageContent = usageUpdates
+ .SelectMany(u => u.Contents.OfType<UsageContent>())
+ .FirstOrDefault();
+ Assert.NotNull(usageContent);
+
+ Assert.Equal(8, usageContent.Details.InputTokenCount);
+ Assert.Equal(12, usageContent.Details.OutputTokenCount);
+ Assert.Equal(20, usageContent.Details.TotalTokenCount);
+ }
+
+ [Fact]
+ public async Task GetResponseAsync_FunctionResult_WithSingleTextContent()
+ {
+ VerbatimHttpHandler handler = new(
+ expectedRequest: """
+ {
+ "max_tokens": 1024,
+ "model": "claude-haiku-4-5",
+ "messages": [
+ {
+ "role": "user",
+ "content": [{
+ "type": "text",
+ "text": "What's the weather in Seattle?"
+ }]
+ },
+ {
+ "role": "assistant",
+ "content": [{
+ "type": "tool_use",
+ "id": "toolu_012ji4C9Dx9qiGwDPfWSjRVC",
+ "name": "get_weather",
+ "input": {
+ "location": "Seattle"
+ }
+ }]
+ },
+ {
+ "role": "user",
+ "content": [{
+ "type": "tool_result",
+ "tool_use_id": "toolu_012ji4C9Dx9qiGwDPfWSjRVC",
+ "is_error": false,
+ "content": [{
+ "type": "text",
+ "text": "The weather in Seattle is sunny and 72°F"
+ }]
+ }]
+ }
+ ]
+ }
+ """,
+ actualResponse: """
+ {
+ "id": "msg_01DzfU3ta5z9nrJo6EGamXqV",
+ "type": "message",
+ "role": "assistant",
+ "model": "claude-haiku-4-5-20251001",
+ "content": [{
+ "type": "text",
+ "text": "The weather in Seattle is currently **sunny** with a temperature of **72°F** (about 22°C). Great weather for outdoor activities!"
+ }],
+ "stop_reason": "end_turn",
+ "usage": {
+ "input_tokens": 91,
+ "output_tokens": 34
+ }
+ }
+ """
+ );
+
+ IChatClient chatClient = CreateChatClient(handler, "claude-haiku-4-5");
+
+ List<ChatMessage> messages =
+ [
+ new(ChatRole.User, "What's the weather in Seattle?"),
+ new(
+ ChatRole.Assistant,
+ [
+ new FunctionCallContent(
+ "toolu_012ji4C9Dx9qiGwDPfWSjRVC",
+ "get_weather",
+ new Dictionary<string, object?> { ["location"] = "Seattle" }
+ ),
+ ]
+ ),
+ new(
+ ChatRole.User,
+ [
+ new FunctionResultContent(
+ "toolu_012ji4C9Dx9qiGwDPfWSjRVC",
+ new TextContent("The weather in Seattle is sunny and 72°F")
+ ),
+ ]
+ ),
+ ];
+
+ ChatResponse response = await chatClient.GetResponseAsync(
+ messages,
+ new(),
+ TestContext.Current.CancellationToken
+ );
+
+ Assert.NotNull(response);
+ TextContent textContent = Assert.IsType<TextContent>(response.Messages[0].Contents[0]);
+ Assert.Contains("sunny", textContent.Text, StringComparison.OrdinalIgnoreCase);
+ Assert.Contains("72", textContent.Text);
+ }
+
+ [Fact]
+ public async Task GetResponseAsync_FunctionResult_WithMultipleTextContents()
+ {
+ VerbatimHttpHandler handler = new(
+ expectedRequest: """
+ {
+ "max_tokens": 1024,
+ "model": "claude-haiku-4-5",
+ "messages": [
+ {
+ "role": "user",
+ "content": [{
+ "type": "text",
+ "text": "Get me news about AI"
+ }]
+ },
+ {
+ "role": "assistant",
+ "content": [{
+ "type": "tool_use",
+ "id": "toolu_01TQkFntpAPUXLijpPu5Q1dT",
+ "name": "get_news",
+ "input": {
+ "topic": "AI"
+ }
+ }]
+ },
+ {
+ "role": "user",
+ "content": [{
+ "type": "tool_result",
+ "tool_use_id": "toolu_01TQkFntpAPUXLijpPu5Q1dT",
+ "is_error": false,
+ "content": [
+ {
+ "type": "text",
+ "text": "Breaking: AI advances"
+ },
+ {
+ "type": "text",
+ "text": "Research shows improvements"
+ },
+ {
+ "type": "text",
+ "text": "Industry adoption grows"
+ }
+ ]
+ }]
+ }
+ ]
+ }
+ """,
+ actualResponse: """
+ {
+ "id": "msg_01G8oMUpScZWsMe5JsNuLkgJ",
+ "type": "message",
+ "role": "assistant",
+ "model": "claude-haiku-4-5-20251001",
+ "content": [{
+ "type": "text",
+ "text": "Here's the latest AI news:\\n\\n**Breaking: AI Advances**\\n- Researchers are demonstrating significant improvements in AI capabilities across various domains\\n\\n**Research Shows Improvements**\\n- Ongoing studies continue to push the boundaries of what AI systems can accomplish\\n\\n**Industry Adoption Grows**\\n- Companies across sectors are increasingly implementing AI solutions into their operations"
+ }],
+ "stop_reason": "end_turn",
+ "usage": {
+ "input_tokens": 95,
+ "output_tokens": 100
+ }
+ }
+ """
+ );
+
+ IChatClient chatClient = CreateChatClient(handler, "claude-haiku-4-5");
+
+ List<ChatMessage> messages =
+ [
+ new(ChatRole.User, "Get me news about AI"),
+ new(
+ ChatRole.Assistant,
+ [
+ new FunctionCallContent(
+ "toolu_01TQkFntpAPUXLijpPu5Q1dT",
+ "get_news",
+ new Dictionary<string, object?> { ["topic"] = "AI" }
+ ),
+ ]
+ ),
+ new(
+ ChatRole.User,
+ [
+ new FunctionResultContent(
+ "toolu_01TQkFntpAPUXLijpPu5Q1dT",
+ new AIContent[]
+ {
+ new TextContent("Breaking: AI advances"),
+ new TextContent("Research shows improvements"),
+ new TextContent("Industry adoption grows"),
+ }
+ ),
+ ]
+ ),
+ ];
+
+ ChatResponse response = await chatClient.GetResponseAsync(
+ messages,
+ new(),
+ TestContext.Current.CancellationToken
+ );
+
+ Assert.NotNull(response);
+ TextContent textContent = Assert.IsType<TextContent>(response.Messages[0].Contents[0]);
+ Assert.Contains("AI", textContent.Text);
+ Assert.Contains("advances", textContent.Text, StringComparison.OrdinalIgnoreCase);
+ }
+
+ [Fact]
+ public async Task GetResponseAsync_FunctionResult_WithImageDataContent()
+ {
+ VerbatimHttpHandler handler = new(
+ expectedRequest: """
+ {
+ "max_tokens": 1024,
+ "model": "claude-haiku-4-5",
+ "messages": [
+ {
+ "role": "user",
+ "content": [{
+ "type": "text",
+ "text": "Generate a bar chart"
+ }]
+ },
+ {
+ "role": "assistant",
+ "content": [{
+ "type": "tool_use",
+ "id": "toolu_01RFvjHBAxq1z9kgH7vtVioW",
+ "name": "generate_chart",
+ "input": {
+ "type": "bar"
+ }
+ }]
+ },
+ {
+ "role": "user",
+ "content": [{
+ "type": "tool_result",
+ "tool_use_id": "toolu_01RFvjHBAxq1z9kgH7vtVioW",
+ "is_error": false,
+ "content": [{
+ "type": "image",
+ "source": {
+ "type": "base64",
+ "media_type": "image/png",
+ "data": "iVBORw0KGgoAAAANSUhEUgAAAAEAAAABCAYAAAAfFcSJAAAADUlEQVR42mNk+M9QDwADhgGAWjR9awAAAABJRU5ErkJggg=="
+ }
+ }]
+ }]
+ }
+ ]
+ }
+ """,
+ actualResponse: """
+ {
+ "id": "msg_01JVBwA4cipSnmopX4ywyZ36",
+ "type": "message",
+ "role": "assistant",
+ "model": "claude-haiku-4-5-20251001",
+ "content": [{
+ "type": "text",
+ "text": "I've generated a simple bar chart for you! \\n\\nSince you didn't specify particular data, here's a basic example. If you'd like me to create a bar chart with specific data, categories, or a particular theme, please let me know."
+ }],
+ "stop_reason": "end_turn",
+ "usage": {
+ "input_tokens": 92,
+ "output_tokens": 50
+ }
+ }
+ """
+ );
+
+ IChatClient chatClient = CreateChatClient(handler, "claude-haiku-4-5");
+
+ byte[] pngData = Convert.FromBase64String(
+ "iVBORw0KGgoAAAANSUhEUgAAAAEAAAABCAYAAAAfFcSJAAAADUlEQVR42mNk+M9QDwADhgGAWjR9awAAAABJRU5ErkJggg=="
+ );
+
+ List<ChatMessage> messages =
+ [
+ new(ChatRole.User, "Generate a bar chart"),
+ new(
+ ChatRole.Assistant,
+ [
+ new FunctionCallContent(
+ "toolu_01RFvjHBAxq1z9kgH7vtVioW",
+ "generate_chart",
+ new Dictionary<string, object?> { ["type"] = "bar" }
+ ),
+ ]
+ ),
+ new(
+ ChatRole.User,
+ [
+ new FunctionResultContent(
+ "toolu_01RFvjHBAxq1z9kgH7vtVioW",
+ new DataContent(pngData, "image/png")
+ ),
+ ]
+ ),
+ ];
+
+ ChatResponse response = await chatClient.GetResponseAsync(
+ messages,
+ new(),
+ TestContext.Current.CancellationToken
+ );
+
+ Assert.NotNull(response);
+ TextContent textContent = Assert.IsType<TextContent>(response.Messages[0].Contents[0]);
+ Assert.Contains("chart", textContent.Text, StringComparison.OrdinalIgnoreCase);
+ }
+
+ [Fact]
+ public async Task GetResponseAsync_FunctionResult_WithPdfDataContent()
+ {
+ VerbatimHttpHandler handler = new(
+ expectedRequest: """
+ {
+ "max_tokens": 1024,
+ "model": "claude-haiku-4-5",
+ "messages": [
+ {
+ "role": "user",
+ "content": [{
+ "type": "text",
+ "text": "Generate a sales report"
+ }]
+ },
+ {
+ "role": "assistant",
+ "content": [{
+ "type": "tool_use",
+ "id": "toolu_01Xp2XKeM6KcpCrGKbh96biN",
+ "name": "generate_report",
+ "input": {
+ "type": "sales"
+ }
+ }]
+ },
+ {
+ "role": "user",
+ "content": [{
+ "type": "tool_result",
+ "tool_use_id": "toolu_01Xp2XKeM6KcpCrGKbh96biN",
+ "is_error": false,
+ "content": [{
+ "type": "document",
+ "source": {
+ "type": "base64",
+ "media_type": "application/pdf",
+ "data": "JVBERi0xLjQKMSAwIG9iajw8L1R5cGUvQ2F0YWxvZy9QYWdlcyAyIDAgUj4+ZW5kb2JqIDIgMCBvYmo8PC9UeXBlL1BhZ2VzL0tpZHNbMyAwIFJdL0NvdW50IDE+PmVuZG9iaiAzIDAgb2JqPDwvVHlwZS9QYWdlL01lZGlhQm94WzAgMCA2MTIgNzkyXS9QYXJlbnQgMiAwIFIvUmVzb3VyY2VzPDw+Pj4+ZW5kb2JqCnhyZWYKMCA0CjAwMDAwMDAwMDAgNjU1MzUgZgowMDAwMDAwMDA5IDAwMDAwIG4KMDAwMDAwMDA1MiAwMDAwMCBuCjAwMDAwMDAxMDEgMDAwMDAgbgp0cmFpbGVyPDwvU2l6ZSA0L1Jvb3QgMSAwIFI+PgpzdGFydHhyZWYKMTc4CiUlRU9G"
+ }
+ }]
+ }]
+ }
+ ]
+ }
+ """,
+ actualResponse: """
+ {
+ "id": "msg_01WhQmBXmH4zHd1fB2VYGRWW",
+ "type": "message",
+ "role": "assistant",
+ "model": "claude-haiku-4-5-20251001",
+ "content": [{
+ "type": "text",
+ "text": "I attempted to generate a sales report, but the generated document appears to be blank. Let me provide you with a sample **Sales Report** instead with key metrics and insights."
+ }],
+ "stop_reason": "end_turn",
+ "usage": {
+ "input_tokens": 1653,
+ "output_tokens": 50
+ }
+ }
+ """
+ );
+
+ IChatClient chatClient = CreateChatClient(handler, "claude-haiku-4-5");
+
+ string pdfContent =
+ "%PDF-1.4\n1 0 obj<</Type/Catalog/Pages 2 0 R>>endobj 2 0 obj<</Type/Pages/Kids[3 0 R]/Count 1>>endobj 3 0 obj<</Type/Page/MediaBox[0 0 612 792]/Parent 2 0 R/Resources<<>>>>endobj\nxref\n0 4\n0000000000 65535 f\n0000000009 00000 n\n0000000052 00000 n\n0000000101 00000 n\ntrailer<</Size 4/Root 1 0 R>>\nstartxref\n178\n%%EOF";
+ byte[] pdfData = Encoding.UTF8.GetBytes(pdfContent);
+
+ List<ChatMessage> messages =
+ [
+ new(ChatRole.User, "Generate a sales report"),
+ new(
+ ChatRole.Assistant,
+ [
+ new FunctionCallContent(
+ "toolu_01Xp2XKeM6KcpCrGKbh96biN",
+ "generate_report",
+ new Dictionary<string, object?> { ["type"] = "sales" }
+ ),
+ ]
+ ),
+ new(
+ ChatRole.User,
+ [
+ new FunctionResultContent(
+ "toolu_01Xp2XKeM6KcpCrGKbh96biN",
+ new DataContent(pdfData, "application/pdf")
+ ),
+ ]
+ ),
+ ];
+
+ ChatResponse response = await chatClient.GetResponseAsync(
+ messages,
+ new(),
+ TestContext.Current.CancellationToken
+ );
+
+ Assert.NotNull(response);
+ TextContent textContent = Assert.IsType<TextContent>(response.Messages[0].Contents[0]);
+ Assert.Contains("report", textContent.Text, StringComparison.OrdinalIgnoreCase);
+ }
+
+ [Fact]
+ public async Task GetResponseAsync_FunctionResult_WithTextPlainDataContent()
+ {
+ VerbatimHttpHandler handler = new(
+ expectedRequest: """
+ {
+ "max_tokens": 1024,
+ "model": "claude-haiku-4-5",
+ "messages": [
+ {
+ "role": "user",
+ "content": [{
+ "type": "text",
+ "text": "Get the system logs"
+ }]
+ },
+ {
+ "role": "assistant",
+ "content": [
+ {
+ "type": "text",
+ "text": "I'll retrieve the system logs for you."
+ },
+ {
+ "type": "tool_use",
+ "id": "toolu_01JqNHMtbwFQExUwDMWy3wHe",
+ "name": "get_logs",
+ "input": {
+ "type": "system"
+ }
+ }
+ ]
+ },
+ {
+ "role": "user",
+ "content": [{
+ "type": "tool_result",
+ "tool_use_id": "toolu_01JqNHMtbwFQExUwDMWy3wHe",
+ "is_error": false,
+ "content": [{
+ "type": "document",
+ "source": {
+ "type": "text",
+ "media_type": "text/plain",
+ "data": "Log Entry 1: System started\nLog Entry 2: Processing data\nLog Entry 3: Task completed"
+ }
+ }]
+ }]
+ }
+ ]
+ }
+ """,
+ actualResponse: """
+ {
+ "id": "msg_01RxuuTbpsvFyNpim6uoXujV",
+ "type": "message",
+ "role": "assistant",
+ "model": "claude-haiku-4-5-20251001",
+ "content": [{
+ "type": "text",
+ "text": "Here are the system logs:\\n\\n**System Logs:**\\n1. System started\\n2. Processing data\\n3. Task completed\\n\\nThese are the current entries in the system log."
+ }],
+ "stop_reason": "end_turn",
+ "usage": {
+ "input_tokens": 148,
+ "output_tokens": 50
+ }
+ }
+ """
+ );
+
+ IChatClient chatClient = CreateChatClient(handler, "claude-haiku-4-5");
+
+ string logContent =
+ "Log Entry 1: System started\nLog Entry 2: Processing data\nLog Entry 3: Task completed";
+ byte[] logData = Encoding.UTF8.GetBytes(logContent);
+
+ List<ChatMessage> messages =
+ [
+ new(ChatRole.User, "Get the system logs"),
+ new(
+ ChatRole.Assistant,
+ [
+ new TextContent("I'll retrieve the system logs for you."),
+ new FunctionCallContent(
+ "toolu_01JqNHMtbwFQExUwDMWy3wHe",
+ "get_logs",
+ new Dictionary<string, object?> { ["type"] = "system" }
+ ),
+ ]
+ ),
+ new(
+ ChatRole.User,
+ [
+ new FunctionResultContent(
+ "toolu_01JqNHMtbwFQExUwDMWy3wHe",
+ new DataContent(logData, "text/plain")
+ ),
+ ]
+ ),
+ ];
+
+ ChatResponse response = await chatClient.GetResponseAsync(
+ messages,
+ new(),
+ TestContext.Current.CancellationToken
+ );
+
+ Assert.NotNull(response);
+ TextContent textContent = Assert.IsType<TextContent>(response.Messages[0].Contents[0]);
+ Assert.Contains("System started", textContent.Text);
+ Assert.Contains("Task completed", textContent.Text);
+ }
+
+ [Fact]
+ public async Task GetResponseAsync_FunctionResult_WithMixedContent()
+ {
+ VerbatimHttpHandler handler = new(
+ expectedRequest: """
+ {
+ "max_tokens": 1024,
+ "model": "claude-haiku-4-5",
+ "messages": [
+ {
+ "role": "user",
+ "content": [{
+ "type": "text",
+ "text": "Analyze the sales data"
+ }]
+ },
+ {
+ "role": "assistant",
+ "content": [{
+ "type": "tool_use",
+ "id": "toolu_01ABC123",
+ "name": "analyze_data",
+ "input": {
+ "dataset": "sales"
+ }
+ }]
+ },
+ {
+ "role": "user",
+ "content": [{
+ "type": "tool_result",
+ "tool_use_id": "toolu_01ABC123",
+ "is_error": false,
+ "content": [
+ {
+ "type": "text",
+ "text": "Analysis: Mean=42.5, Median=40"
+ },
+ {
+ "type": "image",
+ "source": {
+ "type": "base64",
+ "media_type": "image/png",
+ "data": "iVBORw0KGgoAAAANSUhEUgAAAAEAAAABCAYAAAAfFcSJAAAADUlEQVR42mNk+M9QDwADhgGAWjR9awAAAABJRU5ErkJggg=="
+ }
+ }
+ ]
+ }]
+ }
+ ]
+ }
+ """,
+ actualResponse: """
+ {
+ "id": "msg_01MixedContent",
+ "type": "message",
+ "role": "assistant",
+ "model": "claude-haiku-4-5-20251001",
+ "content": [{
+ "type": "text",
+ "text": "Based on the analysis, your sales data shows a mean of 42.5 and median of 40. The chart visualization helps illustrate the distribution."
+ }],
+ "stop_reason": "end_turn",
+ "usage": {
+ "input_tokens": 120,
+ "output_tokens": 35
+ }
+ }
+ """
+ );
+
+ IChatClient chatClient = CreateChatClient(handler, "claude-haiku-4-5");
+
+ byte[] chartData = Convert.FromBase64String(
+ "iVBORw0KGgoAAAANSUhEUgAAAAEAAAABCAYAAAAfFcSJAAAADUlEQVR42mNk+M9QDwADhgGAWjR9awAAAABJRU5ErkJggg=="
+ );
+
+ List<ChatMessage> messages =
+ [
+ new(ChatRole.User, "Analyze the sales data"),
+ new(
+ ChatRole.Assistant,
+ [
+ new FunctionCallContent(
+ "toolu_01ABC123",
+ "analyze_data",
+ new Dictionary<string, object?> { ["dataset"] = "sales" }
+ ),
+ ]
+ ),
+ new(
+ ChatRole.User,
+ [
+ new FunctionResultContent(
+ "toolu_01ABC123",
+ new AIContent[]
+ {
+ new TextContent("Analysis: Mean=42.5, Median=40"),
+ new DataContent(chartData, "image/png"),
+ }
+ ),
+ ]
+ ),
+ ];
+
+ ChatResponse response = await chatClient.GetResponseAsync(
+ messages,
+ new(),
+ TestContext.Current.CancellationToken
+ );
+
+ Assert.NotNull(response);
+ TextContent textContent = Assert.IsType<TextContent>(response.Messages[0].Contents[0]);
+ Assert.Contains("42.5", textContent.Text);
+ Assert.Contains("40", textContent.Text);
+ }
+
+ [Fact]
+ public async Task GetResponseAsync_WithFunctionResultContent_UriContent_Image()
+ {
+ VerbatimHttpHandler handler = new(
+ expectedRequest: """
+ {
+ "max_tokens": 1024,
+ "model": "claude-haiku-4-5",
+ "messages": [
+ {
+ "role": "user",
+ "content": [{
+ "type": "text",
+ "text": "Get image URL"
+ }]
+ },
+ {
+ "role": "assistant",
+ "content": [{
+ "type": "tool_use",
+ "id": "tool_uri_img",
+ "name": "url_tool",
+ "input": {}
+ }]
+ },
+ {
+ "role": "user",
+ "content": [{
+ "type": "tool_result",
+ "tool_use_id": "tool_uri_img",
+ "is_error": false,
+ "content": [{
+ "type": "image",
+ "source": {
+ "type": "url",
+ "url": "https://example.com/image.png"
+ }
+ }]
+ }]
+ }
+ ]
+ }
+ """,
+ actualResponse: """
+ {
+ "id": "msg_uri_img_01",
+ "type": "message",
+ "role": "assistant",
+ "model": "claude-haiku-4-5",
+ "content": [{
+ "type": "text",
+ "text": "Image URL received"
+ }],
+ "stop_reason": "end_turn",
+ "usage": {
+ "input_tokens": 32,
+ "output_tokens": 8
+ }
+ }
+ """
+ );
+
+ IChatClient chatClient = CreateChatClient(handler, "claude-haiku-4-5");
+
+ List<ChatMessage> messages =
+ [
+ new ChatMessage(ChatRole.User, "Get image URL"),
+ new ChatMessage(
+ ChatRole.Assistant,
+ [
+ new FunctionCallContent(
+ "tool_uri_img",
+ "url_tool",
+ new Dictionary<string, object?>()
+ ),
+ ]
+ ),
+ new ChatMessage(
+ ChatRole.User,
+ [
+ new FunctionResultContent(
+ "tool_uri_img",
+ new UriContent(new Uri("https://example.com/image.png"), "image/png")
+ ),
+ ]
+ ),
+ ];
+
+ ChatResponse response = await chatClient.GetResponseAsync(
+ messages,
+ new(),
+ TestContext.Current.CancellationToken
+ );
+ Assert.NotNull(response);
+ }
+
+ [Fact]
+ public async Task GetResponseAsync_WithFunctionResultContent_UriContent_PDF()
+ {
+ VerbatimHttpHandler handler = new(
+ expectedRequest: """
+ {
+ "max_tokens": 1024,
+ "model": "claude-haiku-4-5",
+ "messages": [
+ {
+ "role": "user",
+ "content": [{
+ "type": "text",
+ "text": "Get PDF URL"
+ }]
+ },
+ {
+ "role": "assistant",
+ "content": [{
+ "type": "tool_use",
+ "id": "tool_uri_pdf",
+ "name": "pdf_url_tool",
+ "input": {}
+ }]
+ },
+ {
+ "role": "user",
+ "content": [{
+ "type": "tool_result",
+ "tool_use_id": "tool_uri_pdf",
+ "is_error": false,
+ "content": [{
+ "type": "document",
+ "source": {
+ "type": "url",
+ "url": "https://example.com/document.pdf"
+ }
+ }]
+ }]
+ }
+ ]
+ }
+ """,
+ actualResponse: """
+ {
+ "id": "msg_uri_pdf_01",
+ "type": "message",
+ "role": "assistant",
+ "model": "claude-haiku-4-5",
+ "content": [{
+ "type": "text",
+ "text": "PDF URL received"
+ }],
+ "stop_reason": "end_turn",
+ "usage": {
+ "input_tokens": 35,
+ "output_tokens": 9
+ }
+ }
+ """
+ );
+
+ IChatClient chatClient = CreateChatClient(handler, "claude-haiku-4-5");
+
+ List<ChatMessage> messages =
+ [
+ new ChatMessage(ChatRole.User, "Get PDF URL"),
+ new ChatMessage(
+ ChatRole.Assistant,
+ [
+ new FunctionCallContent(
+ "tool_uri_pdf",
+ "pdf_url_tool",
+ new Dictionary<string, object?>()
+ ),
+ ]
+ ),
+ new ChatMessage(
+ ChatRole.User,
+ [
+ new FunctionResultContent(
+ "tool_uri_pdf",
+ new UriContent(
+ new Uri("https://example.com/document.pdf"),
+ "application/pdf"
+ )
+ ),
+ ]
+ ),
+ ];
+
+ ChatResponse response = await chatClient.GetResponseAsync(
+ messages,
+ new(),
+ TestContext.Current.CancellationToken
+ );
+ Assert.NotNull(response);
+ }
+
+ [Fact]
+ public async Task GetResponseAsync_WithSimpleResponseFormat_ReturnsStructuredJSON()
+ {
+ VerbatimHttpHandler handler = new(
+ expectedRequest: """
+ {
+ "max_tokens": 1024,
+ "model": "claude-sonnet-4-5-20250929",
+ "messages": [{
+ "role": "user",
+ "content": [{
+ "type": "text",
+ "text": "Tell me about Albert Einstein. Respond with his name and age at death."
+ }]
+ }],
+ "output_config": {
+ "format": {
+ "type": "json_schema",
+ "schema": {
+ "type": "object",
+ "properties": {
+ "name": { "type": "string" },
+ "age": { "type": "integer" }
+ },
+ "required": ["name", "age"],
+ "additionalProperties": false
+ }
+ }
+ }
+ }
+ """,
+ actualResponse: """
+ {
+ "id": "msg_format_01",
+ "type": "message",
+ "role": "assistant",
+ "model": "claude-sonnet-4-5-20250929",
+ "content": [{
+ "type": "text",
+ "text": "{\"name\":\"Albert Einstein\",\"age\":76}"
+ }],
+ "stop_reason": "end_turn",
+ "usage": {
+ "input_tokens": 25,
+ "output_tokens": 15
+ }
+ }
+ """
+ );
+
+ IChatClient chatClient = CreateChatClient(handler, "claude-sonnet-4-5-20250929");
+
+ ChatOptions options = new()
+ {
+ ResponseFormat = ChatResponseFormat.ForJsonSchema(
+ JsonElement.Parse(
+ """
+ {
+ "type": "object",
+ "properties": {
+ "name": { "type": "string" },
+ "age": { "type": "integer" }
+ },
+ "required": ["name", "age"]
+ }
+ """
+ ),
+ "person_info"
+ ),
+ };
+
+ ChatResponse response = await chatClient.GetResponseAsync(
+ "Tell me about Albert Einstein. Respond with his name and age at death.",
+ options,
+ TestContext.Current.CancellationToken
+ );
+
+ Assert.NotNull(response);
+ TextContent textContent = Assert.IsType<TextContent>(response.Messages[0].Contents[0]);
+ Assert.Contains("Einstein", textContent.Text);
+ Assert.Contains("76", textContent.Text);
+ }
+
+ [Fact]
+ public async Task GetResponseAsync_WithNestedObjectSchema_ReturnsStructuredJSON()
+ {
+ VerbatimHttpHandler handler = new(
+ expectedRequest: """
+ {
+ "max_tokens": 1024,
+ "model": "claude-sonnet-4-5-20250929",
+ "messages": [{
+ "role": "user",
+ "content": [{
+ "type": "text",
+ "text": "Tell me about the book '1984' by George Orwell."
+ }]
+ }],
+ "output_config": {
+ "format": {
+ "type": "json_schema",
+ "schema": {
+ "type": "object",
+ "properties": {
+ "title": { "type": "string" },
+ "author": {
+ "type": "object",
+ "properties": {
+ "name": { "type": "string" },
+ "birth_year": { "type": "integer" }
+ },
+ "required": ["name", "birth_year"],
+ "additionalProperties": false
+ },
+ "published_year": {
+ "type": "integer"
+ }
+ },
+ "required": ["title", "author", "published_year"],
+ "additionalProperties": false
+ }
+ }
+ }
+ }
+ """,
+ actualResponse: """
+ {
+ "id": "msg_format_02",
+ "type": "message",
+ "role": "assistant",
+ "model": "claude-sonnet-4-5-20250929",
+ "content": [{
+ "type": "text",
+ "text": "{\"title\":\"1984\",\"author\":{\"name\":\"George Orwell\",\"birth_year\":1903},\"published_year\":1949}"
+ }],
+ "stop_reason": "end_turn",
+ "usage": {
+ "input_tokens": 30,
+ "output_tokens": 25
+ }
+ }
+ """
+ );
+
+ IChatClient chatClient = CreateChatClient(handler, "claude-sonnet-4-5-20250929");
+
+ ChatOptions options = new()
+ {
+ ResponseFormat = ChatResponseFormat.ForJsonSchema(
+ JsonElement.Parse(
+ """
+ {
+ "type": "object",
+ "properties": {
+ "title": { "type": "string" },
+ "author": {
+ "type": "object",
+ "properties": {
+ "name": { "type": "string" },
+ "birth_year": { "type": "integer" }
+ },
+ "required": ["name", "birth_year"]
+ },
+ "published_year": { "type": "integer" }
+ },
+ "required": ["title", "author", "published_year"]
+ }
+ """
+ ),
+ "book_info"
+ ),
+ };
+
+ ChatResponse response = await chatClient.GetResponseAsync(
+ "Tell me about the book '1984' by George Orwell.",
+ options,
+ TestContext.Current.CancellationToken
+ );
+
+ Assert.NotNull(response);
+ TextContent textContent = Assert.IsType<TextContent>(response.Messages[0].Contents[0]);
+ Assert.Contains("1984", textContent.Text);
+ Assert.Contains("Orwell", textContent.Text);
+ Assert.Contains("1903", textContent.Text);
+ Assert.Contains("1949", textContent.Text);
+ }
+
+ [Fact]
+ public async Task GetResponseAsync_WithArraySchema_ReturnsStructuredJSON()
+ {
+ VerbatimHttpHandler handler = new(
+ expectedRequest: """
+ {
+ "max_tokens": 1024,
+ "model": "claude-sonnet-4-5-20250929",
+ "messages": [{
+ "role": "user",
+ "content": [{
+ "type": "text",
+ "text": "List 3 common fruits: apple, orange, and banana."
+ }]
+ }],
+ "output_config": {
+ "format": {
+ "type": "json_schema",
+ "schema": {
+ "type": "object",
+ "properties": {
+ "fruits": {
+ "type": "array",
+ "items": {
+ "type": "object",
+ "properties": {
+ "name": { "type": "string" },
+ "color": { "type": "string" },
+ "is_citrus": { "type": "boolean" }
+ },
+ "required": ["name", "color", "is_citrus"],
+ "additionalProperties": false
+ }
+ }
+ },
+ "required": ["fruits"],
+ "additionalProperties": false
+ }
+ }
+ }
+ }
+ """,
+ actualResponse: """
+ {
+ "id": "msg_format_03",
+ "type": "message",
+ "role": "assistant",
+ "model": "claude-sonnet-4-5-20250929",
+ "content": [{
+ "type": "text",
+ "text": "{\"fruits\":[{\"name\":\"apple\",\"color\":\"red\",\"is_citrus\":false},{\"name\":\"orange\",\"color\":\"orange\",\"is_citrus\":true},{\"name\":\"banana\",\"color\":\"yellow\",\"is_citrus\":false}]}"
+ }],
+ "stop_reason": "end_turn",
+ "usage": {
+ "input_tokens": 35,
+ "output_tokens": 40
+ }
+ }
+ """
+ );
+
+ IChatClient chatClient = CreateChatClient(handler, "claude-sonnet-4-5-20250929");
+
+ ChatOptions options = new()
+ {
+ ResponseFormat = ChatResponseFormat.ForJsonSchema(
+ JsonElement.Parse(
+ """
+ {
+ "type": "object",
+ "properties": {
+ "fruits": {
+ "type": "array",
+ "items": {
+ "type": "object",
+ "properties": {
+ "name": { "type": "string" },
+ "color": { "type": "string" },
+ "is_citrus": { "type": "boolean" }
+ },
+ "required": ["name", "color", "is_citrus"]
+ }
+ }
+ },
+ "required": ["fruits"]
+ }
+ """
+ ),
+ "fruit_list"
+ ),
+ };
+
+ ChatResponse response = await chatClient.GetResponseAsync(
+ "List 3 common fruits: apple, orange, and banana.",
+ options,
+ TestContext.Current.CancellationToken
+ );
+
+ Assert.NotNull(response);
+ TextContent textContent = Assert.IsType<TextContent>(response.Messages[0].Contents[0]);
+ Assert.Contains("apple", textContent.Text);
+ Assert.Contains("orange", textContent.Text);
+ Assert.Contains("banana", textContent.Text);
+ }
+
+ [Fact]
+ public async Task GetResponseAsync_WithHostedCodeInterpreterTool()
+ {
+ VerbatimHttpHandler handler = new(
+ expectedRequest: """
+ {
+ "max_tokens": 1024,
+ "model": "claude-haiku-4-5",
+ "messages": [{
+ "role": "user",
+ "content": [{
+ "type": "text",
+ "text": "Execute code"
+ }]
+ }],
+ "tools": [{
+ "type": "code_execution_20250825",
+ "name": "code_execution"
+ }]
+ }
+ """,
+ actualResponse: """
+ {
+ "id": "msg_code_exec_01",
+ "type": "message",
+ "role": "assistant",
+ "model": "claude-haiku-4-5",
+ "content": [{
+ "type": "text",
+ "text": "I can execute code."
+ }],
+ "stop_reason": "end_turn",
+ "usage": {
+ "input_tokens": 15,
+ "output_tokens": 6
+ }
+ }
+ """
+ );
+
+ IChatClient chatClient = CreateChatClient(handler, "claude-haiku-4-5");
+
+ ChatOptions options = new() { Tools = [new HostedCodeInterpreterTool()] };
+
+ ChatResponse response = await chatClient.GetResponseAsync(
+ "Execute code",
+ options,
+ TestContext.Current.CancellationToken
+ );
+ Assert.NotNull(response);
+ }
+
+ [Fact]
+ public async Task GetResponseAsync_CodeExecutionToolResult_WithError()
+ {
+ VerbatimHttpHandler handler = new(
+ expectedRequest: """
+ {
+ "max_tokens": 1024,
+ "model": "claude-haiku-4-5",
+ "messages": [{
+ "role": "user",
+ "content": [{
+ "type": "text",
+ "text": "Test code execution error"
+ }]
+ }]
+ }
+ """,
+ actualResponse: """
+ {
+ "id": "msg_code_error_01",
+ "type": "message",
+ "role": "assistant",
+ "model": "claude-haiku-4-5",
+ "content": [{
+ "type": "code_execution_tool_result",
+ "tool_use_id": "code_exec_error_1",
+ "content": {
+ "type": "code_execution_tool_result_error",
+ "error_code": "execution_time_exceeded"
+ }
+ }],
+ "stop_reason": "end_turn",
+ "usage": {
+ "input_tokens": 10,
+ "output_tokens": 5
+ }
+ }
+ """
+ );
+
+ IChatClient chatClient = CreateChatClient(handler, "claude-haiku-4-5");
+ ChatResponse response = await chatClient.GetResponseAsync(
+ "Test code execution error",
+ new(),
+ TestContext.Current.CancellationToken
+ );
+
+ CodeInterpreterToolResultContent codeResult =
+ Assert.IsType<CodeInterpreterToolResultContent>(response.Messages[0].Contents[0]);
+ Assert.NotNull(codeResult);
+ Assert.Equal("code_exec_error_1", codeResult.CallId);
+ Assert.NotNull(codeResult.Outputs);
+ Assert.Single(codeResult.Outputs);
+
+ ErrorContent errorContent = Assert.IsType<ErrorContent>(codeResult.Outputs[0]);
+ Assert.Equal("ExecutionTimeExceeded", errorContent.ErrorCode);
+ }
+
+ [Theory]
+ [InlineData("code_execution")]
+ [InlineData("bash_code_execution")]
+ public async Task GetResponseAsync_CodeExecutionResult_WithStdout(string executionType)
+ {
+ VerbatimHttpHandler handler = new(
+ expectedRequest: """
+ {
+ "max_tokens": 1024,
+ "model": "claude-haiku-4-5",
+ "messages": [{
+ "role": "user",
+ "content": [{
+ "type": "text",
+ "text": "Run code"
+ }]
+ }]
+ }
+ """,
+ actualResponse: $$"""
+ {
+ "id": "msg_stdout_01",
+ "type": "message",
+ "role": "assistant",
+ "model": "claude-haiku-4-5",
+ "content": [{
+ "type": "{{executionType}}_tool_result",
+ "tool_use_id": "exec_1",
+ "content": {
+ "type": "{{executionType}}_result",
+ "stdout": "Hello World\n42\n",
+ "stderr": "",
+ "return_code": 0,
+ "content": []
+ }
+ }],
+ "stop_reason": "end_turn",
+ "usage": {
+ "input_tokens": 10,
+ "output_tokens": 5
+ }
+ }
+ """
+ );
+
+ IChatClient chatClient = CreateChatClient(handler, "claude-haiku-4-5");
+ ChatResponse response = await chatClient.GetResponseAsync(
+ "Run code",
+ new(),
+ TestContext.Current.CancellationToken
+ );
+
+ CodeInterpreterToolResultContent codeResult =
+ Assert.IsType<CodeInterpreterToolResultContent>(response.Messages[0].Contents[0]);
+ Assert.Equal("exec_1", codeResult.CallId);
+ Assert.NotNull(codeResult.Outputs);
+ Assert.Single(codeResult.Outputs);
+
+ TextContent textOutput = Assert.IsType<TextContent>(codeResult.Outputs[0]);
+ Assert.Equal("Hello World\n42\n", textOutput.Text);
+ }
+
+ [Theory]
+ [InlineData("code_execution", "Division by zero error", 1)]
+ [InlineData("bash_code_execution", "bash: command not found: nonexistent", 127)]
+ public async Task GetResponseAsync_CodeExecutionResult_WithStderrAndNonZeroReturnCode(
+ string executionType,
+ string stderrMessage,
+ int returnCode
+ )
+ {
+ VerbatimHttpHandler handler = new(
+ expectedRequest: """
+ {
+ "max_tokens": 1024,
+ "model": "claude-haiku-4-5",
+ "messages": [{
+ "role": "user",
+ "content": [{
+ "type": "text",
+ "text": "Run failing code"
+ }]
+ }]
+ }
+ """,
+ actualResponse: $$"""
+ {
+ "id": "msg_stderr_01",
+ "type": "message",
+ "role": "assistant",
+ "model": "claude-haiku-4-5",
+ "content": [{
+ "type": "{{executionType}}_tool_result",
+ "tool_use_id": "exec_2",
+ "content": {
+ "type": "{{executionType}}_result",
+ "stdout": "",
+ "stderr": "{{stderrMessage}}",
+ "return_code": {{returnCode}},
+ "content": []
+ }
+ }],
+ "stop_reason": "end_turn",
+ "usage": {
+ "input_tokens": 10,
+ "output_tokens": 5
+ }
+ }
+ """
+ );
+
+ IChatClient chatClient = CreateChatClient(handler, "claude-haiku-4-5");
+ ChatResponse response = await chatClient.GetResponseAsync(
+ "Run failing code",
+ new(),
+ TestContext.Current.CancellationToken
+ );
+
+ CodeInterpreterToolResultContent codeResult =
+ Assert.IsType<CodeInterpreterToolResultContent>(response.Messages[0].Contents[0]);
+ Assert.NotNull(codeResult.Outputs);
+ Assert.Single(codeResult.Outputs);
+
+ ErrorContent errorOutput = Assert.IsType<ErrorContent>(codeResult.Outputs[0]);
+ Assert.Equal(stderrMessage, errorOutput.Message);
+ Assert.Equal(
+ returnCode.ToString(System.Globalization.CultureInfo.InvariantCulture),
+ errorOutput.ErrorCode
+ );
+ }
+
+ [Theory]
+ [InlineData("code_execution")]
+ [InlineData("bash_code_execution")]
+ public async Task GetResponseAsync_CodeExecutionResult_WithFileOutputs(string executionType)
+ {
+ VerbatimHttpHandler handler = new(
+ expectedRequest: """
+ {
+ "max_tokens": 1024,
+ "model": "claude-haiku-4-5",
+ "messages": [{
+ "role": "user",
+ "content": [{
+ "type": "text",
+ "text": "Create file"
+ }]
+ }]
+ }
+ """,
+ actualResponse: $$"""
+ {
+ "id": "msg_files_01",
+ "type": "message",
+ "role": "assistant",
+ "model": "claude-haiku-4-5",
+ "content": [{
+ "type": "{{executionType}}_tool_result",
+ "tool_use_id": "exec_3",
+ "content": {
+ "type": "{{executionType}}_result",
+ "stdout": "File created",
+ "stderr": "",
+ "return_code": 0,
+ "content": [{
+ "type": "{{executionType}}_output",
+ "file_id": "file_output_123"
+ }, {
+ "type": "{{executionType}}_output",
+ "file_id": "file_output_456"
}]
}
- ]
+ }],
+ "stop_reason": "end_turn",
+ "usage": {
+ "input_tokens": 10,
+ "output_tokens": 5
+ }
+ }
+ """
+ );
+
+ IChatClient chatClient = CreateChatClient(handler, "claude-haiku-4-5");
+ ChatResponse response = await chatClient.GetResponseAsync(
+ "Create file",
+ new(),
+ TestContext.Current.CancellationToken
+ );
+
+ CodeInterpreterToolResultContent codeResult =
+ Assert.IsType<CodeInterpreterToolResultContent>(response.Messages[0].Contents[0]);
+ Assert.NotNull(codeResult.Outputs);
+ Assert.Equal(3, codeResult.Outputs.Count);
+
+ TextContent textOutput = Assert.IsType<TextContent>(codeResult.Outputs[0]);
+ Assert.Equal("File created", textOutput.Text);
+
+ HostedFileContent fileOutput1 = Assert.IsType<HostedFileContent>(codeResult.Outputs[1]);
+ Assert.Equal("file_output_123", fileOutput1.FileId);
+
+ HostedFileContent fileOutput2 = Assert.IsType<HostedFileContent>(codeResult.Outputs[2]);
+ Assert.Equal("file_output_456", fileOutput2.FileId);
+ }
+
+ [Fact]
+ public async Task GetResponseAsync_WithAIFunctionTool_AdditionalProperties_FlowsThrough()
+ {
+ VerbatimHttpHandler handler = new(
+ expectedRequest: """
+ {
+ "max_tokens": 1024,
+ "model": "claude-haiku-4-5",
+ "messages": [{
+ "role": "user",
+ "content": [{
+ "type": "text",
+ "text": "Use enhanced tool"
+ }]
+ }],
+ "tools": [{
+ "name": "enhanced_tool",
+ "description": "A tool with additional properties",
+ "input_schema": {
+ "type": "object",
+ "properties": {
+ "query": {
+ "type": "string"
+ }
+ },
+ "required": ["query"],
+ "additionalProperties": false
+ },
+ "defer_loading": true,
+ "strict": true,
+ "input_examples": [
+ {
+ "query": "example query"
+ }
+ ]
+ }]
+ }
+ """,
+ actualResponse: """
+ {
+ "id": "msg_enhanced_tool_01",
+ "type": "message",
+ "role": "assistant",
+ "model": "claude-haiku-4-5",
+ "content": [{
+ "type": "text",
+ "text": "Tool is ready"
+ }],
+ "stop_reason": "end_turn",
+ "usage": {
+ "input_tokens": 40,
+ "output_tokens": 10
+ }
+ }
+ """
+ );
+
+ IChatClient chatClient = CreateChatClient(handler, "claude-haiku-4-5");
+
+ var enhancedFunction = AIFunctionFactory.Create(
+ (string query) => "result",
+ new AIFunctionFactoryOptions
+ {
+ Name = "enhanced_tool",
+ Description = "A tool with additional properties",
+ AdditionalProperties = new Dictionary<string, object?>
+ {
+ [nameof(Tool.DeferLoading)] = true,
+ [nameof(Tool.Strict)] = true,
+ [nameof(Tool.InputExamples)] = new List<Dictionary<string, JsonElement>>
+ {
+ new() { ["query"] = JsonSerializer.SerializeToElement("example query") },
+ },
+ },
+ }
+ );
+
+ ChatOptions options = new() { Tools = [enhancedFunction] };
+
+ ChatResponse response = await chatClient.GetResponseAsync(
+ "Use enhanced tool",
+ options,
+ TestContext.Current.CancellationToken
+ );
+ Assert.NotNull(response);
+ }
+
+ [Fact]
+ public async Task GetResponseAsync_WithAIFunctionTool_PartialAdditionalProperties()
+ {
+ VerbatimHttpHandler handler = new(
+ expectedRequest: """
+ {
+ "max_tokens": 1024,
+ "model": "claude-haiku-4-5",
+ "messages": [{
+ "role": "user",
+ "content": [{
+ "type": "text",
+ "text": "Use strict tool"
+ }]
+ }],
+ "tools": [{
+ "name": "strict_tool",
+ "description": "A tool with only strict property",
+ "input_schema": {
+ "type": "object",
+ "properties": {
+ "value": {
+ "type": "integer"
+ }
+ },
+ "required": ["value"],
+ "additionalProperties": false
+ },
+ "strict": true
+ }]
+ }
+ """,
+ actualResponse: """
+ {
+ "id": "msg_strict_tool_01",
+ "type": "message",
+ "role": "assistant",
+ "model": "claude-haiku-4-5",
+ "content": [{
+ "type": "text",
+ "text": "Strict mode enabled"
+ }],
+ "stop_reason": "end_turn",
+ "usage": {
+ "input_tokens": 35,
+ "output_tokens": 8
+ }
+ }
+ """
+ );
+
+ IChatClient chatClient = CreateChatClient(handler, "claude-haiku-4-5");
+
+ var strictFunction = AIFunctionFactory.Create(
+ (int value) => value * 2,
+ new AIFunctionFactoryOptions
+ {
+ Name = "strict_tool",
+ Description = "A tool with only strict property",
+ AdditionalProperties = new Dictionary<string, object?>
+ {
+ [nameof(Tool.Strict)] = true,
+ },
+ }
+ );
+
+ ChatOptions options = new() { Tools = [strictFunction] };
+
+ ChatResponse response = await chatClient.GetResponseAsync(
+ "Use strict tool",
+ options,
+ TestContext.Current.CancellationToken
+ );
+ Assert.NotNull(response);
+ }
+
+ /// <summary>
+ /// Validates that all JSON schema transformations are applied correctly when using
+ /// ChatResponseFormat.ForJsonSchema. Tests:
+ ///
+ /// - Numeric constraints (minimum, maximum, multipleOf) → description
+ /// - String constraints (minLength, maxLength, pattern) → description
+ /// - Unsupported string format → description
+ /// - Supported string format (email) preserved
+ /// - Array minItems > 1 → description
+ /// - Array minItems ≤ 1 preserved
+ /// - oneOf → anyOf conversion (with nested object getting additionalProperties: false)
+ /// - enum preserved
+ /// - const preserved
+ /// - title preserved
+ /// - Unsupported properties (default) → description
+ /// - Nested object gets additionalProperties: false
+ /// - Root object gets additionalProperties: false
+ ///
+ /// </summary>
+ [Fact]
+ public async Task GetResponseAsync_ResponseFormatSchema_AllTransformationsApplied()
+ {
+ string inputSchema = """
+ {
+ "type": "object",
+ "properties": {
+ "score": {
+ "type": "integer",
+ "description": "A score",
+ "minimum": 0,
+ "maximum": 100,
+ "multipleOf": 5
+ },
+ "code": {
+ "type": "string",
+ "minLength": 3,
+ "maxLength": 10,
+ "pattern": "^[A-Z]+$"
+ },
+ "phone": {
+ "type": "string",
+ "format": "phone"
+ },
+ "email": {
+ "type": "string",
+ "format": "email"
+ },
+ "tags": {
+ "type": "array",
+ "items": { "type": "string" },
+ "minItems": 3
+ },
+ "ids": {
+ "type": "array",
+ "items": { "type": "string" },
+ "minItems": 1
+ },
+ "value": {
+ "oneOf": [
+ { "type": "string" },
+ {
+ "type": "object",
+ "properties": { "x": { "type": "integer" } },
+ "required": ["x"]
+ }
+ ]
+ },
+ "status": {
+ "type": "string",
+ "enum": ["active", "inactive"]
+ },
+ "level": {
+ "type": "string",
+ "const": "admin"
+ },
+ "name": {
+ "type": "string",
+ "title": "Full Name"
+ },
+ "note": {
+ "type": "string",
+ "default": "N/A"
+ },
+ "nested": {
+ "type": "object",
+ "properties": {
+ "inner": { "type": "string" }
+ },
+ "required": ["inner"]
+ }
+ },
+ "required": ["score", "code", "phone", "email", "tags", "ids", "value", "status", "level", "name", "note", "nested"]
+ }
+ """;
+
+ VerbatimHttpHandler handler = new(
+ expectedRequest: """
+ {
+ "max_tokens": 1024,
+ "model": "claude-sonnet-4-5-20250929",
+ "messages": [{
+ "role": "user",
+ "content": [{
+ "type": "text",
+ "text": "test"
+ }]
+ }],
+ "output_config": {
+ "format": {
+ "type": "json_schema",
+ "schema": {
+ "type": "object",
+ "properties": {
+ "score": {
+ "type": "integer",
+ "description": "A score\n\n{minimum: 0, maximum: 100, multipleOf: 5}"
+ },
+ "code": {
+ "type": "string",
+ "description": "{minLength: 3, maxLength: 10, pattern: \"^[A-Z]+$\"}"
+ },
+ "phone": {
+ "type": "string",
+ "description": "{format: \"phone\"}"
+ },
+ "email": {
+ "type": "string",
+ "format": "email"
+ },
+ "tags": {
+ "type": "array",
+ "items": { "type": "string" },
+ "description": "{minItems: 3}"
+ },
+ "ids": {
+ "type": "array",
+ "items": { "type": "string" },
+ "minItems": 1
+ },
+ "value": {
+ "anyOf": [
+ { "type": "string" },
+ {
+ "type": "object",
+ "properties": { "x": { "type": "integer" } },
+ "required": ["x"],
+ "additionalProperties": false
+ }
+ ]
+ },
+ "status": {
+ "type": "string",
+ "enum": ["active", "inactive"]
+ },
+ "level": {
+ "type": "string",
+ "const": "admin"
+ },
+ "name": {
+ "type": "string",
+ "title": "Full Name"
+ },
+ "note": {
+ "type": "string",
+ "description": "{default: \"N/A\"}"
+ },
+ "nested": {
+ "type": "object",
+ "properties": {
+ "inner": { "type": "string" }
+ },
+ "required": ["inner"],
+ "additionalProperties": false
+ }
+ },
+ "required": ["score", "code", "phone", "email", "tags", "ids", "value", "status", "level", "name", "note", "nested"],
+ "additionalProperties": false
+ }
+ }
+ }
+ }
+ """,
+ actualResponse: """
+ {
+ "id": "msg_transform_01",
+ "type": "message",
+ "role": "assistant",
+ "model": "claude-sonnet-4-5-20250929",
+ "content": [{
+ "type": "text",
+ "text": "{}"
+ }],
+ "stop_reason": "end_turn",
+ "usage": {
+ "input_tokens": 25,
+ "output_tokens": 10
+ }
+ }
+ """
+ );
+
+ IChatClient chatClient = CreateChatClient(handler, "claude-sonnet-4-5-20250929");
+
+ ChatOptions options = new()
+ {
+ ResponseFormat = ChatResponseFormat.ForJsonSchema(
+ JsonElement.Parse(inputSchema),
+ "test_schema"
+ ),
+ };
+
+ ChatResponse response = await chatClient.GetResponseAsync(
+ "test",
+ options,
+ TestContext.Current.CancellationToken
+ );
+ Assert.NotNull(response);
+ }
+
+ /// <summary>
+ /// Validates the same schema transformations as
+ /// <see cref="GetResponseAsync_ResponseFormatSchema_AllTransformationsApplied"/> but through
+ /// the tool path, ensuring both code paths apply the same
+ /// transform pipeline.
+ /// </summary>
+ [Fact]
+ public async Task GetResponseAsync_ToolDeclarationSchema_AllTransformationsApplied()
+ {
+ string inputSchema = """
+ {
+ "type": "object",
+ "properties": {
+ "score": {
+ "type": "integer",
+ "description": "A score",
+ "minimum": 0,
+ "maximum": 100,
+ "multipleOf": 5
+ },
+ "code": {
+ "type": "string",
+ "minLength": 3,
+ "maxLength": 10,
+ "pattern": "^[A-Z]+$"
+ },
+ "phone": {
+ "type": "string",
+ "format": "phone"
+ },
+ "email": {
+ "type": "string",
+ "format": "email"
+ },
+ "tags": {
+ "type": "array",
+ "items": { "type": "string" },
+ "minItems": 3
+ },
+ "ids": {
+ "type": "array",
+ "items": { "type": "string" },
+ "minItems": 1
+ },
+ "value": {
+ "oneOf": [
+ { "type": "string" },
+ {
+ "type": "object",
+ "properties": { "x": { "type": "integer" } },
+ "required": ["x"]
+ }
+ ]
+ },
+ "status": {
+ "type": "string",
+ "enum": ["active", "inactive"]
+ },
+ "level": {
+ "type": "string",
+ "const": "admin"
+ },
+ "name": {
+ "type": "string",
+ "title": "Full Name"
+ },
+ "note": {
+ "type": "string",
+ "default": "N/A"
+ },
+ "nested": {
+ "type": "object",
+ "properties": {
+ "inner": { "type": "string" }
+ },
+ "required": ["inner"]
+ }
+ },
+ "required": ["score", "code", "phone", "email", "tags", "ids", "value", "status", "level", "name", "note", "nested"]
+ }
+ """;
+
+ VerbatimHttpHandler handler = new(
+ expectedRequest: """
+ {
+ "max_tokens": 1024,
+ "model": "claude-sonnet-4-5-20250929",
+ "messages": [{
+ "role": "user",
+ "content": [{
+ "type": "text",
+ "text": "test"
+ }]
+ }],
+ "tools": [{
+ "name": "test_tool",
+ "description": "A test tool",
+ "input_schema": {
+ "type": "object",
+ "properties": {
+ "score": {
+ "type": "integer",
+ "description": "A score\n\n{minimum: 0, maximum: 100, multipleOf: 5}"
+ },
+ "code": {
+ "type": "string",
+ "description": "{minLength: 3, maxLength: 10, pattern: \"^[A-Z]+$\"}"
+ },
+ "phone": {
+ "type": "string",
+ "description": "{format: \"phone\"}"
+ },
+ "email": {
+ "type": "string",
+ "format": "email"
+ },
+ "tags": {
+ "type": "array",
+ "items": { "type": "string" },
+ "description": "{minItems: 3}"
+ },
+ "ids": {
+ "type": "array",
+ "items": { "type": "string" },
+ "minItems": 1
+ },
+ "value": {
+ "anyOf": [
+ { "type": "string" },
+ {
+ "type": "object",
+ "properties": { "x": { "type": "integer" } },
+ "required": ["x"],
+ "additionalProperties": false
+ }
+ ]
+ },
+ "status": {
+ "type": "string",
+ "enum": ["active", "inactive"]
+ },
+ "level": {
+ "type": "string",
+ "const": "admin"
+ },
+ "name": {
+ "type": "string",
+ "title": "Full Name"
+ },
+ "note": {
+ "type": "string",
+ "description": "{default: \"N/A\"}"
+ },
+ "nested": {
+ "type": "object",
+ "properties": {
+ "inner": { "type": "string" }
+ },
+ "required": ["inner"],
+ "additionalProperties": false
+ }
+ },
+ "required": ["score", "code", "phone", "email", "tags", "ids", "value", "status", "level", "name", "note", "nested"],
+ "additionalProperties": false
+ }
+ }]
+ }
+ """,
+ actualResponse: """
+ {
+ "id": "msg_transform_02",
+ "type": "message",
+ "role": "assistant",
+ "model": "claude-sonnet-4-5-20250929",
+ "content": [{
+ "type": "text",
+ "text": "ok"
+ }],
+ "stop_reason": "end_turn",
+ "usage": {
+ "input_tokens": 25,
+ "output_tokens": 10
+ }
+ }
+ """
+ );
+
+ IChatClient chatClient = CreateChatClient(handler, "claude-sonnet-4-5-20250929");
+
+ var declaration = AIFunctionFactory.CreateDeclaration(
+ "test_tool",
+ "A test tool",
+ JsonElement.Parse(inputSchema),
+ null
+ );
+
+ ChatOptions options = new() { Tools = [declaration] };
+
+ ChatResponse response = await chatClient.GetResponseAsync(
+ "test",
+ options,
+ TestContext.Current.CancellationToken
+ );
+ Assert.NotNull(response);
+ }
+
+ [Fact]
+ public async Task GetResponseAsync_ServerToolUseBlock_BashCodeExecution_MapsToDataContentWithShMediaType()
+ {
+ VerbatimHttpHandler handler = new(
+ expectedRequest: """
+ {
+ "max_tokens": 1024,
+ "model": "claude-haiku-4-5",
+ "messages": [{
+ "role": "user",
+ "content": [{
+ "type": "text",
+ "text": "Run a bash command"
+ }]
+ }]
+ }
+ """,
+ actualResponse: """
+ {
+ "id": "msg_bash_01",
+ "type": "message",
+ "role": "assistant",
+ "model": "claude-haiku-4-5",
+ "content": [
+ {
+ "type": "server_tool_use",
+ "id": "srvtoolu_bash_01",
+ "name": "bash_code_execution",
+ "caller": { "type": "direct" },
+ "input": { "command": "echo hello" }
+ }
+ ],
+ "stop_reason": "end_turn",
+ "usage": {
+ "input_tokens": 10,
+ "output_tokens": 5
+ }
+ }
+ """
+ );
+
+ IChatClient chatClient = CreateChatClient(handler, "claude-haiku-4-5");
+ ChatResponse response = await chatClient.GetResponseAsync(
+ "Run a bash command",
+ new(),
+ TestContext.Current.CancellationToken
+ );
+ Assert.NotNull(response);
+
+ var contents = response.Messages[0].Contents;
+ var ciCall = Assert.IsType(contents[0]);
+ Assert.Equal("srvtoolu_bash_01", ciCall.CallId);
+ Assert.NotNull(ciCall.Inputs);
+ Assert.Single(ciCall.Inputs);
+ var codeInput = Assert.IsType(ciCall.Inputs[0]);
+ Assert.Equal("application/x-sh", codeInput.MediaType);
+ Assert.Equal("echo hello", Encoding.UTF8.GetString(codeInput.Data.ToArray()));
+ }
+
+ [Fact]
+ public async Task GetResponseAsync_ServerToolUseBlock_TextEditorCodeExecution_MapsToDataContentWithTextPlainMediaType()
+ {
+ VerbatimHttpHandler handler = new(
+ expectedRequest: """
+ {
+ "max_tokens": 1024,
+ "model": "claude-haiku-4-5",
+ "messages": [{
+ "role": "user",
+ "content": [{
+ "type": "text",
+ "text": "Create a file"
+ }]
+ }]
+ }
+ """,
+ actualResponse: """
+ {
+ "id": "msg_te_01",
+ "type": "message",
+ "role": "assistant",
+ "model": "claude-haiku-4-5",
+ "content": [
+ {
+ "type": "server_tool_use",
+ "id": "srvtoolu_te_01",
+ "name": "text_editor_code_execution",
+ "caller": { "type": "direct" },
+ "input": { "command": "create" }
+ }
+ ],
+ "stop_reason": "end_turn",
+ "usage": {
+ "input_tokens": 10,
+ "output_tokens": 5
+ }
+ }
+ """
+ );
+
+ IChatClient chatClient = CreateChatClient(handler, "claude-haiku-4-5");
+ ChatResponse response = await chatClient.GetResponseAsync(
+ "Create a file",
+ new(),
+ TestContext.Current.CancellationToken
+ );
+ Assert.NotNull(response);
+
+ var contents = response.Messages[0].Contents;
+ var ciCall = Assert.IsType(contents[0]);
+ Assert.Equal("srvtoolu_te_01", ciCall.CallId);
+ Assert.NotNull(ciCall.Inputs);
+ Assert.Single(ciCall.Inputs);
+ var codeInput = Assert.IsType(ciCall.Inputs[0]);
+ Assert.Equal("text/plain", codeInput.MediaType);
+ Assert.Equal("create", Encoding.UTF8.GetString(codeInput.Data.ToArray()));
+ }
+
+ [Fact]
+ public async Task GetResponseAsync_ServerToolUseBlock_CodeExecution_WithMissingCodeKey_InputsNotPopulated()
+ {
+ VerbatimHttpHandler handler = new(
+ expectedRequest: """
+ {
+ "max_tokens": 1024,
+ "model": "claude-haiku-4-5",
+ "messages": [{
+ "role": "user",
+ "content": [{
+ "type": "text",
+ "text": "Execute something"
+ }]
+ }]
+ }
+ """,
+ actualResponse: """
+ {
+ "id": "msg_nocode_01",
+ "type": "message",
+ "role": "assistant",
+ "model": "claude-haiku-4-5",
+ "content": [
+ {
+ "type": "server_tool_use",
+ "id": "srvtoolu_nocode_01",
+ "name": "code_execution",
+ "caller": { "type": "direct" },
+ "input": { "language": "python" }
+ }
+ ],
+ "stop_reason": "end_turn",
+ "usage": {
+ "input_tokens": 10,
+ "output_tokens": 5
+ }
+ }
+ """
+ );
+
+ IChatClient chatClient = CreateChatClient(handler, "claude-haiku-4-5");
+ ChatResponse response = await chatClient.GetResponseAsync(
+ "Execute something",
+ new(),
+ TestContext.Current.CancellationToken
+ );
+ Assert.NotNull(response);
+
+ var contents = response.Messages[0].Contents;
+ var ciCall = Assert.IsType(contents[0]);
+ Assert.Equal("srvtoolu_nocode_01", ciCall.CallId);
+ Assert.Null(ciCall.Inputs);
+ }
+
+ [Fact]
+ public async Task GetResponseAsync_ServerToolUseBlock_UnknownName_MapsToToolCallContent()
+ {
+ VerbatimHttpHandler handler = new(
+ expectedRequest: """
+ {
+ "max_tokens": 1024,
+ "model": "claude-haiku-4-5",
+ "messages": [{
+ "role": "user",
+ "content": [{
+ "type": "text",
+ "text": "Search tools"
+ }]
+ }]
+ }
+ """,
+ actualResponse: """
+ {
+ "id": "msg_unknown_01",
+ "type": "message",
+ "role": "assistant",
+ "model": "claude-haiku-4-5",
+ "content": [
+ {
+ "type": "server_tool_use",
+ "id": "srvtoolu_ts_01",
+ "name": "tool_search_tool_regex",
+ "caller": { "type": "direct" },
+ "input": {}
+ }
+ ],
+ "stop_reason": "end_turn",
+ "usage": {
+ "input_tokens": 10,
+ "output_tokens": 5
+ }
+ }
+ """
+ );
+
+ IChatClient chatClient = CreateChatClient(handler, "claude-haiku-4-5");
+ ChatResponse response = await chatClient.GetResponseAsync(
+ "Search tools",
+ new(),
+ TestContext.Current.CancellationToken
+ );
+ Assert.NotNull(response);
+
+ var contents = response.Messages[0].Contents;
+ var tc = Assert.IsType(contents[0]);
+ Assert.Equal("srvtoolu_ts_01", tc.CallId);
+ }
+
+ [Fact]
+ public async Task GetResponseAsync_ServerToolUseBlock_WebSearch_WithoutQueryInput_QueriesNotPopulated()
+ {
+ VerbatimHttpHandler handler = new(
+ expectedRequest: """
+ {
+ "max_tokens": 1024,
+ "model": "claude-haiku-4-5",
+ "messages": [{
+ "role": "user",
+ "content": [{
+ "type": "text",
+ "text": "Search the web"
+ }]
+ }]
+ }
+ """,
+ actualResponse: """
+ {
+ "id": "msg_ws_noquery_01",
+ "type": "message",
+ "role": "assistant",
+ "model": "claude-haiku-4-5",
+ "content": [
+ {
+ "type": "server_tool_use",
+ "id": "srvtoolu_ws_noq_01",
+ "name": "web_search",
+ "caller": { "type": "direct" },
+ "input": {}
+ }
+ ],
+ "stop_reason": "end_turn",
+ "usage": {
+ "input_tokens": 10,
+ "output_tokens": 5
+ }
+ }
+ """
+ );
+
+ IChatClient chatClient = CreateChatClient(handler, "claude-haiku-4-5");
+ ChatResponse response = await chatClient.GetResponseAsync(
+ "Search the web",
+ new(),
+ TestContext.Current.CancellationToken
+ );
+ Assert.NotNull(response);
+
+ var contents = response.Messages[0].Contents;
+ var wsc = Assert.IsType(contents[0]);
+ Assert.Equal("srvtoolu_ws_noq_01", wsc.CallId);
+ Assert.Null(wsc.Queries);
+ }
+
+ [Fact]
+ public async Task GetResponseAsync_WebFetchToolResultBlock_MapsToWebSearchToolResultContent()
+ {
+ VerbatimHttpHandler handler = new(
+ expectedRequest: """
+ {
+ "max_tokens": 1024,
+ "model": "claude-haiku-4-5",
+ "messages": [{
+ "role": "user",
+ "content": [{
+ "type": "text",
+ "text": "Fetch a page"
+ }]
+ }]
+ }
+ """,
+ actualResponse: """
+ {
+ "id": "msg_wf_01",
+ "type": "message",
+ "role": "assistant",
+ "model": "claude-haiku-4-5",
+ "content": [
+ {
+ "type": "web_fetch_tool_result",
+ "tool_use_id": "srvtoolu_wf_01",
+ "caller": { "type": "direct" },
+ "content": {
+ "type": "web_fetch_result",
+ "url": "https://example.com/article.html",
+ "retrieved_at": "2025-01-01T00:00:00Z",
+ "content": {
+ "type": "document",
+ "citations": null,
+ "source": {
+ "type": "text",
+ "media_type": "text/plain",
+ "data": "fetched content"
+ },
+ "title": "Article"
+ }
+ }
+ }
+ ],
+ "stop_reason": "end_turn",
+ "usage": {
+ "input_tokens": 10,
+ "output_tokens": 5
+ }
+ }
+ """
+ );
+
+ IChatClient chatClient = CreateChatClient(handler, "claude-haiku-4-5");
+ ChatResponse response = await chatClient.GetResponseAsync(
+ "Fetch a page",
+ new(),
+ TestContext.Current.CancellationToken
+ );
+ Assert.NotNull(response);
+
+ var contents = response.Messages[0].Contents;
+ var result = Assert.IsType(contents[0]);
+ Assert.Equal("srvtoolu_wf_01", result.CallId);
+ Assert.NotNull(result.Results);
+ Assert.Single(result.Results);
+ var uriContent = Assert.IsType(result.Results[0]);
+ Assert.Equal(new Uri("https://example.com/article.html"), uriContent.Uri);
+ Assert.Equal("text/html", uriContent.MediaType);
+ }
+
+ [Fact]
+ public async Task GetResponseAsync_TextEditorCodeExecutionResult_ViewOperation()
+ {
+ VerbatimHttpHandler handler = new(
+ expectedRequest: """
+ {
+ "max_tokens": 1024,
+ "model": "claude-haiku-4-5",
+ "messages": [{
+ "role": "user",
+ "content": [{
+ "type": "text",
+ "text": "View a file"
+ }]
+ }]
+ }
+ """,
+ actualResponse: """
+ {
+ "id": "msg_te_view_01",
+ "type": "message",
+ "role": "assistant",
+ "model": "claude-haiku-4-5",
+ "content": [
+ {
+ "type": "text_editor_code_execution_tool_result",
+ "tool_use_id": "srvtoolu_te_01",
+ "content": {
+ "type": "text_editor_code_execution_view_result",
+ "file_type": "text",
+ "content": "print('hello')",
+ "num_lines": 1,
+ "start_line": 1,
+ "total_lines": 1
+ }
+ }
+ ],
+ "stop_reason": "end_turn",
+ "usage": {
+ "input_tokens": 10,
+ "output_tokens": 5
+ }
+ }
+ """
+ );
+
+ IChatClient chatClient = CreateChatClient(handler, "claude-haiku-4-5");
+ ChatResponse response = await chatClient.GetResponseAsync(
+ "View a file",
+ new(),
+ TestContext.Current.CancellationToken
+ );
+ Assert.NotNull(response);
+
+ var contents = response.Messages[0].Contents;
+ var ciResult = Assert.IsType(contents[0]);
+ Assert.Equal("srvtoolu_te_01", ciResult.CallId);
+ Assert.NotNull(ciResult.Outputs);
+ Assert.Single(ciResult.Outputs);
+ var textOutput = Assert.IsType(ciResult.Outputs[0]);
+ Assert.Equal("print('hello')", textOutput.Text);
+ }
+
+ [Fact]
+ public async Task GetResponseAsync_TextEditorCodeExecutionResult_WithError()
+ {
+ VerbatimHttpHandler handler = new(
+ expectedRequest: """
+ {
+ "max_tokens": 1024,
+ "model": "claude-haiku-4-5",
+ "messages": [{
+ "role": "user",
+ "content": [{
+ "type": "text",
+ "text": "Edit missing file"
+ }]
+ }]
+ }
+ """,
+ actualResponse: """
+ {
+ "id": "msg_te_err_01",
+ "type": "message",
+ "role": "assistant",
+ "model": "claude-haiku-4-5",
+ "content": [
+ {
+ "type": "text_editor_code_execution_tool_result",
+ "tool_use_id": "srvtoolu_te_02",
+ "content": {
+ "type": "text_editor_code_execution_tool_result_error",
+ "error_code": "file_not_found",
+ "error_message": "File not found: /tmp/missing.py"
+ }
+ }
+ ],
+ "stop_reason": "end_turn",
+ "usage": {
+ "input_tokens": 10,
+ "output_tokens": 5
+ }
+ }
+ """
+ );
+
+ IChatClient chatClient = CreateChatClient(handler, "claude-haiku-4-5");
+ ChatResponse response = await chatClient.GetResponseAsync(
+ "Edit missing file",
+ new(),
+ TestContext.Current.CancellationToken
+ );
+ Assert.NotNull(response);
+
+ var contents = response.Messages[0].Contents;
+ var ciResult = Assert.IsType(contents[0]);
+ Assert.Equal("srvtoolu_te_02", ciResult.CallId);
+ Assert.NotNull(ciResult.Outputs);
+ Assert.Single(ciResult.Outputs);
+ var errorContent = Assert.IsType(ciResult.Outputs[0]);
+ Assert.Equal("File not found: /tmp/missing.py", errorContent.Message);
+ Assert.Equal("FileNotFound", errorContent.ErrorCode);
+ }
+
+ [Fact]
+ public async Task GetResponseAsync_ToolSearchToolResultBlock_MapsToToolResultContent()
+ {
+ VerbatimHttpHandler handler = new(
+ expectedRequest: """
+ {
+ "max_tokens": 1024,
+ "model": "claude-haiku-4-5",
+ "messages": [{
+ "role": "user",
+ "content": [{
+ "type": "text",
+ "text": "Find tools"
+ }]
+ }]
+ }
+ """,
+ actualResponse: """
+ {
+ "id": "msg_ts_01",
+ "type": "message",
+ "role": "assistant",
+ "model": "claude-haiku-4-5",
+ "content": [
+ {
+ "type": "tool_search_tool_result",
+ "tool_use_id": "srvtoolu_ts_01",
+ "content": {
+ "type": "tool_search_tool_search_result",
+ "tool_references": []
+ }
+ }
+ ],
+ "stop_reason": "end_turn",
+ "usage": {
+ "input_tokens": 10,
+ "output_tokens": 5
+ }
+ }
+ """
+ );
+
+ IChatClient chatClient = CreateChatClient(handler, "claude-haiku-4-5");
+ ChatResponse response = await chatClient.GetResponseAsync(
+ "Find tools",
+ new(),
+ TestContext.Current.CancellationToken
+ );
+ Assert.NotNull(response);
+
+ var contents = response.Messages[0].Contents;
+ var result = Assert.IsType(contents[0]);
+ Assert.Equal("srvtoolu_ts_01", result.CallId);
+ }
+
+ [Fact]
+ public async Task GetStreamingResponseAsync_WithServerToolResultInContentBlockStart()
+ {
+ VerbatimHttpHandler handler = new(
+ expectedRequest: """
+ {
+ "max_tokens": 1024,
+ "model": "claude-haiku-4-5",
+ "messages": [{
+ "role": "user",
+ "content": [{
+ "type": "text",
+ "text": "Search the web"
+ }]
+ }],
+ "stream": true
+ }
+ """,
+ actualResponse: """
+ event: message_start
+ data: {"type":"message_start","message":{"id":"msg_stream_ws_01","type":"message","role":"assistant","model":"claude-haiku-4-5","content":[],"stop_reason":null,"stop_sequence":null,"usage":{"input_tokens":10,"output_tokens":0}}}
+
+ event: content_block_start
+ data: {"type":"content_block_start","index":0,"content_block":{"type":"web_search_tool_result","tool_use_id":"srvtoolu_ws_stream_01","caller":{"type":"direct"},"content":[{"type":"web_search_result","title":"Stream Result","url":"https://example.com/stream","encrypted_content":"enc_stream","page_age":"1 day ago"}]}}
+
+ event: content_block_stop
+ data: {"type":"content_block_stop","index":0}
+
+ event: message_delta
+ data: {"type":"message_delta","delta":{"stop_reason":"end_turn","stop_sequence":null},"usage":{"output_tokens":5}}
+
+ event: message_stop
+ data: {"type":"message_stop"}
+
+ """
+ );
+
+ IChatClient chatClient = CreateChatClient(handler, "claude-haiku-4-5");
+
+ List updates = [];
+ await foreach (
+ var update in chatClient.GetStreamingResponseAsync(
+ "Search the web",
+ new(),
+ TestContext.Current.CancellationToken
+ )
+ )
+ {
+ updates.Add(update);
+ }
+ Assert.NotEmpty(updates);
+
+ var wsResultUpdates = updates
+ .SelectMany(u => u.Contents.OfType())
+ .ToList();
+ Assert.Single(wsResultUpdates);
+
+ var wsResult = wsResultUpdates[0];
+ Assert.Equal("srvtoolu_ws_stream_01", wsResult.CallId);
+ Assert.NotNull(wsResult.Results);
+ Assert.Single(wsResult.Results);
+
+ var uriContent = Assert.IsType(wsResult.Results[0]);
+ Assert.Equal(new Uri("https://example.com/stream"), uriContent.Uri);
+ }
+
+ [Fact]
+ public async Task GetResponseAsync_EncryptedCodeExecutionResult_MapsStderrAndFiles()
+ {
+ VerbatimHttpHandler handler = new(
+ expectedRequest: """
+ {
+ "model": "claude-haiku-4-5",
+ "messages": [{
+ "role": "user",
+ "content": [{
+ "type": "text",
+ "text": "Run code"
+ }]
+ }],
+ "max_tokens": 1024,
+ "tools": [{
+ "type": "code_execution_20250825",
+ "name": "code_execution"
+ }]
+ }
+ """,
+ actualResponse: """
+ {
+ "id": "msg_enc_01",
+ "type": "message",
+ "role": "assistant",
+ "model": "claude-haiku-4-5",
+ "content": [
+ {
+ "type": "server_tool_use",
+ "id": "srvtoolu_enc_01",
+ "name": "code_execution",
+ "caller": { "type": "direct" },
+ "input": { "code": "print('hello')" }
+ },
+ {
+ "type": "code_execution_tool_result",
+ "tool_use_id": "srvtoolu_enc_01",
+ "content": {
+ "type": "encrypted_code_execution_result",
+ "encrypted_stdout": "base64encryptedstuff",
+ "stderr": "warning: something",
+ "return_code": 1,
+ "content": [
+ { "type": "code_execution_output", "file_id": "file_out_01" },
+ { "type": "code_execution_output", "file_id": "file_out_02" }
+ ]
+ }
+ }
+ ],
+ "stop_reason": "end_turn",
+ "usage": {
+ "input_tokens": 25,
+ "output_tokens": 20
+ }
+ }
+ """
+ );
+
+ IChatClient chatClient = CreateChatClient(handler, "claude-haiku-4-5");
+
+ ChatResponse response = await chatClient.GetResponseAsync(
+ "Run code",
+ new() { Tools = [new HostedCodeInterpreterTool()] },
+ TestContext.Current.CancellationToken
+ );
+
+ var contents = response.Messages[0].Contents;
+ var ciResult = Assert.IsType(contents[1]);
+ Assert.Equal("srvtoolu_enc_01", ciResult.CallId);
+ Assert.NotNull(ciResult.Outputs);
+
+ // Encrypted stdout is not surfaced, but stderr and files are
+ var errorOutput = Assert.IsType(ciResult.Outputs[0]);
+ Assert.Equal("warning: something", errorOutput.Message);
+ Assert.Equal("1", errorOutput.ErrorCode);
+
+ var file1 = Assert.IsType(ciResult.Outputs[1]);
+ Assert.Equal("file_out_01", file1.FileId);
+
+ var file2 = Assert.IsType(ciResult.Outputs[2]);
+ Assert.Equal("file_out_02", file2.FileId);
+ }
+
+ [Fact]
+ public async Task GetResponseAsync_ContainerUploadBlock_MapsToHostedFileContent()
+ {
+ VerbatimHttpHandler handler = new(
+ expectedRequest: """
+ {
+ "model": "claude-haiku-4-5",
+ "messages": [{
+ "role": "user",
+ "content": [{
+ "type": "text",
+ "text": "Upload a file"
+ }]
+ }],
+ "max_tokens": 1024,
+ "tools": [{
+ "type": "code_execution_20250825",
+ "name": "code_execution"
+ }]
}
""",
actualResponse: """
{
- "id": "msg_uri_img_01",
+ "id": "msg_cu_01",
"type": "message",
"role": "assistant",
"model": "claude-haiku-4-5",
- "content": [{
- "type": "text",
- "text": "Image URL received"
- }],
+ "content": [
+ {
+ "type": "container_upload",
+ "file_id": "file_container_01"
+ },
+ {
+ "type": "text",
+ "text": "File uploaded."
+ }
+ ],
"stop_reason": "end_turn",
"usage": {
- "input_tokens": 32,
- "output_tokens": 8
+ "input_tokens": 10,
+ "output_tokens": 5
}
}
"""
@@ -4488,135 +7469,154 @@ public async Task GetResponseAsync_WithFunctionResultContent_UriContent_Image()
IChatClient chatClient = CreateChatClient(handler, "claude-haiku-4-5");
- List messages =
- [
- new ChatMessage(ChatRole.User, "Get image URL"),
- new ChatMessage(
- ChatRole.Assistant,
- [
- new FunctionCallContent(
- "tool_uri_img",
- "url_tool",
- new Dictionary()
- ),
- ]
- ),
- new ChatMessage(
- ChatRole.User,
- [
- new FunctionResultContent(
- "tool_uri_img",
- new UriContent(new Uri("https://example.com/image.png"), "image/png")
- ),
- ]
- ),
- ];
-
ChatResponse response = await chatClient.GetResponseAsync(
- messages,
- new(),
+ "Upload a file",
+ new() { Tools = [new HostedCodeInterpreterTool()] },
TestContext.Current.CancellationToken
);
- Assert.NotNull(response);
+
+ var contents = response.Messages[0].Contents;
+
+ var hostedFile = Assert.IsType(contents[0]);
+ Assert.Equal("file_container_01", hostedFile.FileId);
+ Assert.NotNull(hostedFile.RawRepresentation);
+
+ var text = Assert.IsType(contents[1]);
+ Assert.Equal("File uploaded.", text.Text);
}
[Fact]
- public async Task GetResponseAsync_WithFunctionResultContent_UriContent_PDF()
+ public async Task GetStreamingResponseAsync_ContainerUploadBlock_MapsToHostedFileContent()
{
VerbatimHttpHandler handler = new(
expectedRequest: """
{
"max_tokens": 1024,
"model": "claude-haiku-4-5",
- "messages": [
- {
- "role": "user",
- "content": [{
- "type": "text",
- "text": "Get PDF URL"
- }]
- },
- {
- "role": "assistant",
- "content": [{
- "type": "tool_use",
- "id": "tool_uri_pdf",
- "name": "pdf_url_tool",
- "input": {}
- }]
- },
- {
- "role": "user",
- "content": [{
- "type": "tool_result",
- "tool_use_id": "tool_uri_pdf",
- "is_error": false,
- "content": [{
- "type": "document",
- "source": {
- "type": "url",
- "url": "https://example.com/document.pdf"
- }
- }]
- }]
- }
- ]
+ "messages": [{
+ "role": "user",
+ "content": [{
+ "type": "text",
+ "text": "Upload a file"
+ }]
+ }],
+ "stream": true
}
""",
actualResponse: """
+ event: message_start
+ data: {"type":"message_start","message":{"id":"msg_stream_cu_01","type":"message","role":"assistant","model":"claude-haiku-4-5","content":[],"stop_reason":null,"stop_sequence":null,"usage":{"input_tokens":10,"output_tokens":0}}}
+
+ event: content_block_start
+ data: {"type":"content_block_start","index":0,"content_block":{"type":"container_upload","file_id":"file_stream_container_01"}}
+
+ event: content_block_stop
+ data: {"type":"content_block_stop","index":0}
+
+ event: message_delta
+ data: {"type":"message_delta","delta":{"stop_reason":"end_turn","stop_sequence":null},"usage":{"output_tokens":5}}
+
+ event: message_stop
+ data: {"type":"message_stop"}
+
+ """
+ );
+
+ IChatClient chatClient = CreateChatClient(handler, "claude-haiku-4-5");
+
+ List updates = [];
+ await foreach (
+ var update in chatClient.GetStreamingResponseAsync(
+ "Upload a file",
+ new(),
+ TestContext.Current.CancellationToken
+ )
+ )
+ {
+ updates.Add(update);
+ }
+ Assert.NotEmpty(updates);
+
+ var hostedFiles = updates.SelectMany(u => u.Contents.OfType()).ToList();
+ Assert.Single(hostedFiles);
+ Assert.Equal("file_stream_container_01", hostedFiles[0].FileId);
+ }
+
+ [Fact]
+ public async Task GetStreamingResponseAsync_CitationsDelta_MapsToAnnotation()
+ {
+ VerbatimHttpHandler handler = new(
+ expectedRequest: """
{
- "id": "msg_uri_pdf_01",
- "type": "message",
- "role": "assistant",
+ "max_tokens": 1024,
"model": "claude-haiku-4-5",
- "content": [{
- "type": "text",
- "text": "PDF URL received"
+ "messages": [{
+ "role": "user",
+ "content": [{
+ "type": "text",
+ "text": "Search and cite"
+ }]
}],
- "stop_reason": "end_turn",
- "usage": {
- "input_tokens": 35,
- "output_tokens": 9
- }
+ "stream": true
}
+ """,
+ actualResponse: """
+ event: message_start
+ data: {"type":"message_start","message":{"id":"msg_stream_cite_01","type":"message","role":"assistant","model":"claude-haiku-4-5","content":[],"stop_reason":null,"stop_sequence":null,"usage":{"input_tokens":10,"output_tokens":0}}}
+
+ event: content_block_start
+ data: {"type":"content_block_start","index":0,"content_block":{"type":"text","text":""}}
+
+ event: content_block_delta
+ data: {"type":"content_block_delta","index":0,"delta":{"type":"text_delta","text":"The Eiffel Tower is 330m tall."}}
+
+ event: content_block_delta
+ data: {"type":"content_block_delta","index":0,"delta":{"type":"citations_delta","citation":{"type":"web_search_result_location","cited_text":"330 meters tall","encrypted_index":"enc","title":"Eiffel Tower Facts","url":"https://example.com/eiffel"}}}
+
+ event: content_block_stop
+ data: {"type":"content_block_stop","index":0}
+
+ event: message_delta
+ data: {"type":"message_delta","delta":{"stop_reason":"end_turn","stop_sequence":null},"usage":{"output_tokens":15}}
+
+ event: message_stop
+ data: {"type":"message_stop"}
+
"""
);
IChatClient chatClient = CreateChatClient(handler, "claude-haiku-4-5");
- List messages =
- [
- new ChatMessage(ChatRole.User, "Get PDF URL"),
- new ChatMessage(
- ChatRole.Assistant,
- [
- new FunctionCallContent(
- "tool_uri_pdf",
- "pdf_url_tool",
- new Dictionary()
- ),
- ]
- ),
- new ChatMessage(
- ChatRole.User,
- [
- new FunctionResultContent(
- "tool_uri_pdf",
- new UriContent(
- new Uri("https://example.com/document.pdf"),
- "application/pdf"
- )
- ),
- ]
- ),
- ];
+ List updates = [];
+ await foreach (
+ var update in chatClient.GetStreamingResponseAsync(
+ "Search and cite",
+ new(),
+ TestContext.Current.CancellationToken
+ )
+ )
+ {
+ updates.Add(update);
+ }
+ Assert.NotEmpty(updates);
- ChatResponse response = await chatClient.GetResponseAsync(
- messages,
- new(),
- TestContext.Current.CancellationToken
+ // Verify text came through
+ var allText = string.Concat(
+ updates.SelectMany(u => u.Contents.OfType()).Select(c => c.Text)
);
- Assert.NotNull(response);
+ Assert.Contains("Eiffel Tower", allText);
+
+ // Verify citation annotation came through
+ var annotatedContents = updates
+ .SelectMany(u => u.Contents.OfType())
+ .Where(t => t.Annotations is { Count: > 0 })
+ .ToList();
+ Assert.Single(annotatedContents);
+
+ var annotation = Assert.IsType(annotatedContents[0].Annotations![0]);
+ Assert.Equal("Eiffel Tower Facts", annotation.Title);
+ Assert.Equal("330 meters tall", annotation.Snippet);
+ Assert.Equal(new Uri("https://example.com/eiffel"), annotation.Url);
}
protected sealed class VerbatimHttpHandler(string expectedRequest, string actualResponse)
@@ -4654,4 +7654,48 @@ CancellationToken cancellationToken
};
}
}
+
+ [Theory]
+ [InlineData("https://example.com/doc.pdf", "application/pdf")]
+ [InlineData("https://example.com/page.html", "text/html")]
+ [InlineData("https://example.com/path/resource", "application/octet-stream")]
+ [InlineData("https://example.com/Photo.JPG", "image/jpeg")]
+ [InlineData("https://example.com/file.xyz123", "application/octet-stream")]
+ [InlineData(".py", "text/x-python")]
+ [InlineData(".sh", "application/x-sh")]
+ [InlineData(".js", "text/javascript")]
+ [InlineData(".pdf", "application/pdf")]
+ [InlineData(".PY", "text/x-python")]
+ [InlineData(".PNG", "image/png")]
+ [InlineData(".unknown", "application/octet-stream")]
+ public void InferMediaTypeFromExtension_ReturnsExpectedType(
+ string urlOrPath,
+ string expectedMediaType
+ )
+ {
+ Assert.Equal(
+ expectedMediaType,
+ AnthropicClientExtensions.InferMediaTypeFromExtension(urlOrPath)
+ );
+ }
+
+ [Theory]
+ [InlineData(null, "")]
+ [InlineData("image/jpeg", ".jpg")]
+ [InlineData("text/x-python", ".py")]
+ [InlineData("application/pdf", ".pdf")]
+ [InlineData("application/yaml", ".yaml")]
+ [InlineData("text/javascript", ".js")]
+ [InlineData("text/typescript", ".ts")]
+ [InlineData("application/x-custom-unknown", "")]
+ public void InferExtensionFromMediaType_ReturnsExpectedExtension(
+ string? mediaType,
+ string expectedExtension
+ )
+ {
+ Assert.Equal(
+ expectedExtension,
+ AnthropicClientExtensions.InferExtensionFromMediaType(mediaType)
+ );
+ }
}
diff --git a/src/Anthropic.Tests/Models/Beta/AnthropicBetaTest.cs b/src/Anthropic.Tests/Models/Beta/AnthropicBetaTest.cs
index e54d4d1a4..0df8d7b63 100644
--- a/src/Anthropic.Tests/Models/Beta/AnthropicBetaTest.cs
+++ b/src/Anthropic.Tests/Models/Beta/AnthropicBetaTest.cs
@@ -28,6 +28,7 @@ public class AnthropicBetaTest : TestBase
[InlineData(AnthropicBeta.ModelContextWindowExceeded2025_08_26)]
[InlineData(AnthropicBeta.Skills2025_10_02)]
[InlineData(AnthropicBeta.FastMode2026_02_01)]
+ [InlineData(AnthropicBeta.Output300k2026_03_24)]
public void Validation_Works(AnthropicBeta rawValue)
{
// force implicit conversion because Theory can't do that for us
@@ -68,6 +69,7 @@ public void InvalidEnumValidationThrows_Works()
[InlineData(AnthropicBeta.ModelContextWindowExceeded2025_08_26)]
[InlineData(AnthropicBeta.Skills2025_10_02)]
[InlineData(AnthropicBeta.FastMode2026_02_01)]
+ [InlineData(AnthropicBeta.Output300k2026_03_24)]
public void SerializationRoundtrip_Works(AnthropicBeta rawValue)
{
// force implicit conversion because Theory can't do that for us
diff --git a/src/Anthropic.Tests/Models/Beta/Files/DeletedFileTest.cs b/src/Anthropic.Tests/Models/Beta/Files/DeletedFileTest.cs
index e0a4ace88..02d41524a 100644
--- a/src/Anthropic.Tests/Models/Beta/Files/DeletedFileTest.cs
+++ b/src/Anthropic.Tests/Models/Beta/Files/DeletedFileTest.cs
@@ -10,9 +10,13 @@ public class DeletedFileTest : TestBase
[Fact]
public void FieldRoundtrip_Works()
{
- var model = new DeletedFile { ID = "id", Type = Type.FileDeleted };
+ var model = new DeletedFile
+ {
+ ID = "file_011CNha8iCJcU1wXNR6q4V8w",
+ Type = Type.FileDeleted,
+ };
- string expectedID = "id";
+ string expectedID = "file_011CNha8iCJcU1wXNR6q4V8w";
ApiEnum expectedType = Type.FileDeleted;
Assert.Equal(expectedID, model.ID);
@@ -22,7 +26,11 @@ public void FieldRoundtrip_Works()
[Fact]
public void SerializationRoundtrip_Works()
{
- var model = new DeletedFile { ID = "id", Type = Type.FileDeleted };
+ var model = new DeletedFile
+ {
+ ID = "file_011CNha8iCJcU1wXNR6q4V8w",
+ Type = Type.FileDeleted,
+ };
string json = JsonSerializer.Serialize(model, ModelBase.SerializerOptions);
var deserialized = JsonSerializer.Deserialize(
@@ -36,7 +44,11 @@ public void SerializationRoundtrip_Works()
[Fact]
public void FieldRoundtripThroughSerialization_Works()
{
- var model = new DeletedFile { ID = "id", Type = Type.FileDeleted };
+ var model = new DeletedFile
+ {
+ ID = "file_011CNha8iCJcU1wXNR6q4V8w",
+ Type = Type.FileDeleted,
+ };
string element = JsonSerializer.Serialize(model, ModelBase.SerializerOptions);
var deserialized = JsonSerializer.Deserialize(
@@ -45,7 +57,7 @@ public void FieldRoundtripThroughSerialization_Works()
);
Assert.NotNull(deserialized);
- string expectedID = "id";
+ string expectedID = "file_011CNha8iCJcU1wXNR6q4V8w";
ApiEnum expectedType = Type.FileDeleted;
Assert.Equal(expectedID, deserialized.ID);
@@ -55,7 +67,11 @@ public void FieldRoundtripThroughSerialization_Works()
[Fact]
public void Validation_Works()
{
- var model = new DeletedFile { ID = "id", Type = Type.FileDeleted };
+ var model = new DeletedFile
+ {
+ ID = "file_011CNha8iCJcU1wXNR6q4V8w",
+ Type = Type.FileDeleted,
+ };
model.Validate();
}
@@ -63,7 +79,7 @@ public void Validation_Works()
[Fact]
public void OptionalNonNullablePropertiesUnsetAreNotSet_Works()
{
- var model = new DeletedFile { ID = "id" };
+ var model = new DeletedFile { ID = "file_011CNha8iCJcU1wXNR6q4V8w" };
Assert.Null(model.Type);
Assert.False(model.RawData.ContainsKey("type"));
@@ -72,7 +88,7 @@ public void OptionalNonNullablePropertiesUnsetAreNotSet_Works()
[Fact]
public void OptionalNonNullablePropertiesUnsetValidation_Works()
{
- var model = new DeletedFile { ID = "id" };
+ var model = new DeletedFile { ID = "file_011CNha8iCJcU1wXNR6q4V8w" };
model.Validate();
}
@@ -82,7 +98,7 @@ public void OptionalNonNullablePropertiesSetToNullAreNotSet_Works()
{
var model = new DeletedFile
{
- ID = "id",
+ ID = "file_011CNha8iCJcU1wXNR6q4V8w",
// Null should be interpreted as omitted for these properties
Type = null,
@@ -97,7 +113,7 @@ public void OptionalNonNullablePropertiesSetToNullValidation_Works()
{
var model = new DeletedFile
{
- ID = "id",
+ ID = "file_011CNha8iCJcU1wXNR6q4V8w",
// Null should be interpreted as omitted for these properties
Type = null,
@@ -109,7 +125,11 @@ public void OptionalNonNullablePropertiesSetToNullValidation_Works()
[Fact]
public void CopyConstructor_Works()
{
- var model = new DeletedFile { ID = "id", Type = Type.FileDeleted };
+ var model = new DeletedFile
+ {
+ ID = "file_011CNha8iCJcU1wXNR6q4V8w",
+ Type = Type.FileDeleted,
+ };
DeletedFile copied = new(model);
diff --git a/src/Anthropic.Tests/Models/Beta/Files/FileListPageResponseTest.cs b/src/Anthropic.Tests/Models/Beta/Files/FileListPageResponseTest.cs
index 40eaad61a..303081c86 100644
--- a/src/Anthropic.Tests/Models/Beta/Files/FileListPageResponseTest.cs
+++ b/src/Anthropic.Tests/Models/Beta/Files/FileListPageResponseTest.cs
@@ -17,34 +17,34 @@ public void FieldRoundtrip_Works()
[
new()
{
- ID = "id",
- CreatedAt = DateTimeOffset.Parse("2019-12-27T18:11:19.117Z"),
- Filename = "x",
- MimeType = "x",
- SizeBytes = 0,
- Downloadable = true,
+ ID = "file_011CNha8iCJcU1wXNR6q4V8w",
+ CreatedAt = DateTimeOffset.Parse("2025-04-15T18:37:24.100435Z"),
+ Filename = "document.pdf",
+ MimeType = "application/pdf",
+ SizeBytes = 102400,
+ Downloadable = false,
},
],
- FirstID = "first_id",
+ FirstID = "file_011CNha8iCJcU1wXNR6q4V8w",
HasMore = true,
- LastID = "last_id",
+ LastID = "file_013Zva2CMHLNnXjNJJKqJ2EF",
};
List expectedData =
[
new()
{
- ID = "id",
- CreatedAt = DateTimeOffset.Parse("2019-12-27T18:11:19.117Z"),
- Filename = "x",
- MimeType = "x",
- SizeBytes = 0,
- Downloadable = true,
+ ID = "file_011CNha8iCJcU1wXNR6q4V8w",
+ CreatedAt = DateTimeOffset.Parse("2025-04-15T18:37:24.100435Z"),
+ Filename = "document.pdf",
+ MimeType = "application/pdf",
+ SizeBytes = 102400,
+ Downloadable = false,
},
];
- string expectedFirstID = "first_id";
+ string expectedFirstID = "file_011CNha8iCJcU1wXNR6q4V8w";
bool expectedHasMore = true;
- string expectedLastID = "last_id";
+ string expectedLastID = "file_013Zva2CMHLNnXjNJJKqJ2EF";
Assert.Equal(expectedData.Count, model.Data.Count);
for (int i = 0; i < expectedData.Count; i++)
@@ -65,17 +65,17 @@ public void SerializationRoundtrip_Works()
[
new()
{
- ID = "id",
- CreatedAt = DateTimeOffset.Parse("2019-12-27T18:11:19.117Z"),
- Filename = "x",
- MimeType = "x",
- SizeBytes = 0,
- Downloadable = true,
+ ID = "file_011CNha8iCJcU1wXNR6q4V8w",
+ CreatedAt = DateTimeOffset.Parse("2025-04-15T18:37:24.100435Z"),
+ Filename = "document.pdf",
+ MimeType = "application/pdf",
+ SizeBytes = 102400,
+ Downloadable = false,
},
],
- FirstID = "first_id",
+ FirstID = "file_011CNha8iCJcU1wXNR6q4V8w",
HasMore = true,
- LastID = "last_id",
+ LastID = "file_013Zva2CMHLNnXjNJJKqJ2EF",
};
string json = JsonSerializer.Serialize(model, ModelBase.SerializerOptions);
@@ -96,17 +96,17 @@ public void FieldRoundtripThroughSerialization_Works()
[
new()
{
- ID = "id",
- CreatedAt = DateTimeOffset.Parse("2019-12-27T18:11:19.117Z"),
- Filename = "x",
- MimeType = "x",
- SizeBytes = 0,
- Downloadable = true,
+ ID = "file_011CNha8iCJcU1wXNR6q4V8w",
+ CreatedAt = DateTimeOffset.Parse("2025-04-15T18:37:24.100435Z"),
+ Filename = "document.pdf",
+ MimeType = "application/pdf",
+ SizeBytes = 102400,
+ Downloadable = false,
},
],
- FirstID = "first_id",
+ FirstID = "file_011CNha8iCJcU1wXNR6q4V8w",
HasMore = true,
- LastID = "last_id",
+ LastID = "file_013Zva2CMHLNnXjNJJKqJ2EF",
};
string element = JsonSerializer.Serialize(model, ModelBase.SerializerOptions);
@@ -120,17 +120,17 @@ public void FieldRoundtripThroughSerialization_Works()
[
new()
{
- ID = "id",
- CreatedAt = DateTimeOffset.Parse("2019-12-27T18:11:19.117Z"),
- Filename = "x",
- MimeType = "x",
- SizeBytes = 0,
- Downloadable = true,
+ ID = "file_011CNha8iCJcU1wXNR6q4V8w",
+ CreatedAt = DateTimeOffset.Parse("2025-04-15T18:37:24.100435Z"),
+ Filename = "document.pdf",
+ MimeType = "application/pdf",
+ SizeBytes = 102400,
+ Downloadable = false,
},
];
- string expectedFirstID = "first_id";
+ string expectedFirstID = "file_011CNha8iCJcU1wXNR6q4V8w";
bool expectedHasMore = true;
- string expectedLastID = "last_id";
+ string expectedLastID = "file_013Zva2CMHLNnXjNJJKqJ2EF";
Assert.Equal(expectedData.Count, deserialized.Data.Count);
for (int i = 0; i < expectedData.Count; i++)
@@ -151,17 +151,17 @@ public void Validation_Works()
[
new()
{
- ID = "id",
- CreatedAt = DateTimeOffset.Parse("2019-12-27T18:11:19.117Z"),
- Filename = "x",
- MimeType = "x",
- SizeBytes = 0,
- Downloadable = true,
+ ID = "file_011CNha8iCJcU1wXNR6q4V8w",
+ CreatedAt = DateTimeOffset.Parse("2025-04-15T18:37:24.100435Z"),
+ Filename = "document.pdf",
+ MimeType = "application/pdf",
+ SizeBytes = 102400,
+ Downloadable = false,
},
],
- FirstID = "first_id",
+ FirstID = "file_011CNha8iCJcU1wXNR6q4V8w",
HasMore = true,
- LastID = "last_id",
+ LastID = "file_013Zva2CMHLNnXjNJJKqJ2EF",
};
model.Validate();
@@ -176,16 +176,16 @@ public void OptionalNonNullablePropertiesUnsetAreNotSet_Works()
[
new()
{
- ID = "id",
- CreatedAt = DateTimeOffset.Parse("2019-12-27T18:11:19.117Z"),
- Filename = "x",
- MimeType = "x",
- SizeBytes = 0,
- Downloadable = true,
+ ID = "file_011CNha8iCJcU1wXNR6q4V8w",
+ CreatedAt = DateTimeOffset.Parse("2025-04-15T18:37:24.100435Z"),
+ Filename = "document.pdf",
+ MimeType = "application/pdf",
+ SizeBytes = 102400,
+ Downloadable = false,
},
],
- FirstID = "first_id",
- LastID = "last_id",
+ FirstID = "file_011CNha8iCJcU1wXNR6q4V8w",
+ LastID = "file_013Zva2CMHLNnXjNJJKqJ2EF",
};
Assert.Null(model.HasMore);
@@ -201,16 +201,16 @@ public void OptionalNonNullablePropertiesUnsetValidation_Works()
[
new()
{
- ID = "id",
- CreatedAt = DateTimeOffset.Parse("2019-12-27T18:11:19.117Z"),
- Filename = "x",
- MimeType = "x",
- SizeBytes = 0,
- Downloadable = true,
+ ID = "file_011CNha8iCJcU1wXNR6q4V8w",
+ CreatedAt = DateTimeOffset.Parse("2025-04-15T18:37:24.100435Z"),
+ Filename = "document.pdf",
+ MimeType = "application/pdf",
+ SizeBytes = 102400,
+ Downloadable = false,
},
],
- FirstID = "first_id",
- LastID = "last_id",
+ FirstID = "file_011CNha8iCJcU1wXNR6q4V8w",
+ LastID = "file_013Zva2CMHLNnXjNJJKqJ2EF",
};
model.Validate();
@@ -225,16 +225,16 @@ public void OptionalNonNullablePropertiesSetToNullAreNotSet_Works()
[
new()
{
- ID = "id",
- CreatedAt = DateTimeOffset.Parse("2019-12-27T18:11:19.117Z"),
- Filename = "x",
- MimeType = "x",
- SizeBytes = 0,
- Downloadable = true,
+ ID = "file_011CNha8iCJcU1wXNR6q4V8w",
+ CreatedAt = DateTimeOffset.Parse("2025-04-15T18:37:24.100435Z"),
+ Filename = "document.pdf",
+ MimeType = "application/pdf",
+ SizeBytes = 102400,
+ Downloadable = false,
},
],
- FirstID = "first_id",
- LastID = "last_id",
+ FirstID = "file_011CNha8iCJcU1wXNR6q4V8w",
+ LastID = "file_013Zva2CMHLNnXjNJJKqJ2EF",
// Null should be interpreted as omitted for these properties
HasMore = null,
@@ -253,16 +253,16 @@ public void OptionalNonNullablePropertiesSetToNullValidation_Works()
[
new()
{
- ID = "id",
- CreatedAt = DateTimeOffset.Parse("2019-12-27T18:11:19.117Z"),
- Filename = "x",
- MimeType = "x",
- SizeBytes = 0,
- Downloadable = true,
+ ID = "file_011CNha8iCJcU1wXNR6q4V8w",
+ CreatedAt = DateTimeOffset.Parse("2025-04-15T18:37:24.100435Z"),
+ Filename = "document.pdf",
+ MimeType = "application/pdf",
+ SizeBytes = 102400,
+ Downloadable = false,
},
],
- FirstID = "first_id",
- LastID = "last_id",
+ FirstID = "file_011CNha8iCJcU1wXNR6q4V8w",
+ LastID = "file_013Zva2CMHLNnXjNJJKqJ2EF",
// Null should be interpreted as omitted for these properties
HasMore = null,
@@ -280,12 +280,12 @@ public void OptionalNullablePropertiesUnsetAreNotSet_Works()
[
new()
{
- ID = "id",
- CreatedAt = DateTimeOffset.Parse("2019-12-27T18:11:19.117Z"),
- Filename = "x",
- MimeType = "x",
- SizeBytes = 0,
- Downloadable = true,
+ ID = "file_011CNha8iCJcU1wXNR6q4V8w",
+ CreatedAt = DateTimeOffset.Parse("2025-04-15T18:37:24.100435Z"),
+ Filename = "document.pdf",
+ MimeType = "application/pdf",
+ SizeBytes = 102400,
+ Downloadable = false,
},
],
HasMore = true,
@@ -306,12 +306,12 @@ public void OptionalNullablePropertiesUnsetValidation_Works()
[
new()
{
- ID = "id",
- CreatedAt = DateTimeOffset.Parse("2019-12-27T18:11:19.117Z"),
- Filename = "x",
- MimeType = "x",
- SizeBytes = 0,
- Downloadable = true,
+ ID = "file_011CNha8iCJcU1wXNR6q4V8w",
+ CreatedAt = DateTimeOffset.Parse("2025-04-15T18:37:24.100435Z"),
+ Filename = "document.pdf",
+ MimeType = "application/pdf",
+ SizeBytes = 102400,
+ Downloadable = false,
},
],
HasMore = true,
@@ -329,12 +329,12 @@ public void OptionalNullablePropertiesSetToNullAreSetToNull_Works()
[
new()
{
- ID = "id",
- CreatedAt = DateTimeOffset.Parse("2019-12-27T18:11:19.117Z"),
- Filename = "x",
- MimeType = "x",
- SizeBytes = 0,
- Downloadable = true,
+ ID = "file_011CNha8iCJcU1wXNR6q4V8w",
+ CreatedAt = DateTimeOffset.Parse("2025-04-15T18:37:24.100435Z"),
+ Filename = "document.pdf",
+ MimeType = "application/pdf",
+ SizeBytes = 102400,
+ Downloadable = false,
},
],
HasMore = true,
@@ -358,12 +358,12 @@ public void OptionalNullablePropertiesSetToNullValidation_Works()
[
new()
{
- ID = "id",
- CreatedAt = DateTimeOffset.Parse("2019-12-27T18:11:19.117Z"),
- Filename = "x",
- MimeType = "x",
- SizeBytes = 0,
- Downloadable = true,
+ ID = "file_011CNha8iCJcU1wXNR6q4V8w",
+ CreatedAt = DateTimeOffset.Parse("2025-04-15T18:37:24.100435Z"),
+ Filename = "document.pdf",
+ MimeType = "application/pdf",
+ SizeBytes = 102400,
+ Downloadable = false,
},
],
HasMore = true,
@@ -384,17 +384,17 @@ public void CopyConstructor_Works()
[
new()
{
- ID = "id",
- CreatedAt = DateTimeOffset.Parse("2019-12-27T18:11:19.117Z"),
- Filename = "x",
- MimeType = "x",
- SizeBytes = 0,
- Downloadable = true,
+ ID = "file_011CNha8iCJcU1wXNR6q4V8w",
+ CreatedAt = DateTimeOffset.Parse("2025-04-15T18:37:24.100435Z"),
+ Filename = "document.pdf",
+ MimeType = "application/pdf",
+ SizeBytes = 102400,
+ Downloadable = false,
},
],
- FirstID = "first_id",
+ FirstID = "file_011CNha8iCJcU1wXNR6q4V8w",
HasMore = true,
- LastID = "last_id",
+ LastID = "file_013Zva2CMHLNnXjNJJKqJ2EF",
};
FileListPageResponse copied = new(model);
diff --git a/src/Anthropic.Tests/Models/Beta/Files/FileMetadataTest.cs b/src/Anthropic.Tests/Models/Beta/Files/FileMetadataTest.cs
index a02048493..10feabfca 100644
--- a/src/Anthropic.Tests/Models/Beta/Files/FileMetadataTest.cs
+++ b/src/Anthropic.Tests/Models/Beta/Files/FileMetadataTest.cs
@@ -12,21 +12,21 @@ public void FieldRoundtrip_Works()
{
var model = new FileMetadata
{
- ID = "id",
- CreatedAt = DateTimeOffset.Parse("2019-12-27T18:11:19.117Z"),
- Filename = "x",
- MimeType = "x",
- SizeBytes = 0,
- Downloadable = true,
+ ID = "file_011CNha8iCJcU1wXNR6q4V8w",
+ CreatedAt = DateTimeOffset.Parse("2025-04-15T18:37:24.100435Z"),
+ Filename = "document.pdf",
+ MimeType = "application/pdf",
+ SizeBytes = 102400,
+ Downloadable = false,
};
- string expectedID = "id";
- DateTimeOffset expectedCreatedAt = DateTimeOffset.Parse("2019-12-27T18:11:19.117Z");
- string expectedFilename = "x";
- string expectedMimeType = "x";
- long expectedSizeBytes = 0;
+ string expectedID = "file_011CNha8iCJcU1wXNR6q4V8w";
+ DateTimeOffset expectedCreatedAt = DateTimeOffset.Parse("2025-04-15T18:37:24.100435Z");
+ string expectedFilename = "document.pdf";
+ string expectedMimeType = "application/pdf";
+ long expectedSizeBytes = 102400;
JsonElement expectedType = JsonSerializer.SerializeToElement("file");
- bool expectedDownloadable = true;
+ bool expectedDownloadable = false;
Assert.Equal(expectedID, model.ID);
Assert.Equal(expectedCreatedAt, model.CreatedAt);
@@ -42,12 +42,12 @@ public void SerializationRoundtrip_Works()
{
var model = new FileMetadata
{
- ID = "id",
- CreatedAt = DateTimeOffset.Parse("2019-12-27T18:11:19.117Z"),
- Filename = "x",
- MimeType = "x",
- SizeBytes = 0,
- Downloadable = true,
+ ID = "file_011CNha8iCJcU1wXNR6q4V8w",
+ CreatedAt = DateTimeOffset.Parse("2025-04-15T18:37:24.100435Z"),
+ Filename = "document.pdf",
+ MimeType = "application/pdf",
+ SizeBytes = 102400,
+ Downloadable = false,
};
string json = JsonSerializer.Serialize(model, ModelBase.SerializerOptions);
@@ -64,12 +64,12 @@ public void FieldRoundtripThroughSerialization_Works()
{
var model = new FileMetadata
{
- ID = "id",
- CreatedAt = DateTimeOffset.Parse("2019-12-27T18:11:19.117Z"),
- Filename = "x",
- MimeType = "x",
- SizeBytes = 0,
- Downloadable = true,
+ ID = "file_011CNha8iCJcU1wXNR6q4V8w",
+ CreatedAt = DateTimeOffset.Parse("2025-04-15T18:37:24.100435Z"),
+ Filename = "document.pdf",
+ MimeType = "application/pdf",
+ SizeBytes = 102400,
+ Downloadable = false,
};
string element = JsonSerializer.Serialize(model, ModelBase.SerializerOptions);
@@ -79,13 +79,13 @@ public void FieldRoundtripThroughSerialization_Works()
);
Assert.NotNull(deserialized);
- string expectedID = "id";
- DateTimeOffset expectedCreatedAt = DateTimeOffset.Parse("2019-12-27T18:11:19.117Z");
- string expectedFilename = "x";
- string expectedMimeType = "x";
- long expectedSizeBytes = 0;
+ string expectedID = "file_011CNha8iCJcU1wXNR6q4V8w";
+ DateTimeOffset expectedCreatedAt = DateTimeOffset.Parse("2025-04-15T18:37:24.100435Z");
+ string expectedFilename = "document.pdf";
+ string expectedMimeType = "application/pdf";
+ long expectedSizeBytes = 102400;
JsonElement expectedType = JsonSerializer.SerializeToElement("file");
- bool expectedDownloadable = true;
+ bool expectedDownloadable = false;
Assert.Equal(expectedID, deserialized.ID);
Assert.Equal(expectedCreatedAt, deserialized.CreatedAt);
@@ -101,12 +101,12 @@ public void Validation_Works()
{
var model = new FileMetadata
{
- ID = "id",
- CreatedAt = DateTimeOffset.Parse("2019-12-27T18:11:19.117Z"),
- Filename = "x",
- MimeType = "x",
- SizeBytes = 0,
- Downloadable = true,
+ ID = "file_011CNha8iCJcU1wXNR6q4V8w",
+ CreatedAt = DateTimeOffset.Parse("2025-04-15T18:37:24.100435Z"),
+ Filename = "document.pdf",
+ MimeType = "application/pdf",
+ SizeBytes = 102400,
+ Downloadable = false,
};
model.Validate();
@@ -117,11 +117,11 @@ public void OptionalNonNullablePropertiesUnsetAreNotSet_Works()
{
var model = new FileMetadata
{
- ID = "id",
- CreatedAt = DateTimeOffset.Parse("2019-12-27T18:11:19.117Z"),
- Filename = "x",
- MimeType = "x",
- SizeBytes = 0,
+ ID = "file_011CNha8iCJcU1wXNR6q4V8w",
+ CreatedAt = DateTimeOffset.Parse("2025-04-15T18:37:24.100435Z"),
+ Filename = "document.pdf",
+ MimeType = "application/pdf",
+ SizeBytes = 102400,
};
Assert.Null(model.Downloadable);
@@ -133,11 +133,11 @@ public void OptionalNonNullablePropertiesUnsetValidation_Works()
{
var model = new FileMetadata
{
- ID = "id",
- CreatedAt = DateTimeOffset.Parse("2019-12-27T18:11:19.117Z"),
- Filename = "x",
- MimeType = "x",
- SizeBytes = 0,
+ ID = "file_011CNha8iCJcU1wXNR6q4V8w",
+ CreatedAt = DateTimeOffset.Parse("2025-04-15T18:37:24.100435Z"),
+ Filename = "document.pdf",
+ MimeType = "application/pdf",
+ SizeBytes = 102400,
};
model.Validate();
@@ -148,11 +148,11 @@ public void OptionalNonNullablePropertiesSetToNullAreNotSet_Works()
{
var model = new FileMetadata
{
- ID = "id",
- CreatedAt = DateTimeOffset.Parse("2019-12-27T18:11:19.117Z"),
- Filename = "x",
- MimeType = "x",
- SizeBytes = 0,
+ ID = "file_011CNha8iCJcU1wXNR6q4V8w",
+ CreatedAt = DateTimeOffset.Parse("2025-04-15T18:37:24.100435Z"),
+ Filename = "document.pdf",
+ MimeType = "application/pdf",
+ SizeBytes = 102400,
// Null should be interpreted as omitted for these properties
Downloadable = null,
@@ -167,11 +167,11 @@ public void OptionalNonNullablePropertiesSetToNullValidation_Works()
{
var model = new FileMetadata
{
- ID = "id",
- CreatedAt = DateTimeOffset.Parse("2019-12-27T18:11:19.117Z"),
- Filename = "x",
- MimeType = "x",
- SizeBytes = 0,
+ ID = "file_011CNha8iCJcU1wXNR6q4V8w",
+ CreatedAt = DateTimeOffset.Parse("2025-04-15T18:37:24.100435Z"),
+ Filename = "document.pdf",
+ MimeType = "application/pdf",
+ SizeBytes = 102400,
// Null should be interpreted as omitted for these properties
Downloadable = null,
@@ -185,12 +185,12 @@ public void CopyConstructor_Works()
{
var model = new FileMetadata
{
- ID = "id",
- CreatedAt = DateTimeOffset.Parse("2019-12-27T18:11:19.117Z"),
- Filename = "x",
- MimeType = "x",
- SizeBytes = 0,
- Downloadable = true,
+ ID = "file_011CNha8iCJcU1wXNR6q4V8w",
+ CreatedAt = DateTimeOffset.Parse("2025-04-15T18:37:24.100435Z"),
+ Filename = "document.pdf",
+ MimeType = "application/pdf",
+ SizeBytes = 102400,
+ Downloadable = false,
};
FileMetadata copied = new(model);
diff --git a/src/Anthropic.Tests/Models/Beta/Messages/Batches/BatchCreateParamsTest.cs b/src/Anthropic.Tests/Models/Beta/Messages/Batches/BatchCreateParamsTest.cs
index e48a4fced..78e51fbfe 100644
--- a/src/Anthropic.Tests/Models/Beta/Messages/Batches/BatchCreateParamsTest.cs
+++ b/src/Anthropic.Tests/Models/Beta/Messages/Batches/BatchCreateParamsTest.cs
@@ -36,9 +36,9 @@ public void FieldRoundtrip_Works()
[
new()
{
- SkillID = "x",
+ SkillID = "pdf",
Type = Messages::BetaSkillParamsType.Anthropic,
- Version = "x",
+ Version = "latest",
},
],
},
@@ -115,7 +115,10 @@ public void FieldRoundtrip_Works()
]
),
Temperature = 1,
- Thinking = new Messages::BetaThinkingConfigEnabled(1024),
+ Thinking = new Messages::BetaThinkingConfigAdaptive()
+ {
+ Display = Messages::Display.Summarized,
+ },
ToolChoice = new Messages::BetaToolChoiceAuto()
{
DisableParallelToolUse = true,
@@ -176,9 +179,9 @@ public void FieldRoundtrip_Works()
[
new()
{
- SkillID = "x",
+ SkillID = "pdf",
Type = Messages::BetaSkillParamsType.Anthropic,
- Version = "x",
+ Version = "latest",
},
],
},
@@ -251,7 +254,10 @@ public void FieldRoundtrip_Works()
]
),
Temperature = 1,
- Thinking = new Messages::BetaThinkingConfigEnabled(1024),
+ Thinking = new Messages::BetaThinkingConfigAdaptive()
+ {
+ Display = Messages::Display.Summarized,
+ },
ToolChoice = new Messages::BetaToolChoiceAuto()
{
DisableParallelToolUse = true,
@@ -332,9 +338,9 @@ public void OptionalNonNullableParamsUnsetAreNotSet_Works()
[
new()
{
- SkillID = "x",
+ SkillID = "pdf",
Type = Messages::BetaSkillParamsType.Anthropic,
- Version = "x",
+ Version = "latest",
},
],
},
@@ -411,7 +417,10 @@ public void OptionalNonNullableParamsUnsetAreNotSet_Works()
]
),
Temperature = 1,
- Thinking = new Messages::BetaThinkingConfigEnabled(1024),
+ Thinking = new Messages::BetaThinkingConfigAdaptive()
+ {
+ Display = Messages::Display.Summarized,
+ },
ToolChoice = new Messages::BetaToolChoiceAuto()
{
DisableParallelToolUse = true,
@@ -480,9 +489,9 @@ public void OptionalNonNullableParamsSetToNullAreNotSet_Works()
[
new()
{
- SkillID = "x",
+ SkillID = "pdf",
Type = Messages::BetaSkillParamsType.Anthropic,
- Version = "x",
+ Version = "latest",
},
],
},
@@ -559,7 +568,10 @@ public void OptionalNonNullableParamsSetToNullAreNotSet_Works()
]
),
Temperature = 1,
- Thinking = new Messages::BetaThinkingConfigEnabled(1024),
+ Thinking = new Messages::BetaThinkingConfigAdaptive()
+ {
+ Display = Messages::Display.Summarized,
+ },
ToolChoice = new Messages::BetaToolChoiceAuto()
{
DisableParallelToolUse = true,
@@ -631,9 +643,9 @@ public void Url_Works()
[
new()
{
- SkillID = "x",
+ SkillID = "pdf",
Type = Messages::BetaSkillParamsType.Anthropic,
- Version = "x",
+ Version = "latest",
},
],
},
@@ -710,7 +722,10 @@ public void Url_Works()
]
),
Temperature = 1,
- Thinking = new Messages::BetaThinkingConfigEnabled(1024),
+ Thinking = new Messages::BetaThinkingConfigAdaptive()
+ {
+ Display = Messages::Display.Summarized,
+ },
ToolChoice = new Messages::BetaToolChoiceAuto()
{
DisableParallelToolUse = true,
@@ -781,9 +796,9 @@ public void AddHeadersToRequest_Works()
[
new()
{
- SkillID = "x",
+ SkillID = "pdf",
Type = Messages::BetaSkillParamsType.Anthropic,
- Version = "x",
+ Version = "latest",
},
],
},
@@ -860,7 +875,10 @@ public void AddHeadersToRequest_Works()
]
),
Temperature = 1,
- Thinking = new Messages::BetaThinkingConfigEnabled(1024),
+ Thinking = new Messages::BetaThinkingConfigAdaptive()
+ {
+ Display = Messages::Display.Summarized,
+ },
ToolChoice = new Messages::BetaToolChoiceAuto()
{
DisableParallelToolUse = true,
@@ -934,9 +952,9 @@ public void CopyConstructor_Works()
[
new()
{
- SkillID = "x",
+ SkillID = "pdf",
Type = Messages::BetaSkillParamsType.Anthropic,
- Version = "x",
+ Version = "latest",
},
],
},
@@ -1013,7 +1031,10 @@ public void CopyConstructor_Works()
]
),
Temperature = 1,
- Thinking = new Messages::BetaThinkingConfigEnabled(1024),
+ Thinking = new Messages::BetaThinkingConfigAdaptive()
+ {
+ Display = Messages::Display.Summarized,
+ },
ToolChoice = new Messages::BetaToolChoiceAuto()
{
DisableParallelToolUse = true,
@@ -1083,9 +1104,9 @@ public void FieldRoundtrip_Works()
[
new()
{
- SkillID = "x",
+ SkillID = "pdf",
Type = Messages::BetaSkillParamsType.Anthropic,
- Version = "x",
+ Version = "latest",
},
],
},
@@ -1158,7 +1179,10 @@ public void FieldRoundtrip_Works()
]
),
Temperature = 1,
- Thinking = new Messages::BetaThinkingConfigEnabled(1024),
+ Thinking = new Messages::BetaThinkingConfigAdaptive()
+ {
+ Display = Messages::Display.Summarized,
+ },
ToolChoice = new Messages::BetaToolChoiceAuto() { DisableParallelToolUse = true },
Tools =
[
@@ -1209,9 +1233,9 @@ public void FieldRoundtrip_Works()
[
new()
{
- SkillID = "x",
+ SkillID = "pdf",
Type = Messages::BetaSkillParamsType.Anthropic,
- Version = "x",
+ Version = "latest",
},
],
},
@@ -1284,7 +1308,10 @@ public void FieldRoundtrip_Works()
]
),
Temperature = 1,
- Thinking = new Messages::BetaThinkingConfigEnabled(1024),
+ Thinking = new Messages::BetaThinkingConfigAdaptive()
+ {
+ Display = Messages::Display.Summarized,
+ },
ToolChoice = new Messages::BetaToolChoiceAuto() { DisableParallelToolUse = true },
Tools =
[
@@ -1343,9 +1370,9 @@ public void SerializationRoundtrip_Works()
[
new()
{
- SkillID = "x",
+ SkillID = "pdf",
Type = Messages::BetaSkillParamsType.Anthropic,
- Version = "x",
+ Version = "latest",
},
],
},
@@ -1418,7 +1445,10 @@ public void SerializationRoundtrip_Works()
]
),
Temperature = 1,
- Thinking = new Messages::BetaThinkingConfigEnabled(1024),
+ Thinking = new Messages::BetaThinkingConfigAdaptive()
+ {
+ Display = Messages::Display.Summarized,
+ },
ToolChoice = new Messages::BetaToolChoiceAuto() { DisableParallelToolUse = true },
Tools =
[
@@ -1480,9 +1510,9 @@ public void FieldRoundtripThroughSerialization_Works()
[
new()
{
- SkillID = "x",
+ SkillID = "pdf",
Type = Messages::BetaSkillParamsType.Anthropic,
- Version = "x",
+ Version = "latest",
},
],
},
@@ -1555,7 +1585,10 @@ public void FieldRoundtripThroughSerialization_Works()
]
),
Temperature = 1,
- Thinking = new Messages::BetaThinkingConfigEnabled(1024),
+ Thinking = new Messages::BetaThinkingConfigAdaptive()
+ {
+ Display = Messages::Display.Summarized,
+ },
ToolChoice = new Messages::BetaToolChoiceAuto() { DisableParallelToolUse = true },
Tools =
[
@@ -1613,9 +1646,9 @@ public void FieldRoundtripThroughSerialization_Works()
[
new()
{
- SkillID = "x",
+ SkillID = "pdf",
Type = Messages::BetaSkillParamsType.Anthropic,
- Version = "x",
+ Version = "latest",
},
],
},
@@ -1688,7 +1721,10 @@ public void FieldRoundtripThroughSerialization_Works()
]
),
Temperature = 1,
- Thinking = new Messages::BetaThinkingConfigEnabled(1024),
+ Thinking = new Messages::BetaThinkingConfigAdaptive()
+ {
+ Display = Messages::Display.Summarized,
+ },
ToolChoice = new Messages::BetaToolChoiceAuto() { DisableParallelToolUse = true },
Tools =
[
@@ -1747,9 +1783,9 @@ public void Validation_Works()
[
new()
{
- SkillID = "x",
+ SkillID = "pdf",
Type = Messages::BetaSkillParamsType.Anthropic,
- Version = "x",
+ Version = "latest",
},
],
},
@@ -1822,7 +1858,10 @@ public void Validation_Works()
]
),
Temperature = 1,
- Thinking = new Messages::BetaThinkingConfigEnabled(1024),
+ Thinking = new Messages::BetaThinkingConfigAdaptive()
+ {
+ Display = Messages::Display.Summarized,
+ },
ToolChoice = new Messages::BetaToolChoiceAuto() { DisableParallelToolUse = true },
Tools =
[
@@ -1881,9 +1920,9 @@ public void CopyConstructor_Works()
[
new()
{
- SkillID = "x",
+ SkillID = "pdf",
Type = Messages::BetaSkillParamsType.Anthropic,
- Version = "x",
+ Version = "latest",
},
],
},
@@ -1956,7 +1995,10 @@ public void CopyConstructor_Works()
]
),
Temperature = 1,
- Thinking = new Messages::BetaThinkingConfigEnabled(1024),
+ Thinking = new Messages::BetaThinkingConfigAdaptive()
+ {
+ Display = Messages::Display.Summarized,
+ },
ToolChoice = new Messages::BetaToolChoiceAuto() { DisableParallelToolUse = true },
Tools =
[
@@ -2017,9 +2059,9 @@ public void FieldRoundtrip_Works()
[
new()
{
- SkillID = "x",
+ SkillID = "pdf",
Type = Messages::BetaSkillParamsType.Anthropic,
- Version = "x",
+ Version = "latest",
},
],
},
@@ -2092,7 +2134,10 @@ public void FieldRoundtrip_Works()
]
),
Temperature = 1,
- Thinking = new Messages::BetaThinkingConfigEnabled(1024),
+ Thinking = new Messages::BetaThinkingConfigAdaptive()
+ {
+ Display = Messages::Display.Summarized,
+ },
ToolChoice = new Messages::BetaToolChoiceAuto() { DisableParallelToolUse = true },
Tools =
[
@@ -2145,9 +2190,9 @@ public void FieldRoundtrip_Works()
[
new()
{
- SkillID = "x",
+ SkillID = "pdf",
Type = Messages::BetaSkillParamsType.Anthropic,
- Version = "x",
+ Version = "latest",
},
],
};
@@ -2224,7 +2269,7 @@ public void FieldRoundtrip_Works()
);
double expectedTemperature = 1;
Messages::BetaThinkingConfigParam expectedThinking =
- new Messages::BetaThinkingConfigEnabled(1024);
+ new Messages::BetaThinkingConfigAdaptive() { Display = Messages::Display.Summarized };
Messages::BetaToolChoice expectedToolChoice = new Messages::BetaToolChoiceAuto()
{
DisableParallelToolUse = true,
@@ -2321,9 +2366,9 @@ public void SerializationRoundtrip_Works()
[
new()
{
- SkillID = "x",
+ SkillID = "pdf",
Type = Messages::BetaSkillParamsType.Anthropic,
- Version = "x",
+ Version = "latest",
},
],
},
@@ -2396,7 +2441,10 @@ public void SerializationRoundtrip_Works()
]
),
Temperature = 1,
- Thinking = new Messages::BetaThinkingConfigEnabled(1024),
+ Thinking = new Messages::BetaThinkingConfigAdaptive()
+ {
+ Display = Messages::Display.Summarized,
+ },
ToolChoice = new Messages::BetaToolChoiceAuto() { DisableParallelToolUse = true },
Tools =
[
@@ -2454,9 +2502,9 @@ public void FieldRoundtripThroughSerialization_Works()
[
new()
{
- SkillID = "x",
+ SkillID = "pdf",
Type = Messages::BetaSkillParamsType.Anthropic,
- Version = "x",
+ Version = "latest",
},
],
},
@@ -2529,7 +2577,10 @@ public void FieldRoundtripThroughSerialization_Works()
]
),
Temperature = 1,
- Thinking = new Messages::BetaThinkingConfigEnabled(1024),
+ Thinking = new Messages::BetaThinkingConfigAdaptive()
+ {
+ Display = Messages::Display.Summarized,
+ },
ToolChoice = new Messages::BetaToolChoiceAuto() { DisableParallelToolUse = true },
Tools =
[
@@ -2586,9 +2637,9 @@ public void FieldRoundtripThroughSerialization_Works()
[
new()
{
- SkillID = "x",
+ SkillID = "pdf",
Type = Messages::BetaSkillParamsType.Anthropic,
- Version = "x",
+ Version = "latest",
},
],
};
@@ -2665,7 +2716,7 @@ public void FieldRoundtripThroughSerialization_Works()
);
double expectedTemperature = 1;
Messages::BetaThinkingConfigParam expectedThinking =
- new Messages::BetaThinkingConfigEnabled(1024);
+ new Messages::BetaThinkingConfigAdaptive() { Display = Messages::Display.Summarized };
Messages::BetaToolChoice expectedToolChoice = new Messages::BetaToolChoiceAuto()
{
DisableParallelToolUse = true,
@@ -2762,9 +2813,9 @@ public void Validation_Works()
[
new()
{
- SkillID = "x",
+ SkillID = "pdf",
Type = Messages::BetaSkillParamsType.Anthropic,
- Version = "x",
+ Version = "latest",
},
],
},
@@ -2837,7 +2888,10 @@ public void Validation_Works()
]
),
Temperature = 1,
- Thinking = new Messages::BetaThinkingConfigEnabled(1024),
+ Thinking = new Messages::BetaThinkingConfigAdaptive()
+ {
+ Display = Messages::Display.Summarized,
+ },
ToolChoice = new Messages::BetaToolChoiceAuto() { DisableParallelToolUse = true },
Tools =
[
@@ -2892,9 +2946,9 @@ public void OptionalNonNullablePropertiesUnsetAreNotSet_Works()
[
new()
{
- SkillID = "x",
+ SkillID = "pdf",
Type = Messages::BetaSkillParamsType.Anthropic,
- Version = "x",
+ Version = "latest",
},
],
},
@@ -2967,9 +3021,9 @@ public void OptionalNonNullablePropertiesUnsetValidation_Works()
[
new()
{
- SkillID = "x",
+ SkillID = "pdf",
Type = Messages::BetaSkillParamsType.Anthropic,
- Version = "x",
+ Version = "latest",
},
],
},
@@ -3017,9 +3071,9 @@ public void OptionalNonNullablePropertiesSetToNullAreNotSet_Works()
[
new()
{
- SkillID = "x",
+ SkillID = "pdf",
Type = Messages::BetaSkillParamsType.Anthropic,
- Version = "x",
+ Version = "latest",
},
],
},
@@ -3107,9 +3161,9 @@ public void OptionalNonNullablePropertiesSetToNullValidation_Works()
[
new()
{
- SkillID = "x",
+ SkillID = "pdf",
Type = Messages::BetaSkillParamsType.Anthropic,
- Version = "x",
+ Version = "latest",
},
],
},
@@ -3210,7 +3264,10 @@ public void OptionalNullablePropertiesUnsetAreNotSet_Works()
]
),
Temperature = 1,
- Thinking = new Messages::BetaThinkingConfigEnabled(1024),
+ Thinking = new Messages::BetaThinkingConfigAdaptive()
+ {
+ Display = Messages::Display.Summarized,
+ },
ToolChoice = new Messages::BetaToolChoiceAuto() { DisableParallelToolUse = true },
Tools =
[
@@ -3314,7 +3371,10 @@ public void OptionalNullablePropertiesUnsetValidation_Works()
]
),
Temperature = 1,
- Thinking = new Messages::BetaThinkingConfigEnabled(1024),
+ Thinking = new Messages::BetaThinkingConfigAdaptive()
+ {
+ Display = Messages::Display.Summarized,
+ },
ToolChoice = new Messages::BetaToolChoiceAuto() { DisableParallelToolUse = true },
Tools =
[
@@ -3407,7 +3467,10 @@ public void OptionalNullablePropertiesSetToNullAreSetToNull_Works()
]
),
Temperature = 1,
- Thinking = new Messages::BetaThinkingConfigEnabled(1024),
+ Thinking = new Messages::BetaThinkingConfigAdaptive()
+ {
+ Display = Messages::Display.Summarized,
+ },
ToolChoice = new Messages::BetaToolChoiceAuto() { DisableParallelToolUse = true },
Tools =
[
@@ -3518,7 +3581,10 @@ public void OptionalNullablePropertiesSetToNullValidation_Works()
]
),
Temperature = 1,
- Thinking = new Messages::BetaThinkingConfigEnabled(1024),
+ Thinking = new Messages::BetaThinkingConfigAdaptive()
+ {
+ Display = Messages::Display.Summarized,
+ },
ToolChoice = new Messages::BetaToolChoiceAuto() { DisableParallelToolUse = true },
Tools =
[
@@ -3580,9 +3646,9 @@ public void CopyConstructor_Works()
[
new()
{
- SkillID = "x",
+ SkillID = "pdf",
Type = Messages::BetaSkillParamsType.Anthropic,
- Version = "x",
+ Version = "latest",
},
],
},
@@ -3655,7 +3721,10 @@ public void CopyConstructor_Works()
]
),
Temperature = 1,
- Thinking = new Messages::BetaThinkingConfigEnabled(1024),
+ Thinking = new Messages::BetaThinkingConfigAdaptive()
+ {
+ Display = Messages::Display.Summarized,
+ },
ToolChoice = new Messages::BetaToolChoiceAuto() { DisableParallelToolUse = true },
Tools =
[
@@ -3709,9 +3778,9 @@ public void BetaContainerParamsValidationWorks()
[
new()
{
- SkillID = "x",
+ SkillID = "pdf",
Type = Messages::BetaSkillParamsType.Anthropic,
- Version = "x",
+ Version = "latest",
},
],
};
@@ -3735,9 +3804,9 @@ public void BetaContainerParamsSerializationRoundtripWorks()
[
new()
{
- SkillID = "x",
+ SkillID = "pdf",
Type = Messages::BetaSkillParamsType.Anthropic,
- Version = "x",
+ Version = "latest",
},
],
};
diff --git a/src/Anthropic.Tests/Models/Beta/Messages/Batches/BetaMessageBatchIndividualResponseTest.cs b/src/Anthropic.Tests/Models/Beta/Messages/Batches/BetaMessageBatchIndividualResponseTest.cs
index 5c2360bf7..1ac9c15e1 100644
--- a/src/Anthropic.Tests/Models/Beta/Messages/Batches/BetaMessageBatchIndividualResponseTest.cs
+++ b/src/Anthropic.Tests/Models/Beta/Messages/Batches/BetaMessageBatchIndividualResponseTest.cs
@@ -27,9 +27,9 @@ public void FieldRoundtrip_Works()
[
new()
{
- SkillID = "x",
+ SkillID = "pdf",
Type = Messages::Type.Anthropic,
- Version = "x",
+ Version = "latest",
},
],
},
@@ -112,9 +112,9 @@ public void FieldRoundtrip_Works()
[
new()
{
- SkillID = "x",
+ SkillID = "pdf",
Type = Messages::Type.Anthropic,
- Version = "x",
+ Version = "latest",
},
],
},
@@ -205,9 +205,9 @@ public void SerializationRoundtrip_Works()
[
new()
{
- SkillID = "x",
+ SkillID = "pdf",
Type = Messages::Type.Anthropic,
- Version = "x",
+ Version = "latest",
},
],
},
@@ -304,9 +304,9 @@ public void FieldRoundtripThroughSerialization_Works()
[
new()
{
- SkillID = "x",
+ SkillID = "pdf",
Type = Messages::Type.Anthropic,
- Version = "x",
+ Version = "latest",
},
],
},
@@ -396,9 +396,9 @@ public void FieldRoundtripThroughSerialization_Works()
[
new()
{
- SkillID = "x",
+ SkillID = "pdf",
Type = Messages::Type.Anthropic,
- Version = "x",
+ Version = "latest",
},
],
},
@@ -489,9 +489,9 @@ public void Validation_Works()
[
new()
{
- SkillID = "x",
+ SkillID = "pdf",
Type = Messages::Type.Anthropic,
- Version = "x",
+ Version = "latest",
},
],
},
@@ -582,9 +582,9 @@ public void CopyConstructor_Works()
[
new()
{
- SkillID = "x",
+ SkillID = "pdf",
Type = Messages::Type.Anthropic,
- Version = "x",
+ Version = "latest",
},
],
},
diff --git a/src/Anthropic.Tests/Models/Beta/Messages/Batches/BetaMessageBatchResultTest.cs b/src/Anthropic.Tests/Models/Beta/Messages/Batches/BetaMessageBatchResultTest.cs
index 6cbf6cc7f..7a36b5e4c 100644
--- a/src/Anthropic.Tests/Models/Beta/Messages/Batches/BetaMessageBatchResultTest.cs
+++ b/src/Anthropic.Tests/Models/Beta/Messages/Batches/BetaMessageBatchResultTest.cs
@@ -25,9 +25,9 @@ public void SucceededValidationWorks()
[
new()
{
- SkillID = "x",
+ SkillID = "pdf",
Type = Messages::Type.Anthropic,
- Version = "x",
+ Version = "latest",
},
],
},
@@ -140,9 +140,9 @@ public void SucceededSerializationRoundtripWorks()
[
new()
{
- SkillID = "x",
+ SkillID = "pdf",
Type = Messages::Type.Anthropic,
- Version = "x",
+ Version = "latest",
},
],
},
diff --git a/src/Anthropic.Tests/Models/Beta/Messages/Batches/BetaMessageBatchSucceededResultTest.cs b/src/Anthropic.Tests/Models/Beta/Messages/Batches/BetaMessageBatchSucceededResultTest.cs
index e851b9f32..aaface610 100644
--- a/src/Anthropic.Tests/Models/Beta/Messages/Batches/BetaMessageBatchSucceededResultTest.cs
+++ b/src/Anthropic.Tests/Models/Beta/Messages/Batches/BetaMessageBatchSucceededResultTest.cs
@@ -25,9 +25,9 @@ public void FieldRoundtrip_Works()
[
new()
{
- SkillID = "x",
+ SkillID = "pdf",
Type = Messages::Type.Anthropic,
- Version = "x",
+ Version = "latest",
},
],
},
@@ -107,9 +107,9 @@ public void FieldRoundtrip_Works()
[
new()
{
- SkillID = "x",
+ SkillID = "pdf",
Type = Messages::Type.Anthropic,
- Version = "x",
+ Version = "latest",
},
],
},
@@ -194,9 +194,9 @@ public void SerializationRoundtrip_Works()
[
new()
{
- SkillID = "x",
+ SkillID = "pdf",
Type = Messages::Type.Anthropic,
- Version = "x",
+ Version = "latest",
},
],
},
@@ -290,9 +290,9 @@ public void FieldRoundtripThroughSerialization_Works()
[
new()
{
- SkillID = "x",
+ SkillID = "pdf",
Type = Messages::Type.Anthropic,
- Version = "x",
+ Version = "latest",
},
],
},
@@ -379,9 +379,9 @@ public void FieldRoundtripThroughSerialization_Works()
[
new()
{
- SkillID = "x",
+ SkillID = "pdf",
Type = Messages::Type.Anthropic,
- Version = "x",
+ Version = "latest",
},
],
},
@@ -466,9 +466,9 @@ public void Validation_Works()
[
new()
{
- SkillID = "x",
+ SkillID = "pdf",
Type = Messages::Type.Anthropic,
- Version = "x",
+ Version = "latest",
},
],
},
@@ -556,9 +556,9 @@ public void CopyConstructor_Works()
[
new()
{
- SkillID = "x",
+ SkillID = "pdf",
Type = Messages::Type.Anthropic,
- Version = "x",
+ Version = "latest",
},
],
},
diff --git a/src/Anthropic.Tests/Models/Beta/Messages/BetaContainerParamsTest.cs b/src/Anthropic.Tests/Models/Beta/Messages/BetaContainerParamsTest.cs
index 0d28b42a3..508e71ab1 100644
--- a/src/Anthropic.Tests/Models/Beta/Messages/BetaContainerParamsTest.cs
+++ b/src/Anthropic.Tests/Models/Beta/Messages/BetaContainerParamsTest.cs
@@ -17,9 +17,9 @@ public void FieldRoundtrip_Works()
[
new()
{
- SkillID = "x",
+ SkillID = "pdf",
Type = BetaSkillParamsType.Anthropic,
- Version = "x",
+ Version = "latest",
},
],
};
@@ -29,9 +29,9 @@ public void FieldRoundtrip_Works()
[
new()
{
- SkillID = "x",
+ SkillID = "pdf",
Type = BetaSkillParamsType.Anthropic,
- Version = "x",
+ Version = "latest",
},
];
@@ -54,9 +54,9 @@ public void SerializationRoundtrip_Works()
[
new()
{
- SkillID = "x",
+ SkillID = "pdf",
Type = BetaSkillParamsType.Anthropic,
- Version = "x",
+ Version = "latest",
},
],
};
@@ -80,9 +80,9 @@ public void FieldRoundtripThroughSerialization_Works()
[
new()
{
- SkillID = "x",
+ SkillID = "pdf",
Type = BetaSkillParamsType.Anthropic,
- Version = "x",
+ Version = "latest",
},
],
};
@@ -99,9 +99,9 @@ public void FieldRoundtripThroughSerialization_Works()
[
new()
{
- SkillID = "x",
+ SkillID = "pdf",
Type = BetaSkillParamsType.Anthropic,
- Version = "x",
+ Version = "latest",
},
];
@@ -124,9 +124,9 @@ public void Validation_Works()
[
new()
{
- SkillID = "x",
+ SkillID = "pdf",
Type = BetaSkillParamsType.Anthropic,
- Version = "x",
+ Version = "latest",
},
],
};
@@ -182,9 +182,9 @@ public void CopyConstructor_Works()
[
new()
{
- SkillID = "x",
+ SkillID = "pdf",
Type = BetaSkillParamsType.Anthropic,
- Version = "x",
+ Version = "latest",
},
],
};
diff --git a/src/Anthropic.Tests/Models/Beta/Messages/BetaContainerTest.cs b/src/Anthropic.Tests/Models/Beta/Messages/BetaContainerTest.cs
index b01c7e7d4..60a6aedbd 100644
--- a/src/Anthropic.Tests/Models/Beta/Messages/BetaContainerTest.cs
+++ b/src/Anthropic.Tests/Models/Beta/Messages/BetaContainerTest.cs
@@ -19,9 +19,9 @@ public void FieldRoundtrip_Works()
[
new()
{
- SkillID = "x",
+ SkillID = "pdf",
Type = Messages::Type.Anthropic,
- Version = "x",
+ Version = "latest",
},
],
};
@@ -32,9 +32,9 @@ public void FieldRoundtrip_Works()
[
new()
{
- SkillID = "x",
+ SkillID = "pdf",
Type = Messages::Type.Anthropic,
- Version = "x",
+ Version = "latest",
},
];
@@ -59,9 +59,9 @@ public void SerializationRoundtrip_Works()
[
new()
{
- SkillID = "x",
+ SkillID = "pdf",
Type = Messages::Type.Anthropic,
- Version = "x",
+ Version = "latest",
},
],
};
@@ -86,9 +86,9 @@ public void FieldRoundtripThroughSerialization_Works()
[
new()
{
- SkillID = "x",
+ SkillID = "pdf",
Type = Messages::Type.Anthropic,
- Version = "x",
+ Version = "latest",
},
],
};
@@ -106,9 +106,9 @@ public void FieldRoundtripThroughSerialization_Works()
[
new()
{
- SkillID = "x",
+ SkillID = "pdf",
Type = Messages::Type.Anthropic,
- Version = "x",
+ Version = "latest",
},
];
@@ -133,9 +133,9 @@ public void Validation_Works()
[
new()
{
- SkillID = "x",
+ SkillID = "pdf",
Type = Messages::Type.Anthropic,
- Version = "x",
+ Version = "latest",
},
],
};
@@ -154,9 +154,9 @@ public void CopyConstructor_Works()
[
new()
{
- SkillID = "x",
+ SkillID = "pdf",
Type = Messages::Type.Anthropic,
- Version = "x",
+ Version = "latest",
},
],
};
diff --git a/src/Anthropic.Tests/Models/Beta/Messages/BetaMessageTest.cs b/src/Anthropic.Tests/Models/Beta/Messages/BetaMessageTest.cs
index 2f340621e..83ddb52a1 100644
--- a/src/Anthropic.Tests/Models/Beta/Messages/BetaMessageTest.cs
+++ b/src/Anthropic.Tests/Models/Beta/Messages/BetaMessageTest.cs
@@ -23,9 +23,9 @@ public void FieldRoundtrip_Works()
[
new()
{
- SkillID = "x",
+ SkillID = "pdf",
Type = Messages::Type.Anthropic,
- Version = "x",
+ Version = "latest",
},
],
},
@@ -98,9 +98,9 @@ public void FieldRoundtrip_Works()
[
new()
{
- SkillID = "x",
+ SkillID = "pdf",
Type = Messages::Type.Anthropic,
- Version = "x",
+ Version = "latest",
},
],
};
@@ -195,9 +195,9 @@ public void SerializationRoundtrip_Works()
[
new()
{
- SkillID = "x",
+ SkillID = "pdf",
Type = Messages::Type.Anthropic,
- Version = "x",
+ Version = "latest",
},
],
},
@@ -284,9 +284,9 @@ public void FieldRoundtripThroughSerialization_Works()
[
new()
{
- SkillID = "x",
+ SkillID = "pdf",
Type = Messages::Type.Anthropic,
- Version = "x",
+ Version = "latest",
},
],
},
@@ -366,9 +366,9 @@ public void FieldRoundtripThroughSerialization_Works()
[
new()
{
- SkillID = "x",
+ SkillID = "pdf",
Type = Messages::Type.Anthropic,
- Version = "x",
+ Version = "latest",
},
],
};
@@ -463,9 +463,9 @@ public void Validation_Works()
[
new()
{
- SkillID = "x",
+ SkillID = "pdf",
Type = Messages::Type.Anthropic,
- Version = "x",
+ Version = "latest",
},
],
},
@@ -546,9 +546,9 @@ public void CopyConstructor_Works()
[
new()
{
- SkillID = "x",
+ SkillID = "pdf",
Type = Messages::Type.Anthropic,
- Version = "x",
+ Version = "latest",
},
],
},
diff --git a/src/Anthropic.Tests/Models/Beta/Messages/BetaRawMessageDeltaEventTest.cs b/src/Anthropic.Tests/Models/Beta/Messages/BetaRawMessageDeltaEventTest.cs
index 9df0f297d..a59dcc659 100644
--- a/src/Anthropic.Tests/Models/Beta/Messages/BetaRawMessageDeltaEventTest.cs
+++ b/src/Anthropic.Tests/Models/Beta/Messages/BetaRawMessageDeltaEventTest.cs
@@ -31,9 +31,9 @@ public void FieldRoundtrip_Works()
[
new()
{
- SkillID = "x",
+ SkillID = "pdf",
Type = Messages::Type.Anthropic,
- Version = "x",
+ Version = "latest",
},
],
},
@@ -84,9 +84,9 @@ public void FieldRoundtrip_Works()
[
new()
{
- SkillID = "x",
+ SkillID = "pdf",
Type = Messages::Type.Anthropic,
- Version = "x",
+ Version = "latest",
},
],
},
@@ -148,9 +148,9 @@ public void SerializationRoundtrip_Works()
[
new()
{
- SkillID = "x",
+ SkillID = "pdf",
Type = Messages::Type.Anthropic,
- Version = "x",
+ Version = "latest",
},
],
},
@@ -215,9 +215,9 @@ public void FieldRoundtripThroughSerialization_Works()
[
new()
{
- SkillID = "x",
+ SkillID = "pdf",
Type = Messages::Type.Anthropic,
- Version = "x",
+ Version = "latest",
},
],
},
@@ -275,9 +275,9 @@ public void FieldRoundtripThroughSerialization_Works()
[
new()
{
- SkillID = "x",
+ SkillID = "pdf",
Type = Messages::Type.Anthropic,
- Version = "x",
+ Version = "latest",
},
],
},
@@ -339,9 +339,9 @@ public void Validation_Works()
[
new()
{
- SkillID = "x",
+ SkillID = "pdf",
Type = Messages::Type.Anthropic,
- Version = "x",
+ Version = "latest",
},
],
},
@@ -400,9 +400,9 @@ public void CopyConstructor_Works()
[
new()
{
- SkillID = "x",
+ SkillID = "pdf",
Type = Messages::Type.Anthropic,
- Version = "x",
+ Version = "latest",
},
],
},
@@ -455,9 +455,9 @@ public void FieldRoundtrip_Works()
[
new()
{
- SkillID = "x",
+ SkillID = "pdf",
Type = Messages::Type.Anthropic,
- Version = "x",
+ Version = "latest",
},
],
},
@@ -473,9 +473,9 @@ public void FieldRoundtrip_Works()
[
new()
{
- SkillID = "x",
+ SkillID = "pdf",
Type = Messages::Type.Anthropic,
- Version = "x",
+ Version = "latest",
},
],
};
@@ -501,9 +501,9 @@ public void SerializationRoundtrip_Works()
[
new()
{
- SkillID = "x",
+ SkillID = "pdf",
Type = Messages::Type.Anthropic,
- Version = "x",
+ Version = "latest",
},
],
},
@@ -533,9 +533,9 @@ public void FieldRoundtripThroughSerialization_Works()
[
new()
{
- SkillID = "x",
+ SkillID = "pdf",
Type = Messages::Type.Anthropic,
- Version = "x",
+ Version = "latest",
},
],
},
@@ -558,9 +558,9 @@ public void FieldRoundtripThroughSerialization_Works()
[
new()
{
- SkillID = "x",
+ SkillID = "pdf",
Type = Messages::Type.Anthropic,
- Version = "x",
+ Version = "latest",
},
],
};
@@ -586,9 +586,9 @@ public void Validation_Works()
[
new()
{
- SkillID = "x",
+ SkillID = "pdf",
Type = Messages::Type.Anthropic,
- Version = "x",
+ Version = "latest",
},
],
},
@@ -612,9 +612,9 @@ public void CopyConstructor_Works()
[
new()
{
- SkillID = "x",
+ SkillID = "pdf",
Type = Messages::Type.Anthropic,
- Version = "x",
+ Version = "latest",
},
],
},
diff --git a/src/Anthropic.Tests/Models/Beta/Messages/BetaRawMessageStartEventTest.cs b/src/Anthropic.Tests/Models/Beta/Messages/BetaRawMessageStartEventTest.cs
index 27edfb53d..3b9a01d6b 100644
--- a/src/Anthropic.Tests/Models/Beta/Messages/BetaRawMessageStartEventTest.cs
+++ b/src/Anthropic.Tests/Models/Beta/Messages/BetaRawMessageStartEventTest.cs
@@ -24,9 +24,9 @@ public void FieldRoundtrip_Works()
[
new()
{
- SkillID = "x",
+ SkillID = "pdf",
Type = Messages::Type.Anthropic,
- Version = "x",
+ Version = "latest",
},
],
},
@@ -106,9 +106,9 @@ public void FieldRoundtrip_Works()
[
new()
{
- SkillID = "x",
+ SkillID = "pdf",
Type = Messages::Type.Anthropic,
- Version = "x",
+ Version = "latest",
},
],
},
@@ -193,9 +193,9 @@ public void SerializationRoundtrip_Works()
[
new()
{
- SkillID = "x",
+ SkillID = "pdf",
Type = Messages::Type.Anthropic,
- Version = "x",
+ Version = "latest",
},
],
},
@@ -289,9 +289,9 @@ public void FieldRoundtripThroughSerialization_Works()
[
new()
{
- SkillID = "x",
+ SkillID = "pdf",
Type = Messages::Type.Anthropic,
- Version = "x",
+ Version = "latest",
},
],
},
@@ -378,9 +378,9 @@ public void FieldRoundtripThroughSerialization_Works()
[
new()
{
- SkillID = "x",
+ SkillID = "pdf",
Type = Messages::Type.Anthropic,
- Version = "x",
+ Version = "latest",
},
],
},
@@ -465,9 +465,9 @@ public void Validation_Works()
[
new()
{
- SkillID = "x",
+ SkillID = "pdf",
Type = Messages::Type.Anthropic,
- Version = "x",
+ Version = "latest",
},
],
},
@@ -555,9 +555,9 @@ public void CopyConstructor_Works()
[
new()
{
- SkillID = "x",
+ SkillID = "pdf",
Type = Messages::Type.Anthropic,
- Version = "x",
+ Version = "latest",
},
],
},
diff --git a/src/Anthropic.Tests/Models/Beta/Messages/BetaRawMessageStreamEventTest.cs b/src/Anthropic.Tests/Models/Beta/Messages/BetaRawMessageStreamEventTest.cs
index be9d23273..af0f940cc 100644
--- a/src/Anthropic.Tests/Models/Beta/Messages/BetaRawMessageStreamEventTest.cs
+++ b/src/Anthropic.Tests/Models/Beta/Messages/BetaRawMessageStreamEventTest.cs
@@ -23,9 +23,9 @@ public void StartValidationWorks()
[
new()
{
- SkillID = "x",
+ SkillID = "pdf",
Type = Messages::Type.Anthropic,
- Version = "x",
+ Version = "latest",
},
],
},
@@ -120,9 +120,9 @@ public void DeltaValidationWorks()
[
new()
{
- SkillID = "x",
+ SkillID = "pdf",
Type = Messages::Type.Anthropic,
- Version = "x",
+ Version = "latest",
},
],
},
@@ -222,9 +222,9 @@ public void StartSerializationRoundtripWorks()
[
new()
{
- SkillID = "x",
+ SkillID = "pdf",
Type = Messages::Type.Anthropic,
- Version = "x",
+ Version = "latest",
},
],
},
@@ -325,9 +325,9 @@ public void DeltaSerializationRoundtripWorks()
[
new()
{
- SkillID = "x",
+ SkillID = "pdf",
Type = Messages::Type.Anthropic,
- Version = "x",
+ Version = "latest",
},
],
},
diff --git a/src/Anthropic.Tests/Models/Beta/Messages/BetaSkillParamsTest.cs b/src/Anthropic.Tests/Models/Beta/Messages/BetaSkillParamsTest.cs
index ad55042e1..11c4c29fa 100644
--- a/src/Anthropic.Tests/Models/Beta/Messages/BetaSkillParamsTest.cs
+++ b/src/Anthropic.Tests/Models/Beta/Messages/BetaSkillParamsTest.cs
@@ -12,14 +12,14 @@ public void FieldRoundtrip_Works()
{
var model = new BetaSkillParams
{
- SkillID = "x",
+ SkillID = "pdf",
Type = BetaSkillParamsType.Anthropic,
- Version = "x",
+ Version = "latest",
};
- string expectedSkillID = "x";
+ string expectedSkillID = "pdf";
ApiEnum expectedType = BetaSkillParamsType.Anthropic;
- string expectedVersion = "x";
+ string expectedVersion = "latest";
Assert.Equal(expectedSkillID, model.SkillID);
Assert.Equal(expectedType, model.Type);
@@ -31,9 +31,9 @@ public void SerializationRoundtrip_Works()
{
var model = new BetaSkillParams
{
- SkillID = "x",
+ SkillID = "pdf",
Type = BetaSkillParamsType.Anthropic,
- Version = "x",
+ Version = "latest",
};
string json = JsonSerializer.Serialize(model, ModelBase.SerializerOptions);
@@ -50,9 +50,9 @@ public void FieldRoundtripThroughSerialization_Works()
{
var model = new BetaSkillParams
{
- SkillID = "x",
+ SkillID = "pdf",
Type = BetaSkillParamsType.Anthropic,
- Version = "x",
+ Version = "latest",
};
string element = JsonSerializer.Serialize(model, ModelBase.SerializerOptions);
@@ -62,9 +62,9 @@ public void FieldRoundtripThroughSerialization_Works()
);
Assert.NotNull(deserialized);
- string expectedSkillID = "x";
+ string expectedSkillID = "pdf";
ApiEnum expectedType = BetaSkillParamsType.Anthropic;
- string expectedVersion = "x";
+ string expectedVersion = "latest";
Assert.Equal(expectedSkillID, deserialized.SkillID);
Assert.Equal(expectedType, deserialized.Type);
@@ -76,9 +76,9 @@ public void Validation_Works()
{
var model = new BetaSkillParams
{
- SkillID = "x",
+ SkillID = "pdf",
Type = BetaSkillParamsType.Anthropic,
- Version = "x",
+ Version = "latest",
};
model.Validate();
@@ -87,7 +87,7 @@ public void Validation_Works()
[Fact]
public void OptionalNonNullablePropertiesUnsetAreNotSet_Works()
{
- var model = new BetaSkillParams { SkillID = "x", Type = BetaSkillParamsType.Anthropic };
+ var model = new BetaSkillParams { SkillID = "pdf", Type = BetaSkillParamsType.Anthropic };
Assert.Null(model.Version);
Assert.False(model.RawData.ContainsKey("version"));
@@ -96,7 +96,7 @@ public void OptionalNonNullablePropertiesUnsetAreNotSet_Works()
[Fact]
public void OptionalNonNullablePropertiesUnsetValidation_Works()
{
- var model = new BetaSkillParams { SkillID = "x", Type = BetaSkillParamsType.Anthropic };
+ var model = new BetaSkillParams { SkillID = "pdf", Type = BetaSkillParamsType.Anthropic };
model.Validate();
}
@@ -106,7 +106,7 @@ public void OptionalNonNullablePropertiesSetToNullAreNotSet_Works()
{
var model = new BetaSkillParams
{
- SkillID = "x",
+ SkillID = "pdf",
Type = BetaSkillParamsType.Anthropic,
// Null should be interpreted as omitted for these properties
@@ -122,7 +122,7 @@ public void OptionalNonNullablePropertiesSetToNullValidation_Works()
{
var model = new BetaSkillParams
{
- SkillID = "x",
+ SkillID = "pdf",
Type = BetaSkillParamsType.Anthropic,
// Null should be interpreted as omitted for these properties
@@ -137,9 +137,9 @@ public void CopyConstructor_Works()
{
var model = new BetaSkillParams
{
- SkillID = "x",
+ SkillID = "pdf",
Type = BetaSkillParamsType.Anthropic,
- Version = "x",
+ Version = "latest",
};
BetaSkillParams copied = new(model);
diff --git a/src/Anthropic.Tests/Models/Beta/Messages/BetaSkillTest.cs b/src/Anthropic.Tests/Models/Beta/Messages/BetaSkillTest.cs
index a71419003..4e731a844 100644
--- a/src/Anthropic.Tests/Models/Beta/Messages/BetaSkillTest.cs
+++ b/src/Anthropic.Tests/Models/Beta/Messages/BetaSkillTest.cs
@@ -12,14 +12,14 @@ public void FieldRoundtrip_Works()
{
var model = new BetaSkill
{
- SkillID = "x",
+ SkillID = "pdf",
Type = Type.Anthropic,
- Version = "x",
+ Version = "latest",
};
- string expectedSkillID = "x";
+ string expectedSkillID = "pdf";
ApiEnum expectedType = Type.Anthropic;
- string expectedVersion = "x";
+ string expectedVersion = "latest";
Assert.Equal(expectedSkillID, model.SkillID);
Assert.Equal(expectedType, model.Type);
@@ -31,9 +31,9 @@ public void SerializationRoundtrip_Works()
{
var model = new BetaSkill
{
- SkillID = "x",
+ SkillID = "pdf",
Type = Type.Anthropic,
- Version = "x",
+ Version = "latest",
};
string json = JsonSerializer.Serialize(model, ModelBase.SerializerOptions);
@@ -47,9 +47,9 @@ public void FieldRoundtripThroughSerialization_Works()
{
var model = new BetaSkill
{
- SkillID = "x",
+ SkillID = "pdf",
Type = Type.Anthropic,
- Version = "x",
+ Version = "latest",
};
string element = JsonSerializer.Serialize(model, ModelBase.SerializerOptions);
@@ -59,9 +59,9 @@ public void FieldRoundtripThroughSerialization_Works()
);
Assert.NotNull(deserialized);
- string expectedSkillID = "x";
+ string expectedSkillID = "pdf";
ApiEnum expectedType = Type.Anthropic;
- string expectedVersion = "x";
+ string expectedVersion = "latest";
Assert.Equal(expectedSkillID, deserialized.SkillID);
Assert.Equal(expectedType, deserialized.Type);
@@ -73,9 +73,9 @@ public void Validation_Works()
{
var model = new BetaSkill
{
- SkillID = "x",
+ SkillID = "pdf",
Type = Type.Anthropic,
- Version = "x",
+ Version = "latest",
};
model.Validate();
@@ -86,9 +86,9 @@ public void CopyConstructor_Works()
{
var model = new BetaSkill
{
- SkillID = "x",
+ SkillID = "pdf",
Type = Type.Anthropic,
- Version = "x",
+ Version = "latest",
};
BetaSkill copied = new(model);
diff --git a/src/Anthropic.Tests/Models/Beta/Messages/BetaThinkingConfigAdaptiveTest.cs b/src/Anthropic.Tests/Models/Beta/Messages/BetaThinkingConfigAdaptiveTest.cs
index 999c6f693..4247ab8d7 100644
--- a/src/Anthropic.Tests/Models/Beta/Messages/BetaThinkingConfigAdaptiveTest.cs
+++ b/src/Anthropic.Tests/Models/Beta/Messages/BetaThinkingConfigAdaptiveTest.cs
@@ -1,5 +1,6 @@
using System.Text.Json;
using Anthropic.Core;
+using Anthropic.Exceptions;
using Anthropic.Models.Beta.Messages;
namespace Anthropic.Tests.Models.Beta.Messages;
@@ -9,17 +10,19 @@ public class BetaThinkingConfigAdaptiveTest : TestBase
[Fact]
public void FieldRoundtrip_Works()
{
- var model = new BetaThinkingConfigAdaptive { };
+ var model = new BetaThinkingConfigAdaptive { Display = Display.Summarized };
JsonElement expectedType = JsonSerializer.SerializeToElement("adaptive");
+ ApiEnum expectedDisplay = Display.Summarized;
Assert.True(JsonElement.DeepEquals(expectedType, model.Type));
+ Assert.Equal(expectedDisplay, model.Display);
}
[Fact]
public void SerializationRoundtrip_Works()
{
- var model = new BetaThinkingConfigAdaptive { };
+ var model = new BetaThinkingConfigAdaptive { Display = Display.Summarized };
string json = JsonSerializer.Serialize(model, ModelBase.SerializerOptions);
var deserialized = JsonSerializer.Deserialize(
@@ -33,7 +36,7 @@ public void SerializationRoundtrip_Works()
[Fact]
public void FieldRoundtripThroughSerialization_Works()
{
- var model = new BetaThinkingConfigAdaptive { };
+ var model = new BetaThinkingConfigAdaptive { Display = Display.Summarized };
string element = JsonSerializer.Serialize(model, ModelBase.SerializerOptions);
var deserialized = JsonSerializer.Deserialize(
@@ -43,25 +46,119 @@ public void FieldRoundtripThroughSerialization_Works()
Assert.NotNull(deserialized);
JsonElement expectedType = JsonSerializer.SerializeToElement("adaptive");
+ ApiEnum expectedDisplay = Display.Summarized;
Assert.True(JsonElement.DeepEquals(expectedType, deserialized.Type));
+ Assert.Equal(expectedDisplay, deserialized.Display);
}
[Fact]
public void Validation_Works()
+ {
+ var model = new BetaThinkingConfigAdaptive { Display = Display.Summarized };
+
+ model.Validate();
+ }
+
+ [Fact]
+ public void OptionalNullablePropertiesUnsetAreNotSet_Works()
+ {
+ var model = new BetaThinkingConfigAdaptive { };
+
+ Assert.Null(model.Display);
+ Assert.False(model.RawData.ContainsKey("display"));
+ }
+
+ [Fact]
+ public void OptionalNullablePropertiesUnsetValidation_Works()
{
var model = new BetaThinkingConfigAdaptive { };
model.Validate();
}
+ [Fact]
+ public void OptionalNullablePropertiesSetToNullAreSetToNull_Works()
+ {
+ var model = new BetaThinkingConfigAdaptive { Display = null };
+
+ Assert.Null(model.Display);
+ Assert.True(model.RawData.ContainsKey("display"));
+ }
+
+ [Fact]
+ public void OptionalNullablePropertiesSetToNullValidation_Works()
+ {
+ var model = new BetaThinkingConfigAdaptive { Display = null };
+
+ model.Validate();
+ }
+
[Fact]
public void CopyConstructor_Works()
{
- var model = new BetaThinkingConfigAdaptive { };
+ var model = new BetaThinkingConfigAdaptive { Display = Display.Summarized };
BetaThinkingConfigAdaptive copied = new(model);
Assert.Equal(model, copied);
}
}
+
+public class DisplayTest : TestBase
+{
+ [Theory]
+ [InlineData(Display.Summarized)]
+ [InlineData(Display.Omitted)]
+ public void Validation_Works(Display rawValue)
+ {
+ // force implicit conversion because Theory can't do that for us
+ ApiEnum value = rawValue;
+ value.Validate();
+ }
+
+ [Fact]
+ public void InvalidEnumValidationThrows_Works()
+ {
+ var value = JsonSerializer.Deserialize>(
+ JsonSerializer.SerializeToElement("invalid value"),
+ ModelBase.SerializerOptions
+ );
+
+ Assert.NotNull(value);
+ Assert.Throws(() => value.Validate());
+ }
+
+ [Theory]
+ [InlineData(Display.Summarized)]
+ [InlineData(Display.Omitted)]
+ public void SerializationRoundtrip_Works(Display rawValue)
+ {
+ // force implicit conversion because Theory can't do that for us
+ ApiEnum value = rawValue;
+
+ string json = JsonSerializer.Serialize(value, ModelBase.SerializerOptions);
+ var deserialized = JsonSerializer.Deserialize>(
+ json,
+ ModelBase.SerializerOptions
+ );
+
+ Assert.Equal(value, deserialized);
+ }
+
+ [Fact]
+ public void InvalidEnumSerializationRoundtrip_Works()
+ {
+ var value = JsonSerializer.Deserialize>(
+ JsonSerializer.SerializeToElement("invalid value"),
+ ModelBase.SerializerOptions
+ );
+ string json = JsonSerializer.Serialize(value, ModelBase.SerializerOptions);
+ var deserialized = JsonSerializer.Deserialize>(
+ json,
+ ModelBase.SerializerOptions
+ );
+
+ Assert.Equal(value, deserialized);
+ }
+}
diff --git a/src/Anthropic.Tests/Models/Beta/Messages/BetaThinkingConfigEnabledTest.cs b/src/Anthropic.Tests/Models/Beta/Messages/BetaThinkingConfigEnabledTest.cs
index 5b7d375ef..7784d66c7 100644
--- a/src/Anthropic.Tests/Models/Beta/Messages/BetaThinkingConfigEnabledTest.cs
+++ b/src/Anthropic.Tests/Models/Beta/Messages/BetaThinkingConfigEnabledTest.cs
@@ -1,5 +1,6 @@
using System.Text.Json;
using Anthropic.Core;
+using Anthropic.Exceptions;
using Anthropic.Models.Beta.Messages;
namespace Anthropic.Tests.Models.Beta.Messages;
@@ -9,19 +10,30 @@ public class BetaThinkingConfigEnabledTest : TestBase
[Fact]
public void FieldRoundtrip_Works()
{
- var model = new BetaThinkingConfigEnabled { BudgetTokens = 1024 };
+ var model = new BetaThinkingConfigEnabled
+ {
+ BudgetTokens = 1024,
+ Display = BetaThinkingConfigEnabledDisplay.Summarized,
+ };
long expectedBudgetTokens = 1024;
JsonElement expectedType = JsonSerializer.SerializeToElement("enabled");
+ ApiEnum expectedDisplay =
+ BetaThinkingConfigEnabledDisplay.Summarized;
Assert.Equal(expectedBudgetTokens, model.BudgetTokens);
Assert.True(JsonElement.DeepEquals(expectedType, model.Type));
+ Assert.Equal(expectedDisplay, model.Display);
}
[Fact]
public void SerializationRoundtrip_Works()
{
- var model = new BetaThinkingConfigEnabled { BudgetTokens = 1024 };
+ var model = new BetaThinkingConfigEnabled
+ {
+ BudgetTokens = 1024,
+ Display = BetaThinkingConfigEnabledDisplay.Summarized,
+ };
string json = JsonSerializer.Serialize(model, ModelBase.SerializerOptions);
var deserialized = JsonSerializer.Deserialize(
@@ -35,7 +47,11 @@ public void SerializationRoundtrip_Works()
[Fact]
public void FieldRoundtripThroughSerialization_Works()
{
- var model = new BetaThinkingConfigEnabled { BudgetTokens = 1024 };
+ var model = new BetaThinkingConfigEnabled
+ {
+ BudgetTokens = 1024,
+ Display = BetaThinkingConfigEnabledDisplay.Summarized,
+ };
string element = JsonSerializer.Serialize(model, ModelBase.SerializerOptions);
var deserialized = JsonSerializer.Deserialize(
@@ -46,26 +62,137 @@ public void FieldRoundtripThroughSerialization_Works()
long expectedBudgetTokens = 1024;
JsonElement expectedType = JsonSerializer.SerializeToElement("enabled");
+ ApiEnum expectedDisplay =
+ BetaThinkingConfigEnabledDisplay.Summarized;
Assert.Equal(expectedBudgetTokens, deserialized.BudgetTokens);
Assert.True(JsonElement.DeepEquals(expectedType, deserialized.Type));
+ Assert.Equal(expectedDisplay, deserialized.Display);
}
[Fact]
public void Validation_Works()
+ {
+ var model = new BetaThinkingConfigEnabled
+ {
+ BudgetTokens = 1024,
+ Display = BetaThinkingConfigEnabledDisplay.Summarized,
+ };
+
+ model.Validate();
+ }
+
+ [Fact]
+ public void OptionalNullablePropertiesUnsetAreNotSet_Works()
+ {
+ var model = new BetaThinkingConfigEnabled { BudgetTokens = 1024 };
+
+ Assert.Null(model.Display);
+ Assert.False(model.RawData.ContainsKey("display"));
+ }
+
+ [Fact]
+ public void OptionalNullablePropertiesUnsetValidation_Works()
{
var model = new BetaThinkingConfigEnabled { BudgetTokens = 1024 };
model.Validate();
}
+ [Fact]
+ public void OptionalNullablePropertiesSetToNullAreSetToNull_Works()
+ {
+ var model = new BetaThinkingConfigEnabled
+ {
+ BudgetTokens = 1024,
+
+ Display = null,
+ };
+
+ Assert.Null(model.Display);
+ Assert.True(model.RawData.ContainsKey("display"));
+ }
+
+ [Fact]
+ public void OptionalNullablePropertiesSetToNullValidation_Works()
+ {
+ var model = new BetaThinkingConfigEnabled
+ {
+ BudgetTokens = 1024,
+
+ Display = null,
+ };
+
+ model.Validate();
+ }
+
[Fact]
public void CopyConstructor_Works()
{
- var model = new BetaThinkingConfigEnabled { BudgetTokens = 1024 };
+ var model = new BetaThinkingConfigEnabled
+ {
+ BudgetTokens = 1024,
+ Display = BetaThinkingConfigEnabledDisplay.Summarized,
+ };
BetaThinkingConfigEnabled copied = new(model);
Assert.Equal(model, copied);
}
}
+
+public class BetaThinkingConfigEnabledDisplayTest : TestBase
+{
+ [Theory]
+ [InlineData(BetaThinkingConfigEnabledDisplay.Summarized)]
+ [InlineData(BetaThinkingConfigEnabledDisplay.Omitted)]
+ public void Validation_Works(BetaThinkingConfigEnabledDisplay rawValue)
+ {
+ // force implicit conversion because Theory can't do that for us
+ ApiEnum value = rawValue;
+ value.Validate();
+ }
+
+ [Fact]
+ public void InvalidEnumValidationThrows_Works()
+ {
+ var value = JsonSerializer.Deserialize>(
+ JsonSerializer.SerializeToElement("invalid value"),
+ ModelBase.SerializerOptions
+ );
+
+ Assert.NotNull(value);
+ Assert.Throws(() => value.Validate());
+ }
+
+ [Theory]
+ [InlineData(BetaThinkingConfigEnabledDisplay.Summarized)]
+ [InlineData(BetaThinkingConfigEnabledDisplay.Omitted)]
+ public void SerializationRoundtrip_Works(BetaThinkingConfigEnabledDisplay rawValue)
+ {
+ // force implicit conversion because Theory can't do that for us
+ ApiEnum value = rawValue;
+
+ string json = JsonSerializer.Serialize(value, ModelBase.SerializerOptions);
+ var deserialized = JsonSerializer.Deserialize<
+ ApiEnum
+ >(json, ModelBase.SerializerOptions);
+
+ Assert.Equal(value, deserialized);
+ }
+
+ [Fact]
+ public void InvalidEnumSerializationRoundtrip_Works()
+ {
+ var value = JsonSerializer.Deserialize>(
+ JsonSerializer.SerializeToElement("invalid value"),
+ ModelBase.SerializerOptions
+ );
+ string json = JsonSerializer.Serialize(value, ModelBase.SerializerOptions);
+ var deserialized = JsonSerializer.Deserialize<
+ ApiEnum
+ >(json, ModelBase.SerializerOptions);
+
+ Assert.Equal(value, deserialized);
+ }
+}
diff --git a/src/Anthropic.Tests/Models/Beta/Messages/BetaThinkingConfigParamTest.cs b/src/Anthropic.Tests/Models/Beta/Messages/BetaThinkingConfigParamTest.cs
index 1730ccf05..2c102a6ae 100644
--- a/src/Anthropic.Tests/Models/Beta/Messages/BetaThinkingConfigParamTest.cs
+++ b/src/Anthropic.Tests/Models/Beta/Messages/BetaThinkingConfigParamTest.cs
@@ -9,7 +9,11 @@ public class BetaThinkingConfigParamTest : TestBase
[Fact]
public void EnabledValidationWorks()
{
- BetaThinkingConfigParam value = new BetaThinkingConfigEnabled(1024);
+ BetaThinkingConfigParam value = new BetaThinkingConfigEnabled()
+ {
+ BudgetTokens = 1024,
+ Display = BetaThinkingConfigEnabledDisplay.Summarized,
+ };
value.Validate();
}
@@ -23,14 +27,21 @@ public void DisabledValidationWorks()
[Fact]
public void AdaptiveValidationWorks()
{
- BetaThinkingConfigParam value = new BetaThinkingConfigAdaptive();
+ BetaThinkingConfigParam value = new BetaThinkingConfigAdaptive()
+ {
+ Display = Display.Summarized,
+ };
value.Validate();
}
[Fact]
public void EnabledSerializationRoundtripWorks()
{
- BetaThinkingConfigParam value = new BetaThinkingConfigEnabled(1024);
+ BetaThinkingConfigParam value = new BetaThinkingConfigEnabled()
+ {
+ BudgetTokens = 1024,
+ Display = BetaThinkingConfigEnabledDisplay.Summarized,
+ };
string element = JsonSerializer.Serialize(value, ModelBase.SerializerOptions);
var deserialized = JsonSerializer.Deserialize(
element,
@@ -56,7 +67,10 @@ public void DisabledSerializationRoundtripWorks()
[Fact]
public void AdaptiveSerializationRoundtripWorks()
{
- BetaThinkingConfigParam value = new BetaThinkingConfigAdaptive();
+ BetaThinkingConfigParam value = new BetaThinkingConfigAdaptive()
+ {
+ Display = Display.Summarized,
+ };
string element = JsonSerializer.Serialize(value, ModelBase.SerializerOptions);
var deserialized = JsonSerializer.Deserialize(
element,
diff --git a/src/Anthropic.Tests/Models/Beta/Messages/BetaToolUnionTest.cs b/src/Anthropic.Tests/Models/Beta/Messages/BetaToolUnionTest.cs
index 167548cec..c5c188281 100644
--- a/src/Anthropic.Tests/Models/Beta/Messages/BetaToolUnionTest.cs
+++ b/src/Anthropic.Tests/Models/Beta/Messages/BetaToolUnionTest.cs
@@ -372,6 +372,25 @@ public void WebFetchTool20260209ValidationWorks()
value.Validate();
}
+ [Fact]
+ public void WebFetchTool20260309ValidationWorks()
+ {
+ BetaToolUnion value = new BetaWebFetchTool20260309()
+ {
+ AllowedCallers = [BetaWebFetchTool20260309AllowedCaller.Direct],
+ AllowedDomains = ["string"],
+ BlockedDomains = ["string"],
+ CacheControl = new() { Ttl = Ttl.Ttl5m },
+ Citations = new() { Enabled = true },
+ DeferLoading = true,
+ MaxContentTokens = 1,
+ MaxUses = 1,
+ Strict = true,
+ UseCache = true,
+ };
+ value.Validate();
+ }
+
[Fact]
public void SearchToolBm25_20251119ValidationWorks()
{
@@ -892,6 +911,31 @@ public void WebFetchTool20260209SerializationRoundtripWorks()
Assert.Equal(value, deserialized);
}
+ [Fact]
+ public void WebFetchTool20260309SerializationRoundtripWorks()
+ {
+ BetaToolUnion value = new BetaWebFetchTool20260309()
+ {
+ AllowedCallers = [BetaWebFetchTool20260309AllowedCaller.Direct],
+ AllowedDomains = ["string"],
+ BlockedDomains = ["string"],
+ CacheControl = new() { Ttl = Ttl.Ttl5m },
+ Citations = new() { Enabled = true },
+ DeferLoading = true,
+ MaxContentTokens = 1,
+ MaxUses = 1,
+ Strict = true,
+ UseCache = true,
+ };
+ string element = JsonSerializer.Serialize(value, ModelBase.SerializerOptions);
+ var deserialized = JsonSerializer.Deserialize(
+ element,
+ ModelBase.SerializerOptions
+ );
+
+ Assert.Equal(value, deserialized);
+ }
+
[Fact]
public void SearchToolBm25_20251119SerializationRoundtripWorks()
{
diff --git a/src/Anthropic.Tests/Models/Beta/Messages/BetaWebFetchTool20260309Test.cs b/src/Anthropic.Tests/Models/Beta/Messages/BetaWebFetchTool20260309Test.cs
new file mode 100644
index 000000000..a4572c0bb
--- /dev/null
+++ b/src/Anthropic.Tests/Models/Beta/Messages/BetaWebFetchTool20260309Test.cs
@@ -0,0 +1,447 @@
+using System.Collections.Generic;
+using System.Text.Json;
+using Anthropic.Core;
+using Anthropic.Exceptions;
+using Anthropic.Models.Beta.Messages;
+
+namespace Anthropic.Tests.Models.Beta.Messages;
+
+public class BetaWebFetchTool20260309Test : TestBase
+{
+ [Fact]
+ public void FieldRoundtrip_Works()
+ {
+ var model = new BetaWebFetchTool20260309
+ {
+ AllowedCallers = [BetaWebFetchTool20260309AllowedCaller.Direct],
+ AllowedDomains = ["string"],
+ BlockedDomains = ["string"],
+ CacheControl = new() { Ttl = Ttl.Ttl5m },
+ Citations = new() { Enabled = true },
+ DeferLoading = true,
+ MaxContentTokens = 1,
+ MaxUses = 1,
+ Strict = true,
+ UseCache = true,
+ };
+
+ JsonElement expectedName = JsonSerializer.SerializeToElement("web_fetch");
+ JsonElement expectedType = JsonSerializer.SerializeToElement("web_fetch_20260309");
+ List<ApiEnum<string, BetaWebFetchTool20260309AllowedCaller>> expectedAllowedCallers =
+ [
+ BetaWebFetchTool20260309AllowedCaller.Direct,
+ ];
+ List<string> expectedAllowedDomains = ["string"];
+ List<string> expectedBlockedDomains = ["string"];
+ BetaCacheControlEphemeral expectedCacheControl = new() { Ttl = Ttl.Ttl5m };
+ BetaCitationsConfigParam expectedCitations = new() { Enabled = true };
+ bool expectedDeferLoading = true;
+ long expectedMaxContentTokens = 1;
+ long expectedMaxUses = 1;
+ bool expectedStrict = true;
+ bool expectedUseCache = true;
+
+ Assert.True(JsonElement.DeepEquals(expectedName, model.Name));
+ Assert.True(JsonElement.DeepEquals(expectedType, model.Type));
+ Assert.NotNull(model.AllowedCallers);
+ Assert.Equal(expectedAllowedCallers.Count, model.AllowedCallers.Count);
+ for (int i = 0; i < expectedAllowedCallers.Count; i++)
+ {
+ Assert.Equal(expectedAllowedCallers[i], model.AllowedCallers[i]);
+ }
+ Assert.NotNull(model.AllowedDomains);
+ Assert.Equal(expectedAllowedDomains.Count, model.AllowedDomains.Count);
+ for (int i = 0; i < expectedAllowedDomains.Count; i++)
+ {
+ Assert.Equal(expectedAllowedDomains[i], model.AllowedDomains[i]);
+ }
+ Assert.NotNull(model.BlockedDomains);
+ Assert.Equal(expectedBlockedDomains.Count, model.BlockedDomains.Count);
+ for (int i = 0; i < expectedBlockedDomains.Count; i++)
+ {
+ Assert.Equal(expectedBlockedDomains[i], model.BlockedDomains[i]);
+ }
+ Assert.Equal(expectedCacheControl, model.CacheControl);
+ Assert.Equal(expectedCitations, model.Citations);
+ Assert.Equal(expectedDeferLoading, model.DeferLoading);
+ Assert.Equal(expectedMaxContentTokens, model.MaxContentTokens);
+ Assert.Equal(expectedMaxUses, model.MaxUses);
+ Assert.Equal(expectedStrict, model.Strict);
+ Assert.Equal(expectedUseCache, model.UseCache);
+ }
+
+ [Fact]
+ public void SerializationRoundtrip_Works()
+ {
+ var model = new BetaWebFetchTool20260309
+ {
+ AllowedCallers = [BetaWebFetchTool20260309AllowedCaller.Direct],
+ AllowedDomains = ["string"],
+ BlockedDomains = ["string"],
+ CacheControl = new() { Ttl = Ttl.Ttl5m },
+ Citations = new() { Enabled = true },
+ DeferLoading = true,
+ MaxContentTokens = 1,
+ MaxUses = 1,
+ Strict = true,
+ UseCache = true,
+ };
+
+ string json = JsonSerializer.Serialize(model, ModelBase.SerializerOptions);
+ var deserialized = JsonSerializer.Deserialize<BetaWebFetchTool20260309>(
+ json,
+ ModelBase.SerializerOptions
+ );
+
+ Assert.Equal(model, deserialized);
+ }
+
+ [Fact]
+ public void FieldRoundtripThroughSerialization_Works()
+ {
+ var model = new BetaWebFetchTool20260309
+ {
+ AllowedCallers = [BetaWebFetchTool20260309AllowedCaller.Direct],
+ AllowedDomains = ["string"],
+ BlockedDomains = ["string"],
+ CacheControl = new() { Ttl = Ttl.Ttl5m },
+ Citations = new() { Enabled = true },
+ DeferLoading = true,
+ MaxContentTokens = 1,
+ MaxUses = 1,
+ Strict = true,
+ UseCache = true,
+ };
+
+ string element = JsonSerializer.Serialize(model, ModelBase.SerializerOptions);
+ var deserialized = JsonSerializer.Deserialize<BetaWebFetchTool20260309>(
+ element,
+ ModelBase.SerializerOptions
+ );
+ Assert.NotNull(deserialized);
+
+ JsonElement expectedName = JsonSerializer.SerializeToElement("web_fetch");
+ JsonElement expectedType = JsonSerializer.SerializeToElement("web_fetch_20260309");
+ List<ApiEnum<string, BetaWebFetchTool20260309AllowedCaller>> expectedAllowedCallers =
+ [
+ BetaWebFetchTool20260309AllowedCaller.Direct,
+ ];
+ List<string> expectedAllowedDomains = ["string"];
+ List<string> expectedBlockedDomains = ["string"];
+ BetaCacheControlEphemeral expectedCacheControl = new() { Ttl = Ttl.Ttl5m };
+ BetaCitationsConfigParam expectedCitations = new() { Enabled = true };
+ bool expectedDeferLoading = true;
+ long expectedMaxContentTokens = 1;
+ long expectedMaxUses = 1;
+ bool expectedStrict = true;
+ bool expectedUseCache = true;
+
+ Assert.True(JsonElement.DeepEquals(expectedName, deserialized.Name));
+ Assert.True(JsonElement.DeepEquals(expectedType, deserialized.Type));
+ Assert.NotNull(deserialized.AllowedCallers);
+ Assert.Equal(expectedAllowedCallers.Count, deserialized.AllowedCallers.Count);
+ for (int i = 0; i < expectedAllowedCallers.Count; i++)
+ {
+ Assert.Equal(expectedAllowedCallers[i], deserialized.AllowedCallers[i]);
+ }
+ Assert.NotNull(deserialized.AllowedDomains);
+ Assert.Equal(expectedAllowedDomains.Count, deserialized.AllowedDomains.Count);
+ for (int i = 0; i < expectedAllowedDomains.Count; i++)
+ {
+ Assert.Equal(expectedAllowedDomains[i], deserialized.AllowedDomains[i]);
+ }
+ Assert.NotNull(deserialized.BlockedDomains);
+ Assert.Equal(expectedBlockedDomains.Count, deserialized.BlockedDomains.Count);
+ for (int i = 0; i < expectedBlockedDomains.Count; i++)
+ {
+ Assert.Equal(expectedBlockedDomains[i], deserialized.BlockedDomains[i]);
+ }
+ Assert.Equal(expectedCacheControl, deserialized.CacheControl);
+ Assert.Equal(expectedCitations, deserialized.Citations);
+ Assert.Equal(expectedDeferLoading, deserialized.DeferLoading);
+ Assert.Equal(expectedMaxContentTokens, deserialized.MaxContentTokens);
+ Assert.Equal(expectedMaxUses, deserialized.MaxUses);
+ Assert.Equal(expectedStrict, deserialized.Strict);
+ Assert.Equal(expectedUseCache, deserialized.UseCache);
+ }
+
+ [Fact]
+ public void Validation_Works()
+ {
+ var model = new BetaWebFetchTool20260309
+ {
+ AllowedCallers = [BetaWebFetchTool20260309AllowedCaller.Direct],
+ AllowedDomains = ["string"],
+ BlockedDomains = ["string"],
+ CacheControl = new() { Ttl = Ttl.Ttl5m },
+ Citations = new() { Enabled = true },
+ DeferLoading = true,
+ MaxContentTokens = 1,
+ MaxUses = 1,
+ Strict = true,
+ UseCache = true,
+ };
+
+ model.Validate();
+ }
+
+ [Fact]
+ public void OptionalNonNullablePropertiesUnsetAreNotSet_Works()
+ {
+ var model = new BetaWebFetchTool20260309
+ {
+ AllowedDomains = ["string"],
+ BlockedDomains = ["string"],
+ CacheControl = new() { Ttl = Ttl.Ttl5m },
+ Citations = new() { Enabled = true },
+ MaxContentTokens = 1,
+ MaxUses = 1,
+ };
+
+ Assert.Null(model.AllowedCallers);
+ Assert.False(model.RawData.ContainsKey("allowed_callers"));
+ Assert.Null(model.DeferLoading);
+ Assert.False(model.RawData.ContainsKey("defer_loading"));
+ Assert.Null(model.Strict);
+ Assert.False(model.RawData.ContainsKey("strict"));
+ Assert.Null(model.UseCache);
+ Assert.False(model.RawData.ContainsKey("use_cache"));
+ }
+
+ [Fact]
+ public void OptionalNonNullablePropertiesUnsetValidation_Works()
+ {
+ var model = new BetaWebFetchTool20260309
+ {
+ AllowedDomains = ["string"],
+ BlockedDomains = ["string"],
+ CacheControl = new() { Ttl = Ttl.Ttl5m },
+ Citations = new() { Enabled = true },
+ MaxContentTokens = 1,
+ MaxUses = 1,
+ };
+
+ model.Validate();
+ }
+
+ [Fact]
+ public void OptionalNonNullablePropertiesSetToNullAreNotSet_Works()
+ {
+ var model = new BetaWebFetchTool20260309
+ {
+ AllowedDomains = ["string"],
+ BlockedDomains = ["string"],
+ CacheControl = new() { Ttl = Ttl.Ttl5m },
+ Citations = new() { Enabled = true },
+ MaxContentTokens = 1,
+ MaxUses = 1,
+
+ // Null should be interpreted as omitted for these properties
+ AllowedCallers = null,
+ DeferLoading = null,
+ Strict = null,
+ UseCache = null,
+ };
+
+ Assert.Null(model.AllowedCallers);
+ Assert.False(model.RawData.ContainsKey("allowed_callers"));
+ Assert.Null(model.DeferLoading);
+ Assert.False(model.RawData.ContainsKey("defer_loading"));
+ Assert.Null(model.Strict);
+ Assert.False(model.RawData.ContainsKey("strict"));
+ Assert.Null(model.UseCache);
+ Assert.False(model.RawData.ContainsKey("use_cache"));
+ }
+
+ [Fact]
+ public void OptionalNonNullablePropertiesSetToNullValidation_Works()
+ {
+ var model = new BetaWebFetchTool20260309
+ {
+ AllowedDomains = ["string"],
+ BlockedDomains = ["string"],
+ CacheControl = new() { Ttl = Ttl.Ttl5m },
+ Citations = new() { Enabled = true },
+ MaxContentTokens = 1,
+ MaxUses = 1,
+
+ // Null should be interpreted as omitted for these properties
+ AllowedCallers = null,
+ DeferLoading = null,
+ Strict = null,
+ UseCache = null,
+ };
+
+ model.Validate();
+ }
+
+ [Fact]
+ public void OptionalNullablePropertiesUnsetAreNotSet_Works()
+ {
+ var model = new BetaWebFetchTool20260309
+ {
+ AllowedCallers = [BetaWebFetchTool20260309AllowedCaller.Direct],
+ DeferLoading = true,
+ Strict = true,
+ UseCache = true,
+ };
+
+ Assert.Null(model.AllowedDomains);
+ Assert.False(model.RawData.ContainsKey("allowed_domains"));
+ Assert.Null(model.BlockedDomains);
+ Assert.False(model.RawData.ContainsKey("blocked_domains"));
+ Assert.Null(model.CacheControl);
+ Assert.False(model.RawData.ContainsKey("cache_control"));
+ Assert.Null(model.Citations);
+ Assert.False(model.RawData.ContainsKey("citations"));
+ Assert.Null(model.MaxContentTokens);
+ Assert.False(model.RawData.ContainsKey("max_content_tokens"));
+ Assert.Null(model.MaxUses);
+ Assert.False(model.RawData.ContainsKey("max_uses"));
+ }
+
+ [Fact]
+ public void OptionalNullablePropertiesUnsetValidation_Works()
+ {
+ var model = new BetaWebFetchTool20260309
+ {
+ AllowedCallers = [BetaWebFetchTool20260309AllowedCaller.Direct],
+ DeferLoading = true,
+ Strict = true,
+ UseCache = true,
+ };
+
+ model.Validate();
+ }
+
+ [Fact]
+ public void OptionalNullablePropertiesSetToNullAreSetToNull_Works()
+ {
+ var model = new BetaWebFetchTool20260309
+ {
+ AllowedCallers = [BetaWebFetchTool20260309AllowedCaller.Direct],
+ DeferLoading = true,
+ Strict = true,
+ UseCache = true,
+
+ AllowedDomains = null,
+ BlockedDomains = null,
+ CacheControl = null,
+ Citations = null,
+ MaxContentTokens = null,
+ MaxUses = null,
+ };
+
+ Assert.Null(model.AllowedDomains);
+ Assert.True(model.RawData.ContainsKey("allowed_domains"));
+ Assert.Null(model.BlockedDomains);
+ Assert.True(model.RawData.ContainsKey("blocked_domains"));
+ Assert.Null(model.CacheControl);
+ Assert.True(model.RawData.ContainsKey("cache_control"));
+ Assert.Null(model.Citations);
+ Assert.True(model.RawData.ContainsKey("citations"));
+ Assert.Null(model.MaxContentTokens);
+ Assert.True(model.RawData.ContainsKey("max_content_tokens"));
+ Assert.Null(model.MaxUses);
+ Assert.True(model.RawData.ContainsKey("max_uses"));
+ }
+
+ [Fact]
+ public void OptionalNullablePropertiesSetToNullValidation_Works()
+ {
+ var model = new BetaWebFetchTool20260309
+ {
+ AllowedCallers = [BetaWebFetchTool20260309AllowedCaller.Direct],
+ DeferLoading = true,
+ Strict = true,
+ UseCache = true,
+
+ AllowedDomains = null,
+ BlockedDomains = null,
+ CacheControl = null,
+ Citations = null,
+ MaxContentTokens = null,
+ MaxUses = null,
+ };
+
+ model.Validate();
+ }
+
+ [Fact]
+ public void CopyConstructor_Works()
+ {
+ var model = new BetaWebFetchTool20260309
+ {
+ AllowedCallers = [BetaWebFetchTool20260309AllowedCaller.Direct],
+ AllowedDomains = ["string"],
+ BlockedDomains = ["string"],
+ CacheControl = new() { Ttl = Ttl.Ttl5m },
+ Citations = new() { Enabled = true },
+ DeferLoading = true,
+ MaxContentTokens = 1,
+ MaxUses = 1,
+ Strict = true,
+ UseCache = true,
+ };
+
+ BetaWebFetchTool20260309 copied = new(model);
+
+ Assert.Equal(model, copied);
+ }
+}
+
+public class BetaWebFetchTool20260309AllowedCallerTest : TestBase
+{
+ [Theory]
+ [InlineData(BetaWebFetchTool20260309AllowedCaller.Direct)]
+ [InlineData(BetaWebFetchTool20260309AllowedCaller.CodeExecution20250825)]
+ [InlineData(BetaWebFetchTool20260309AllowedCaller.CodeExecution20260120)]
+ public void Validation_Works(BetaWebFetchTool20260309AllowedCaller rawValue)
+ {
+ // force implicit conversion because Theory can't do that for us
+ ApiEnum