
Commit 09b98ef

wip
1 parent 994700f commit 09b98ef

File tree: 48 files changed (+411, -462 lines)


examples/Responses/Example01_SimpleResponse.cs

Lines changed: 2 additions & 2 deletions
@@ -15,9 +15,9 @@ public void Example01_SimpleResponse()
     {
         OpenAIResponseClient client = new(model: "gpt-5", apiKey: Environment.GetEnvironmentVariable("OPENAI_API_KEY"));

-        OpenAIResponse response = client.CreateResponse("Say 'this is a test.'");
+        ResponseResult response = client.CreateResponse(new ([ResponseItem.CreateUserMessageItem("Say 'this is a test.'")]));

-        Console.WriteLine($"[ASSISTANT]: {response.GetOutputText()}");
+        Console.WriteLine($"[ASSISTANT]: {response.OutputText}");
     }
 }

examples/Responses/Example01_SimpleResponseAsync.cs

Lines changed: 2 additions & 2 deletions
@@ -16,9 +16,9 @@ public async Task Example01_SimpleResponseAsync()
     {
         OpenAIResponseClient client = new(model: "gpt-5", apiKey: Environment.GetEnvironmentVariable("OPENAI_API_KEY"));

-        OpenAIResponse response = await client.CreateResponseAsync("Say 'this is a test.'");
+        ResponseResult response = await client.CreateResponseAsync(new ([ResponseItem.CreateUserMessageItem("Say 'this is a test.'")]));

-        Console.WriteLine($"[ASSISTANT]: {response.GetOutputText()}");
+        Console.WriteLine($"[ASSISTANT]: {response.OutputText}");
     }
 }
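
For reference, a minimal end-to-end sketch of the non-streaming call shape after this change, pieced together from the two diffs above. The type and member names (CreateResponseOptions, ResponseResult, OutputText) come from the diff itself; the using directive and the exact constructor overload are assumptions, since this is a wip commit:

using System;
using System.Collections.Generic;
using OpenAI.Responses; // assumed namespace for OpenAIResponseClient, ResponseItem, etc.

public static class SimpleResponseSketch
{
    public static void Run()
    {
        // Client construction is unchanged by this commit.
        OpenAIResponseClient client = new(
            model: "gpt-5",
            apiKey: Environment.GetEnvironmentVariable("OPENAI_API_KEY"));

        // New shape: the input items travel inside the options object
        // instead of being passed as a separate string or list argument.
        List<ResponseItem> inputItems =
        [
            ResponseItem.CreateUserMessageItem("Say 'this is a test.'"),
        ];
        CreateResponseOptions options = new(inputItems);

        // CreateResponse now takes only the options; the aggregated text
        // moves from GetOutputText() to the OutputText property.
        ResponseResult response = client.CreateResponse(options);
        Console.WriteLine($"[ASSISTANT]: {response.OutputText}");
    }
}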

examples/Responses/Example02_SimpleResponseStreaming.cs

Lines changed: 1 addition & 1 deletion
@@ -16,7 +16,7 @@ public void Example02_SimpleResponseStreaming()
     {
         OpenAIResponseClient client = new(model: "gpt-5", apiKey: Environment.GetEnvironmentVariable("OPENAI_API_KEY"));

-        CollectionResult<StreamingResponseUpdate> responseUpdates = client.CreateResponseStreaming("Say 'this is a test.'");
+        CollectionResult<StreamingResponseUpdate> responseUpdates = client.CreateResponseStreaming(new ([ResponseItem.CreateUserMessageItem("Say 'this is a test.'")]));

         Console.Write($"[ASSISTANT]: ");
         foreach (StreamingResponseUpdate update in responseUpdates)

examples/Responses/Example02_SimpleResponseStreamingAsync.cs

Lines changed: 1 addition & 1 deletion
@@ -18,7 +18,7 @@ public async Task Example02_SimpleResponseStreamingAsync()
     {
         OpenAIResponseClient client = new(model: "gpt-5", apiKey: Environment.GetEnvironmentVariable("OPENAI_API_KEY"));

-        AsyncCollectionResult<StreamingResponseUpdate> responseUpdates = client.CreateResponseStreamingAsync("Say 'this is a test.'");
+        AsyncCollectionResult<StreamingResponseUpdate> responseUpdates = client.CreateResponseStreamingAsync(new ([ResponseItem.CreateUserMessageItem("Say 'this is a test.'")]));

         Console.Write($"[ASSISTANT]: ");
         await foreach (StreamingResponseUpdate update in responseUpdates)
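
The streaming variants change the same way: the prompt is wrapped in CreateResponseOptions and passed as the only argument. A hedged sketch of consuming the stream under that shape; the loop body is not part of this diff, and the StreamingResponseOutputTextDeltaUpdate delta type is assumed from the library's existing streaming examples:

using System;
using System.ClientModel; // assumed home of AsyncCollectionResult<T>
using System.Threading.Tasks;
using OpenAI.Responses; // assumed namespace

public static class StreamingResponseSketch
{
    public static async Task RunAsync()
    {
        OpenAIResponseClient client = new(
            model: "gpt-5",
            apiKey: Environment.GetEnvironmentVariable("OPENAI_API_KEY"));

        // As in the diff: the input items are carried by the options object.
        CreateResponseOptions options = new(
            [ResponseItem.CreateUserMessageItem("Say 'this is a test.'")]);

        AsyncCollectionResult<StreamingResponseUpdate> responseUpdates =
            client.CreateResponseStreamingAsync(options);

        Console.Write("[ASSISTANT]: ");
        await foreach (StreamingResponseUpdate update in responseUpdates)
        {
            // Assumed delta-update type; this wip surface may rename it.
            if (update is StreamingResponseOutputTextDeltaUpdate delta)
            {
                Console.Write(delta.Delta);
            }
        }
        Console.WriteLine();
    }
}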

examples/Responses/Example03_FunctionCalling.cs

Lines changed: 5 additions & 5 deletions
@@ -69,7 +69,7 @@ public void Example03_FunctionCalling()
             ResponseItem.CreateUserMessageItem("What's the weather like today for my current location?"),
         ];

-        ResponseCreationOptions options = new()
+        CreateResponseOptions options = new(inputItems)
         {
             Tools = { getCurrentLocationTool, getCurrentWeatherTool },
         };
@@ -81,11 +81,11 @@ public void Example03_FunctionCalling()
         do
         {
             requiresAction = false;
-            OpenAIResponse response = client.CreateResponse(inputItems, options);
+            ResponseResult response = client.CreateResponse(options);

-            inputItems.AddRange(response.OutputItems);
+            inputItems.AddRange(response.Output);

-            foreach (ResponseItem outputItem in response.OutputItems)
+            foreach (ResponseItem outputItem in response.Output)
             {
                 if (outputItem is FunctionCallResponseItem functionCall)
                 {
@@ -132,7 +132,7 @@ public void Example03_FunctionCalling()
                 }
             }

-            PrintMessageItems(response.OutputItems.OfType<MessageResponseItem>());
+            PrintMessageItems(response.Output.OfType<MessageResponseItem>());

         } while (requiresAction);
     }

examples/Responses/Example03_FunctionCallingAsync.cs

Lines changed: 5 additions & 5 deletions
@@ -26,7 +26,7 @@ public async Task Example03_FunctionCallingAsync()
             ResponseItem.CreateUserMessageItem("What's the weather like today for my current location?"),
         ];

-        ResponseCreationOptions options = new()
+        CreateResponseOptions options = new(inputItems)
         {
             Tools = { getCurrentLocationTool, getCurrentWeatherTool },
         };
@@ -38,11 +38,11 @@ public async Task Example03_FunctionCallingAsync()
         do
         {
             requiresAction = false;
-            OpenAIResponse response = await client.CreateResponseAsync(inputItems, options);
+            ResponseResult response = await client.CreateResponseAsync(options);

-            inputItems.AddRange(response.OutputItems);
+            inputItems.AddRange(response.Output);

-            foreach (ResponseItem outputItem in response.OutputItems)
+            foreach (ResponseItem outputItem in response.Output)
             {
                 if (outputItem is FunctionCallResponseItem functionCall)
                 {
@@ -89,7 +89,7 @@ public async Task Example03_FunctionCallingAsync()
                 }
             }

-            PrintMessageItems(response.OutputItems.OfType<MessageResponseItem>());
+            PrintMessageItems(response.Output.OfType<MessageResponseItem>());

         } while (requiresAction);
     }
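
Put together, the migrated tool-call loop looks roughly like the sketch below. Only the pieces shown in the hunks are confirmed by this commit; the tool definition, the FunctionCallResponseItem.CallId and ResponseItem.CreateFunctionCallOutputItem members are carried over from the existing examples as assumptions, and the hard-coded tool output is purely illustrative:

using System;
using System.Collections.Generic;
using OpenAI.Responses; // assumed namespace

public static class FunctionCallingSketch
{
    public static void Run()
    {
        OpenAIResponseClient client = new(
            model: "gpt-5",
            apiKey: Environment.GetEnvironmentVariable("OPENAI_API_KEY"));

        // Hypothetical no-argument tool, standing in for the example's
        // getCurrentLocationTool / getCurrentWeatherTool (defined outside the hunks).
        ResponseTool getCurrentLocationTool = ResponseTool.CreateFunctionTool(
            "get_current_location",
            "Gets the user's current location.",
            BinaryData.FromString("""{"type":"object","properties":{}}"""));

        List<ResponseItem> inputItems =
        [
            ResponseItem.CreateUserMessageItem("What's the weather like today for my current location?"),
        ];

        // The options now owns both the conversation input and the tool list.
        CreateResponseOptions options = new(inputItems)
        {
            Tools = { getCurrentLocationTool },
        };

        bool requiresAction;
        do
        {
            requiresAction = false;
            ResponseResult response = client.CreateResponse(options);

            // As in the diff: every output item is appended to the same list
            // instance the options was constructed with.
            inputItems.AddRange(response.Output);

            foreach (ResponseItem outputItem in response.Output)
            {
                if (outputItem is FunctionCallResponseItem functionCall)
                {
                    // Resolve the call and append its result; the factory name
                    // is assumed from the existing examples.
                    string toolOutput = """{"location":"Seattle, WA"}""";
                    inputItems.Add(ResponseItem.CreateFunctionCallOutputItem(functionCall.CallId, toolOutput));
                    requiresAction = true;
                }
            }

            // OutputText aggregates any assistant message text, per the simpler examples.
            if (!string.IsNullOrEmpty(response.OutputText))
            {
                Console.WriteLine($"[ASSISTANT]: {response.OutputText}");
            }
        } while (requiresAction);
    }
}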

examples/Responses/Example04_FunctionCallingStreaming.cs

Lines changed: 2 additions & 2 deletions
@@ -26,7 +26,7 @@ public void Example04_FunctionCallingStreaming()
             ResponseItem.CreateUserMessageItem("What's the weather like today for my current location?"),
         ];

-        ResponseCreationOptions options = new()
+        CreateResponseOptions options = new(inputItems)
         {
             Tools = { getCurrentLocationTool, getCurrentWeatherTool },
         };
@@ -38,7 +38,7 @@ public void Example04_FunctionCallingStreaming()
         do
         {
             requiresAction = false;
-            CollectionResult<StreamingResponseUpdate> responseUpdates = client.CreateResponseStreaming(inputItems, options);
+            CollectionResult<StreamingResponseUpdate> responseUpdates = client.CreateResponseStreaming(options);

             foreach (StreamingResponseUpdate update in responseUpdates)
             {

examples/Responses/Example04_FunctionCallingStreamingAsync.cs

Lines changed: 2 additions & 2 deletions
@@ -27,7 +27,7 @@ public async Task Example04_FunctionCallingStreamingAsync()
             ResponseItem.CreateUserMessageItem("What's the weather like today for my current location?"),
         ];

-        ResponseCreationOptions options = new()
+        CreateResponseOptions options = new(inputItems)
         {
             Tools = { getCurrentLocationTool, getCurrentWeatherTool },
         };
@@ -39,7 +39,7 @@ public async Task Example04_FunctionCallingStreamingAsync()
         do
         {
             requiresAction = false;
-            AsyncCollectionResult<StreamingResponseUpdate> responseUpdates = client.CreateResponseStreamingAsync(inputItems, options);
+            AsyncCollectionResult<StreamingResponseUpdate> responseUpdates = client.CreateResponseStreamingAsync(options);

             await foreach (StreamingResponseUpdate update in responseUpdates)
             {

examples/Responses/Example05_RemoteMcp.cs

Lines changed: 5 additions & 3 deletions
@@ -13,7 +13,9 @@ public partial class ResponseExamples
     [Test]
     public void Example05_RemoteMcp()
     {
-        ResponseCreationOptions options = new()
+        CreateResponseOptions options = new([
+            ResponseItem.CreateUserMessageItem("Roll 2d4+1")
+        ])
         {
             Tools = {
                 new McpTool(serverLabel: "dmcp", serverUri: new Uri("https://dmcp-server.deno.dev/sse"))
@@ -26,9 +28,9 @@ public void Example05_RemoteMcp()

         OpenAIResponseClient client = new(model: "gpt-5", apiKey: Environment.GetEnvironmentVariable("OPENAI_API_KEY"));

-        OpenAIResponse response = client.CreateResponse("Roll 2d4+1", options);
+        ResponseResult response = client.CreateResponse(options);

-        Console.WriteLine($"[ASSISTANT]: {response.GetOutputText()}");
+        Console.WriteLine($"[ASSISTANT]: {response.OutputText}");
     }
 }

examples/Responses/Example05_RemoteMcpAsync.cs

Lines changed: 5 additions & 3 deletions
@@ -14,7 +14,9 @@ public partial class ResponseExamples
     [Test]
     public async Task Example05_RemoteMcpAsync()
     {
-        ResponseCreationOptions options = new()
+        CreateResponseOptions options = new([
+            ResponseItem.CreateUserMessageItem("Roll 2d4+1")
+        ])
         {
             Tools = {
                 new McpTool(serverLabel: "dmcp", serverUri: new Uri("https://dmcp-server.deno.dev/sse"))
@@ -27,9 +29,9 @@ public async Task Example05_RemoteMcpAsync()

         OpenAIResponseClient client = new(model: "gpt-5", apiKey: Environment.GetEnvironmentVariable("OPENAI_API_KEY"));

-        OpenAIResponse response = await client.CreateResponseAsync("Roll 2d4+1", options);
+        ResponseResult response = await client.CreateResponseAsync(options);

-        Console.WriteLine($"[ASSISTANT]: {response.GetOutputText()}");
+        Console.WriteLine($"[ASSISTANT]: {response.OutputText}");
     }
 }
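
A hedged sketch of the migrated MCP example as a whole. The McpTool construction and the options-first call shape come from the diff; the namespace is an assumption, and the MCP settings configured between the two hunks in the original file are omitted rather than guessed:

using System;
using System.Threading.Tasks;
using OpenAI.Responses; // assumed namespace

public static class RemoteMcpSketch
{
    public static async Task RunAsync()
    {
        // The prompt now travels inside the options' input items instead of
        // being passed alongside the options.
        CreateResponseOptions options = new([
            ResponseItem.CreateUserMessageItem("Roll 2d4+1")
        ])
        {
            Tools =
            {
                // The original example configures additional MCP settings on this
                // tool (not shown in the diff); they are left out here.
                new McpTool(serverLabel: "dmcp", serverUri: new Uri("https://dmcp-server.deno.dev/sse")),
            },
        };

        OpenAIResponseClient client = new(
            model: "gpt-5",
            apiKey: Environment.GetEnvironmentVariable("OPENAI_API_KEY"));

        ResponseResult response = await client.CreateResponseAsync(options);
        Console.WriteLine($"[ASSISTANT]: {response.OutputText}");
    }
}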
