Commit b4b3a74

.Net: Update Response Agent conversation state examples (#12777)
### Motivation and Context

The conversation state samples for `OpenAIResponseAgent` only demonstrated non-streaming invocation. This change adds streaming counterparts so each way of managing conversation state is shown for both invocation styles.

### Description

- Renames `ManuallyManageConversationStateWithResponsesChatCompletionApiAsync` to `ManageConversationStateWithResponseIdAsync` and `ManageConversationStateWithResponseApiAsync` to `StoreConversationStateAsync`.
- Adds streaming variants: `ManuallyConstructPastConversationStreamingAsync`, `ManageConversationStateWithResponseIdStreamingAsync`, and `StoreConversationStateWithStreamingAsync`.
- Uses "Explain why this is funny?" as the follow-up prompt so the samples demonstrate state carrying across turns, and links the OpenAI conversation state guide from the class summary.
- Defaults `EnableLogging` to `false` in `BaseResponsesAgentTest`.

### Contribution Checklist

- [ ] The code builds clean without any errors or warnings
- [ ] The PR follows the [SK Contribution Guidelines](https://github.com/microsoft/semantic-kernel/blob/main/CONTRIBUTING.md) and the [pre-submission formatting script](https://github.com/microsoft/semantic-kernel/blob/main/CONTRIBUTING.md#development-scripts) raises no violations
- [ ] All unit tests pass, and I have added new tests where possible
- [ ] I didn't break anyone 😄
1 parent 068236b commit b4b3a74

2 files changed: +115 −4 lines


dotnet/samples/GettingStartedWithAgents/OpenAIResponse/Step02_OpenAIResponseAgent_ConversationState.cs

Lines changed: 114 additions & 3 deletions
@@ -10,6 +10,7 @@ namespace GettingStarted.OpenAIResponseAgents;
 /// <summary>
 /// This example demonstrates how to manage conversation state during a model interaction using <see cref="OpenAIResponseAgent"/>.
 /// OpenAI provides a few ways to manage conversation state, which is important for preserving information across multiple messages or turns in a conversation.
+/// See: https://platform.openai.com/docs/guides/conversation-state?api-mode=responses for more information.
 /// </summary>
 public class Step02_OpenAIResponseAgent_ConversationState(ITestOutputHelper output) : BaseResponsesAgentTest(output)
 {
@@ -42,7 +43,36 @@ public async Task ManuallyConstructPastConversationAsync()
     }
 
     [Fact]
-    public async Task ManuallyManageConversationStateWithResponsesChatCompletionApiAsync()
+    public async Task ManuallyConstructPastConversationStreamingAsync()
+    {
+        // Define the agent
+        OpenAIResponseAgent agent = new(this.Client)
+        {
+            StoreEnabled = false,
+        };
+
+        ICollection<ChatMessageContent> messages =
+        [
+            new ChatMessageContent(AuthorRole.User, "knock knock."),
+            new ChatMessageContent(AuthorRole.Assistant, "Who's there?"),
+            new ChatMessageContent(AuthorRole.User, "Orange.")
+        ];
+        foreach (ChatMessageContent message in messages)
+        {
+            WriteAgentChatMessage(message);
+        }
+
+        // Invoke the agent and output the response
+        var responseItems = agent.InvokeStreamingAsync(messages);
+        Console.Write("\n# assistant: ");
+        await foreach (StreamingChatMessageContent responseItem in responseItems)
+        {
+            Console.Write(responseItem.Content);
+        }
+    }
+
+    [Fact]
+    public async Task ManageConversationStateWithResponseIdAsync()
     {
         // Define the agent
         OpenAIResponseAgent agent = new(this.Client)
@@ -53,7 +83,7 @@ public async Task ManuallyManageConversationStateWithResponsesChatCompletionApiA
         string[] messages =
         [
             "Tell me a joke?",
-            "Tell me another?",
+            "Explain why this is funny?",
         ];
 
         // Invoke the agent and output the response
@@ -73,7 +103,39 @@ public async Task ManuallyManageConversationStateWithResponsesChatCompletionApiA
     }
 
     [Fact]
-    public async Task ManageConversationStateWithResponseApiAsync()
+    public async Task ManageConversationStateWithResponseIdStreamingAsync()
+    {
+        // Define the agent
+        OpenAIResponseAgent agent = new(this.Client)
+        {
+            StoreEnabled = false,
+        };
+
+        string[] messages =
+        [
+            "Tell me a joke?",
+            "Explain why this is funny?",
+        ];
+
+        // Invoke the agent and output the response
+        AgentThread? agentThread = null;
+        foreach (string message in messages)
+        {
+            var userMessage = new ChatMessageContent(AuthorRole.User, message);
+            WriteAgentChatMessage(userMessage);
+
+            Console.Write("\n# assistant: ");
+            var responseItems = agent.InvokeStreamingAsync(userMessage, agentThread);
+            await foreach (AgentResponseItem<StreamingChatMessageContent> responseItem in responseItems)
+            {
+                agentThread = responseItem.Thread;
+                Console.Write(responseItem.Message.Content);
+            }
+        }
+    }
+
+    [Fact]
+    public async Task StoreConversationStateAsync()
     {
         // Define the agent
         OpenAIResponseAgent agent = new(this.Client)
@@ -119,4 +181,53 @@ public async Task ManageConversationStateWithResponseApiAsync()
             await agentThread.DeleteAsync();
         }
     }
+
+    [Fact]
+    public async Task StoreConversationStateWithStreamingAsync()
+    {
+        // Define the agent
+        OpenAIResponseAgent agent = new(this.Client)
+        {
+            StoreEnabled = true,
+        };
+
+        string[] messages =
+        [
+            "Tell me a joke?",
+            "Explain why this is funny.",
+        ];
+
+        // Invoke the agent and output the response
+        AgentThread? agentThread = null;
+        foreach (string message in messages)
+        {
+            var userMessage = new ChatMessageContent(AuthorRole.User, message);
+            WriteAgentChatMessage(userMessage);
+
+            Console.Write("\n# assistant: ");
+            var responseItems = agent.InvokeStreamingAsync(userMessage, agentThread);
+            await foreach (AgentResponseItem<StreamingChatMessageContent> responseItem in responseItems)
+            {
+                agentThread = responseItem.Thread;
+                Console.Write(responseItem.Message.Content);
+            }
+        }
+
+        // Display the contents in the latest thread
+        if (agentThread is not null)
+        {
+            this.Output.WriteLine("\n\nResponse Thread Messages\n");
+            var responseAgentThread = agentThread as OpenAIResponseAgentThread;
+            var threadMessages = responseAgentThread?.GetMessagesAsync();
+            if (threadMessages is not null)
+            {
+                await foreach (var threadMessage in threadMessages)
+                {
+                    WriteAgentChatMessage(threadMessage);
+                }
+            }
+
+            await agentThread.DeleteAsync();
+        }
+    }
 }
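
The two multi-turn streaming samples (`ManageConversationStateWithResponseIdStreamingAsync` and `StoreConversationStateWithStreamingAsync`) rely on the same pattern: every streamed item carries the `AgentThread`, and the caller captures it and passes it back on the next invocation so the conversation continues. The sketch below is a minimal standalone version of that loop, grounded on the calls shown in the diff above; the `using` directives, class name, and the caller-supplied `OpenAIResponseClient` parameter are assumptions for illustration, not part of the commit.

```csharp
using System;
using System.Threading.Tasks;
using Microsoft.SemanticKernel;
using Microsoft.SemanticKernel.Agents;
using Microsoft.SemanticKernel.Agents.OpenAI;
using Microsoft.SemanticKernel.ChatCompletion;
using OpenAI.Responses;

public static class ConversationStateSketch
{
    // Hypothetical entry point: the caller supplies an already-configured
    // OpenAIResponseClient (model, credentials), which the sample's test base
    // class normally provides via this.Client.
    public static async Task RunAsync(OpenAIResponseClient client)
    {
        OpenAIResponseAgent agent = new(client)
        {
            StoreEnabled = false, // the caller, not the service, carries the conversation forward
        };

        AgentThread? agentThread = null;
        foreach (string prompt in new[] { "Tell me a joke?", "Explain why this is funny?" })
        {
            Console.Write($"\n# user: {prompt}\n# assistant: ");
            var userMessage = new ChatMessageContent(AuthorRole.User, prompt);

            await foreach (AgentResponseItem<StreamingChatMessageContent> item
                in agent.InvokeStreamingAsync(userMessage, agentThread))
            {
                // Each streamed item carries the thread; capture it so the next
                // turn continues the same conversation.
                agentThread = item.Thread;
                Console.Write(item.Message.Content);
            }
        }
    }
}
```

With `StoreEnabled = true`, as in `StoreConversationStateWithStreamingAsync`, the same loop applies unchanged; the thread then refers to service-side state that can be read back via `OpenAIResponseAgentThread.GetMessagesAsync()` and cleaned up with `DeleteAsync()`, as the sample above does.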

dotnet/src/InternalUtilities/samples/AgentUtilities/BaseResponsesAgentTest.cs

Lines changed: 1 addition & 1 deletion
@@ -36,7 +36,7 @@ protected BaseResponsesAgentTest(ITestOutputHelper output, string? model = null)
 
     protected VectorStoreClient VectorStoreClient { get; set; }
 
-    protected bool EnableLogging { get; set; } = true;
+    protected bool EnableLogging { get; set; } = false;
 
     /// <inheritdoc/>
     protected override OpenAIResponseClient Client { get; }
