forked from microsoft/agent-framework
-
Notifications
You must be signed in to change notification settings - Fork 0
Expand file tree
/
Copy pathProgram.cs
More file actions
70 lines (53 loc) · 2.17 KB
/
Program.cs
File metadata and controls
70 lines (53 loc) · 2.17 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
// Copyright (c) Microsoft. All rights reserved.
// This sample shows how to use background responses with ChatClientAgent and Azure OpenAI Responses.
using Azure.AI.OpenAI;
using Azure.Identity;
using Microsoft.Agents.AI;
using OpenAI.Responses;
// Read the required Azure OpenAI configuration from the environment.
string endpoint = Environment.GetEnvironmentVariable("AZURE_OPENAI_ENDPOINT") ?? throw new InvalidOperationException("AZURE_OPENAI_ENDPOINT is not set.");
string deploymentName = Environment.GetEnvironmentVariable("AZURE_OPENAI_DEPLOYMENT_NAME") ?? "gpt-4o-mini";

// Build an agent backed by the Azure OpenAI Responses API, authenticating through the Azure CLI.
var responseClient = new AzureOpenAIClient(new Uri(endpoint), new AzureCliCredential())
    .GetOpenAIResponseClient(deploymentName);
AIAgent agent = responseClient.CreateAIAgent();

// Background responses allow a long-running request to be polled for completion
// (only supported by OpenAI Responses at this time).
AgentRunOptions options = new() { AllowBackgroundResponses = true };
AgentThread thread = agent.GetNewThread();

// Kick off the run; while incomplete, the response carries a continuation token.
AgentRunResponse response = await agent.RunAsync("Write a very long novel about otters in space.", thread, options);

// Keep polling with the latest continuation token until the response is complete.
while (response.ContinuationToken is { } token)
{
    // Brief pause between polls so we don't hammer the service.
    await Task.Delay(TimeSpan.FromSeconds(2));
    options.ContinuationToken = token;
    response = await agent.RunAsync(thread, options);
}

// Print the finished result.
Console.WriteLine(response.Text);

// --- Streaming variant: demonstrate resuming a background stream after an interruption. ---
options = new() { AllowBackgroundResponses = true };
thread = agent.GetNewThread();

AgentRunResponseUpdate? lastUpdate = null;
await foreach (AgentRunResponseUpdate update in agent.RunStreamingAsync("Write a very long novel about otters in space.", thread, options))
{
    // Echo each streamed chunk and remember the most recent update.
    Console.Write(update.Text);
    lastUpdate = update;

    // Simulate a dropped connection once the first non-empty chunk arrives.
    if (update.Text.Length > 0)
    {
        break;
    }
}

// Resume the stream from where it was cut off, using the last seen continuation token.
options.ContinuationToken = lastUpdate?.ContinuationToken;
await foreach (AgentRunResponseUpdate update in agent.RunStreamingAsync(thread, options))
{
    Console.Write(update.Text);
}