Commit 7015d9c

docs: Added FunctionCalling example.
1 parent 07b8eae commit 7015d9c

11 files changed: +337 −362 lines changed


docs/index.md

Lines changed: 11 additions & 1 deletion
@@ -227,4 +227,14 @@ double? priceInUsd = CreateSpeechRequestModel.Tts1Hd.TryGetPriceInUsd(
 
 Priority place for bugs: https://github.com/tryAGI/OpenAI/issues
 Priority place for ideas and general questions: https://github.com/tryAGI/OpenAI/discussions
-Discord: https://discord.gg/Ca2xhfBf3v
+Discord: https://discord.gg/Ca2xhfBf3v
+
+## Acknowledgments
+
+![JetBrains logo](https://resources.jetbrains.com/storage/products/company/brand/logos/jetbrains.png)
+
+This project is supported by JetBrains through the [Open Source Support Program](https://jb.gg/OpenSourceSupport).
+
+![CodeRabbit logo](https://opengraph.githubassets.com/1c51002d7d0bbe0c4fd72ff8f2e58192702f73a7037102f77e4dbb98ac00ea8f/marketplace/coderabbitai)
+
+This project is supported by CodeRabbit through the [Open Source Support Program](https://github.com/marketplace/coderabbitai).

docs/samples/Assistants.AssistantsWithVision.md

Lines changed: 34 additions & 22 deletions
@@ -46,32 +46,44 @@ ThreadObject thread = await api.Assistants.CreateThreadAsync(new CreateThreadReq
     ]
 });
 
-// AsyncResultCollection<StreamingUpdate> streamingUpdates = api.Assistants.CreateRunStreamingAsync(
-//     thread,
-//     assistant,
-//     new RunCreationOptions()
-//     {
-//         AdditionalInstructions = "When possible, try to sneak in puns if you're asked to compare things.",
-//     });
-//
-// await foreach (StreamingUpdate streamingUpdate in streamingUpdates)
-// {
-//     if (streamingUpdate.UpdateKind == StreamingUpdateReason.RunCreated)
-//     {
-//         Console.WriteLine($"--- Run started! ---");
-//     }
-//     if (streamingUpdate is MessageContentUpdate contentUpdate)
-//     {
-//         Console.Write(contentUpdate.Text);
-//     }
-// }
-
-RunObject response = await api.Assistants.CreateRunAsync(
+var streamingUpdates = api.Assistants.CreateRunAsStreamAsync(
     threadId: thread.Id,
     assistantId: assistant.Id,
     instructions: "When possible, try to sneak in puns if you're asked to compare things.");
 
-Console.WriteLine(response[0].Content);
+await foreach (AssistantStreamEvent streamingUpdate in streamingUpdates)
+{
+    if (streamingUpdate.IsRun && streamingUpdate.Run.Value.IsValue1) // RunCreated
+    {
+        Console.WriteLine("--- Run started! ---");
+    }
+    if (streamingUpdate is { IsMessage: true, Message: var messageStreamEvent } &&
+        messageStreamEvent.Value is { IsValue3: true, Value3: var delta })
+    {
+        foreach (var deltaVariation in delta.Data.Delta.Content ?? [])
+        {
+            if (deltaVariation.IsValue1)
+            {
+                Console.WriteLine();
+                Console.WriteLine(deltaVariation.Value1.ImageFile?.FileId);
+            }
+            if (deltaVariation.IsValue2)
+            {
+                Console.Write(deltaVariation.Value2.Text?.Value);
+            }
+            if (deltaVariation.IsValue3)
+            {
+                Console.WriteLine();
+                Console.WriteLine(deltaVariation.Value3.Refusal);
+            }
+            if (deltaVariation.IsValue4)
+            {
+                Console.WriteLine();
+                Console.WriteLine(deltaVariation.Value4.ImageUrl?.Url);
+            }
+        }
+    }
+}
 
 _ = await api.Files.DeleteFileAsync(pictureOfAppleFile.Id);
 _ = await api.Assistants.DeleteThreadAsync(thread.Id);
docs/samples/Chat.FunctionCalling.md

Lines changed: 90 additions & 0 deletions
@@ -0,0 +1,90 @@
```csharp
using var api = GetAuthenticatedClient();

List<ChatCompletionRequestMessage> messages = [
    "What's the weather like today?",
];

var service = new FunctionCallingService();
IList<ChatCompletionTool> tools = service.AsTools();

bool requiresAction;

do
{
    requiresAction = false;
    CreateChatCompletionResponse chatCompletion = await api.Chat.CreateChatCompletionAsync(
        messages,
        model: CreateChatCompletionRequestModel.Gpt4o,
        tools: tools);

    switch (chatCompletion.Choices[0].FinishReason)
    {
        case CreateChatCompletionResponseChoiceFinishReason.Stop:
        {
            // Add the assistant message to the conversation history.
            messages.Add(chatCompletion.Choices[0].Message.AsRequestMessage());
            break;
        }

        case CreateChatCompletionResponseChoiceFinishReason.ToolCalls:
        {
            // First, add the assistant message with tool calls to the conversation history.
            messages.Add(chatCompletion.Choices[0].Message.AsRequestMessage());

            // Then, add a new tool message for each tool call that is resolved.
            foreach (ChatCompletionMessageToolCall toolCall in chatCompletion.Choices[0].Message.ToolCalls ?? [])
            {
                var json = await service.CallAsync(
                    functionName: toolCall.Function.Name,
                    argumentsAsJson: toolCall.Function.Arguments);
                messages.Add(json.AsToolMessage(toolCall.Id));
            }

            requiresAction = true;
            break;
        }

        case CreateChatCompletionResponseChoiceFinishReason.Length:
            throw new NotImplementedException("Incomplete model output due to MaxTokens parameter or token limit exceeded.");

        case CreateChatCompletionResponseChoiceFinishReason.ContentFilter:
            throw new NotImplementedException("Omitted content due to a content filter flag.");

        case CreateChatCompletionResponseChoiceFinishReason.FunctionCall:
            throw new NotImplementedException("Deprecated in favor of tool calls.");

        default:
            throw new NotImplementedException(chatCompletion.Choices[0].FinishReason.ToString());
    }
} while (requiresAction);

foreach (ChatCompletionRequestMessage requestMessage in messages)
{
    if (requestMessage.System is { } systemMessage)
    {
        Console.WriteLine($"[SYSTEM]:");
        Console.WriteLine($"{systemMessage.Content.Value1}");
        Console.WriteLine();
        break;
    }
    else if (requestMessage.User is { } userMessage)
    {
        Console.WriteLine($"[USER]:");
        Console.WriteLine($"{userMessage.Content.Value1}");
        Console.WriteLine();
    }
    else if (requestMessage.Assistant is { Content: not null } assistantMessage)
    {
        Console.WriteLine($"[ASSISTANT]:");
        Console.WriteLine($"{assistantMessage.Content?.Value1}");
        Console.WriteLine();
    }
    else if (requestMessage.Tool is { } toolMessage)
    {
        // Do not print any tool messages; let the assistant summarize the tool results instead.
        break;
    }
}
```
docs/samples/Chat.FunctionCallingService.md

Lines changed: 42 additions & 0 deletions
@@ -0,0 +1,42 @@
```csharp
public enum WeatherUnit
{
    Celsius,
    Fahrenheit,
}

[OpenAiTools(Strict = true)]
public interface IFunctionCallingService
{
    [Description("Get the user's current location")]
    public Task<string> GetCurrentLocation(
        CancellationToken cancellationToken = default);

    [Description("Get the current weather in a given location")]
    public Task<string> GetCurrentWeatherAsync(
        [Description("The city and state, e.g. Boston, MA")]
        string location,
        [Description("The temperature unit to use. Infer this from the specified location.")]
        WeatherUnit unit = WeatherUnit.Celsius,
        CancellationToken cancellationToken = default);
}

public class FunctionCallingService : IFunctionCallingService
{
    public Task<string> GetCurrentLocation(
        CancellationToken cancellationToken = default)
    {
        // Call the location API here.
        return Task.FromResult("San Francisco");
    }

    public Task<string> GetCurrentWeatherAsync(
        string location,
        WeatherUnit unit = WeatherUnit.Celsius,
        CancellationToken cancellationToken = default)
    {
        // Call the weather API here.
        return Task.FromResult($"31 {unit:G}");
    }
}
```
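
As a usage note: the FunctionCalling sample above drives this service through the generated `AsTools()` / `CallAsync(...)` helpers. The snippet below is a hypothetical direct call to that dispatcher outside of a chat loop, which can be handy for testing a tool in isolation; the exact tool name string (assumed here to equal the method name `GetCurrentWeatherAsync`) and the JSON argument shape are assumptions inferred from the sample, not confirmed API details.

```csharp
// Hypothetical direct invocation of the generated tool dispatcher (not part of the commit).
// Assumptions: the tool name matches the interface method name, and arguments arrive as a JSON object.
var service = new FunctionCallingService();

string result = await service.CallAsync(
    functionName: "GetCurrentWeatherAsync",
    argumentsAsJson: """{"location": "Boston, MA", "unit": "Fahrenheit"}""");

Console.WriteLine(result); // expected to print "31 Fahrenheit" from the stub implementation above
```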

mkdocs.yml

Lines changed: 2 additions & 0 deletions
@@ -7,6 +7,8 @@ nav:
     - SimpleChat: samples/Chat.SimpleChat.md
     - SimpleChatStreaming: samples/Chat.SimpleChatStreaming.md
     - ChatWithVision: samples/Chat.ChatWithVision.md
+    - FunctionCalling: samples/Chat.FunctionCalling.md
+    - FunctionCallingService: samples/Chat.FunctionCallingService.md
   - Assistants:
     - AssistantsWithVision: samples/Assistants.AssistantsWithVision.md
     - ListFiles: samples/Assistants.ListFiles.md

src/helpers/GenerateDocs/Program.cs

Lines changed: 14 additions & 6 deletions
@@ -15,12 +15,20 @@
 {
     var code = await File.ReadAllTextAsync(path);
 
-    var start = code.IndexOf("\n {", StringComparison.Ordinal);
-    var end = code.IndexOf("\n }", StringComparison.Ordinal);
-    code = code.Substring(start + 4, end - start + 4);
-
-    var lines = code.Split('\n')[1..^2];
-    code = string.Join('\n', lines.Select(x => x.Length > 8 ? x[8..] : string.Empty));
+    var startExample = code.IndexOf("// # START EXAMPLE #", StringComparison.Ordinal);
+    if (startExample == -1)
+    {
+        var start = code.IndexOf("\n {", StringComparison.Ordinal);
+        var end = code.IndexOf("\n }", StringComparison.Ordinal);
+        code = code.Substring(start + 4, end - start + 4);
+
+        var lines = code.Split('\n')[1..^2];
+        code = string.Join('\n', lines.Select(x => x.Length > 8 ? x[8..] : string.Empty));
+    }
+    else
+    {
+        code = code[(startExample + "// # START EXAMPLE #".Length)..].Trim();
+    }
 
     var newPath = Path.Combine(newDir, $"{Path.GetFileNameWithoutExtension(path).Replace("Examples.", string.Empty)}.md");
     await File.WriteAllTextAsync(newPath, $@"```csharp
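
To make the new extraction rule concrete, here is a small self-contained sketch (not part of the commit) that mirrors the logic added above: if an example file contains the `// # START EXAMPLE #` marker, everything after the marker becomes the generated sample; otherwise the generator falls back to the existing brace-based extraction. The input string below is invented for illustration.

```csharp
// Minimal sketch of the marker-based extraction added in Program.cs above.
// The input string is hypothetical; the real generator reads example files from disk.
const string Marker = "// # START EXAMPLE #";

var code = """
    // arrange/setup that should not appear in the generated docs page
    using var api = GetAuthenticatedClient();
    // # START EXAMPLE #
    Console.WriteLine("Everything after the marker lands in the generated .md sample.");
    """;

var startExample = code.IndexOf(Marker, StringComparison.Ordinal);
var extracted = startExample == -1
    ? code // the real generator applies the brace-based extraction here instead
    : code[(startExample + Marker.Length)..].Trim();

Console.WriteLine(extracted);
// Prints only the single Console.WriteLine line that follows the marker.
```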
Lines changed: 98 additions & 0 deletions
@@ -0,0 +1,98 @@
```csharp
namespace OpenAI.IntegrationTests.Examples;

public partial class Examples
{
    [Test]
    [Explicit]
    public async Task FunctionCalling()
    {
        using var api = GetAuthenticatedClient();

        List<ChatCompletionRequestMessage> messages = [
            "What's the weather like today?",
        ];

        var service = new FunctionCallingService();
        IList<ChatCompletionTool> tools = service.AsTools();

        bool requiresAction;

        do
        {
            requiresAction = false;
            CreateChatCompletionResponse chatCompletion = await api.Chat.CreateChatCompletionAsync(
                messages,
                model: CreateChatCompletionRequestModel.Gpt4o,
                tools: tools);

            switch (chatCompletion.Choices[0].FinishReason)
            {
                case CreateChatCompletionResponseChoiceFinishReason.Stop:
                {
                    // Add the assistant message to the conversation history.
                    messages.Add(chatCompletion.Choices[0].Message.AsRequestMessage());
                    break;
                }

                case CreateChatCompletionResponseChoiceFinishReason.ToolCalls:
                {
                    // First, add the assistant message with tool calls to the conversation history.
                    messages.Add(chatCompletion.Choices[0].Message.AsRequestMessage());

                    // Then, add a new tool message for each tool call that is resolved.
                    foreach (ChatCompletionMessageToolCall toolCall in chatCompletion.Choices[0].Message.ToolCalls ?? [])
                    {
                        var json = await service.CallAsync(
                            functionName: toolCall.Function.Name,
                            argumentsAsJson: toolCall.Function.Arguments);
                        messages.Add(json.AsToolMessage(toolCall.Id));
                    }

                    requiresAction = true;
                    break;
                }

                case CreateChatCompletionResponseChoiceFinishReason.Length:
                    throw new NotImplementedException("Incomplete model output due to MaxTokens parameter or token limit exceeded.");

                case CreateChatCompletionResponseChoiceFinishReason.ContentFilter:
                    throw new NotImplementedException("Omitted content due to a content filter flag.");

                case CreateChatCompletionResponseChoiceFinishReason.FunctionCall:
                    throw new NotImplementedException("Deprecated in favor of tool calls.");

                default:
                    throw new NotImplementedException(chatCompletion.Choices[0].FinishReason.ToString());
            }
        } while (requiresAction);

        foreach (ChatCompletionRequestMessage requestMessage in messages)
        {
            if (requestMessage.System is { } systemMessage)
            {
                Console.WriteLine($"[SYSTEM]:");
                Console.WriteLine($"{systemMessage.Content.Value1}");
                Console.WriteLine();
                break;
            }
            else if (requestMessage.User is { } userMessage)
            {
                Console.WriteLine($"[USER]:");
                Console.WriteLine($"{userMessage.Content.Value1}");
                Console.WriteLine();
            }
            else if (requestMessage.Assistant is { Content: not null } assistantMessage)
            {
                Console.WriteLine($"[ASSISTANT]:");
                Console.WriteLine($"{assistantMessage.Content?.Value1}");
                Console.WriteLine();
            }
            else if (requestMessage.Tool is { } toolMessage)
            {
                // Do not print any tool messages; let the assistant summarize the tool results instead.
                break;
            }
        }
    }
}
```

0 commit comments
