Commit d893a79

Add initial implementation of McpTool to Responses (#656)
* Added initial support for integrating with remote MCP servers via the Responses API.
* Users can add the `McpTool` to the `Tools` property of their `ResponseCreationOptions` and configure it.
* Use the `AllowedTools` property to limit which of the server's tools can be called by the model.
* Use the `ToolCallApprovalPolicy` property to specify which tools require explicit approval before being called by the model.
* Support for selecting the `McpTool` via the `ToolChoice` property is coming soon.
* Support for configuring the `McpTool` with an access token that can be used to authenticate with the remote MCP server is coming soon.
* Support for connectors is coming soon.
1 parent 8c2c247 commit d893a79
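
For orientation, a minimal sketch of how the new tool might be wired up from user code. Only the `McpTool` type and the `Tools`, `AllowedTools`, and `ToolCallApprovalPolicy` members are named by the commit message; the `ResponseTool.CreateMcpTool` factory, its parameter names, the approval-policy types, and the server details below are illustrative assumptions modeled on the existing `ResponseTool.CreateFunctionTool` pattern.

using OpenAI.Responses;
using System;
using System.Collections.Generic;

// The Responses APIs are experimental; suppress the corresponding warning to use them.
#pragma warning disable OPENAI001

OpenAIResponseClient client = new(model: "gpt-5", apiKey: Environment.GetEnvironmentVariable("OPENAI_API_KEY"));

// Assumed factory method and parameter names; only the McpTool type itself is confirmed by the commit.
McpTool mcpTool = ResponseTool.CreateMcpTool(
    serverLabel: "deepwiki",
    serverUri: new Uri("https://mcp.deepwiki.com/mcp"));

// Limit which of the server's tools the model may call (assumed here to be a collection of tool names).
mcpTool.AllowedTools.Add("ask_question");

// Control which tool calls require explicit approval; the policy type and value are assumptions,
// only the ToolCallApprovalPolicy property name comes from the commit message.
mcpTool.ToolCallApprovalPolicy = new McpToolCallApprovalPolicy(GlobalMcpToolCallApprovalPolicy.NeverRequireApproval);

ResponseCreationOptions options = new()
{
    Tools = { mcpTool },
};

List<ResponseItem> inputItems =
[
    ResponseItem.CreateUserMessageItem("What transport protocols does the MCP specification support?"),
];

OpenAIResponse response = client.CreateResponse(inputItems, options);
Console.WriteLine(response.GetOutputText());

#pragma warning restore OPENAI001

As noted above, selecting the `McpTool` via `ToolChoice` and authenticating with an access token are not yet supported in this commit.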

File tree

115 files changed: +13,250 additions, −10,559 deletions


.github/workflows/live-test.yml

Lines changed: 1 addition & 1 deletion
@@ -32,7 +32,7 @@ jobs:
       - name: Run live tests
         run: dotnet test ./tests/OpenAI.Tests.csproj
           --configuration Release
-          --filter="TestCategory!=Smoke&TestCategory!=Assistants&TestCategory!=StoredChat&TestCategory!=Images&TestCategory!=Uploads&TestCategory!=Moderations&TestCategory!=FineTuning&TestCategory!=Conversation&TestCategory!=Manual"
+          --filter="TestCategory!=Smoke&TestCategory!=Assistants&TestCategory!=StoredChat&TestCategory!=Images&TestCategory!=Uploads&TestCategory!=Moderations&TestCategory!=FineTuning&TestCategory!=Conversation&TestCategory!=MCP&TestCategory!=Manual"
           --logger "trx;LogFilePrefix=live"
           --results-directory ${{github.workspace}}/artifacts/test-results
           ${{ env.version_suffix_args}}

.github/workflows/release.yml

Lines changed: 1 addition & 1 deletion
@@ -55,7 +55,7 @@ jobs:
       - name: Run Live Tests
         run: dotnet test ./tests/OpenAI.Tests.csproj
           --configuration Release
-          --filter="TestCategory!=Smoke&TestCategory!=Assistants&TestCategory!=StoredChat&TestCategory!=Images&TestCategory!=Uploads&TestCategory!=Moderations&TestCategory!=FineTuning&TestCategory!=Conversation&TestCategory!=Manual"
+          --filter="TestCategory!=Smoke&TestCategory!=Assistants&TestCategory!=StoredChat&TestCategory!=Images&TestCategory!=Uploads&TestCategory!=Moderations&TestCategory!=FineTuning&TestCategory!=Conversation&TestCategory!=MCP&TestCategory!=Manual"
           --logger "trx;LogFilePrefix=live"
           --results-directory ${{ github.workspace }}/artifacts/test-results
           ${{ env.version_suffix_args }}

api/OpenAI.net8.0.cs

Lines changed: 186 additions & 2 deletions

api/OpenAI.netstandard2.0.cs

Lines changed: 168 additions & 2 deletions

examples/Responses/Example01_SimpleResponse.cs

Lines changed: 1 addition & 1 deletion
@@ -13,7 +13,7 @@ public partial class ResponseExamples
     [Test]
     public void Example01_SimpleResponse()
     {
-        OpenAIResponseClient client = new(model: "gpt-4o", apiKey: Environment.GetEnvironmentVariable("OPENAI_API_KEY"));
+        OpenAIResponseClient client = new(model: "gpt-5", apiKey: Environment.GetEnvironmentVariable("OPENAI_API_KEY"));

         OpenAIResponse response = client.CreateResponse("Say 'this is a test.'");

examples/Responses/Example01_SimpleResponseAsync.cs

Lines changed: 1 addition & 1 deletion
@@ -14,7 +14,7 @@ public partial class ResponseExamples
     [Test]
     public async Task Example01_SimpleResponseAsync()
     {
-        OpenAIResponseClient client = new(model: "gpt-4o", apiKey: Environment.GetEnvironmentVariable("OPENAI_API_KEY"));
+        OpenAIResponseClient client = new(model: "gpt-5", apiKey: Environment.GetEnvironmentVariable("OPENAI_API_KEY"));

         OpenAIResponse response = await client.CreateResponseAsync("Say 'this is a test.'");

examples/Responses/Example02_SimpleResponseStreaming.cs

Lines changed: 1 addition & 1 deletion
@@ -14,7 +14,7 @@ public partial class ResponseExamples
     [Test]
     public void Example02_SimpleResponseStreaming()
     {
-        OpenAIResponseClient client = new(model: "gpt-4o-mini", apiKey: Environment.GetEnvironmentVariable("OPENAI_API_KEY"));
+        OpenAIResponseClient client = new(model: "gpt-5", apiKey: Environment.GetEnvironmentVariable("OPENAI_API_KEY"));

         CollectionResult<StreamingResponseUpdate> responseUpdates = client.CreateResponseStreaming("Say 'this is a test.'");

examples/Responses/Example02_SimpleResponseStreamingAsync.cs

Lines changed: 1 addition & 1 deletion
@@ -16,7 +16,7 @@ public partial class ResponseExamples
     [Test]
     public async Task Example02_SimpleResponseStreamingAsync()
     {
-        OpenAIResponseClient client = new(model: "gpt-4o-mini", apiKey: Environment.GetEnvironmentVariable("OPENAI_API_KEY"));
+        OpenAIResponseClient client = new(model: "gpt-5", apiKey: Environment.GetEnvironmentVariable("OPENAI_API_KEY"));

         AsyncCollectionResult<StreamingResponseUpdate> responseUpdates = client.CreateResponseStreamingAsync("Say 'this is a test.'");

examples/Responses/Example03_FunctionCalling.cs

Lines changed: 165 additions & 0 deletions
@@ -0,0 +1,165 @@
using NUnit.Framework;
using OpenAI.Responses;
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text.Json;

namespace OpenAI.Examples;

// This example uses experimental APIs which are subject to change. To use experimental APIs,
// please acknowledge their experimental status by suppressing the corresponding warning.
#pragma warning disable OPENAI001

public partial class ResponseExamples
{
    #region
    private static string GetCurrentLocation()
    {
        // Call the location API here.
        return "San Francisco";
    }

    private static string GetCurrentWeather(string location, string unit = "celsius")
    {
        // Call the weather API here.
        return $"31 {unit}";
    }
    #endregion

    #region
    private static readonly FunctionTool getCurrentLocationTool = ResponseTool.CreateFunctionTool(
        functionName: nameof(GetCurrentLocation),
        functionDescription: "Get the user's current location",
        functionParameters: null,
        strictModeEnabled: false
    );

    private static readonly FunctionTool getCurrentWeatherTool = ResponseTool.CreateFunctionTool(
        functionName: nameof(GetCurrentWeather),
        functionDescription: "Get the current weather in a given location",
        functionParameters: BinaryData.FromBytes("""
            {
                "type": "object",
                "properties": {
                    "location": {
                        "type": "string",
                        "description": "The city and state, e.g. Boston, MA"
                    },
                    "unit": {
                        "type": "string",
                        "enum": [ "celsius", "fahrenheit" ],
                        "description": "The temperature unit to use. Infer this from the specified location."
                    }
                },
                "required": [ "location" ]
            }
            """u8.ToArray()),
        strictModeEnabled: false
    );
    #endregion

    [Test]
    public void Example03_FunctionCalling()
    {
        OpenAIResponseClient client = new("gpt-5", Environment.GetEnvironmentVariable("OPENAI_API_KEY"));

        List<ResponseItem> inputItems =
        [
            ResponseItem.CreateUserMessageItem("What's the weather like today for my current location?"),
        ];

        ResponseCreationOptions options = new()
        {
            Tools = { getCurrentLocationTool, getCurrentWeatherTool },
        };

        PrintMessageItems(inputItems.OfType<MessageResponseItem>());

        bool requiresAction;

        do
        {
            requiresAction = false;
            OpenAIResponse response = client.CreateResponse(inputItems, options);

            inputItems.AddRange(response.OutputItems);

            foreach (ResponseItem outputItem in response.OutputItems)
            {
                if (outputItem is FunctionCallResponseItem functionCall)
                {
                    switch (functionCall.FunctionName)
                    {
                        case nameof(GetCurrentLocation):
                            {
                                string functionOutput = GetCurrentLocation();
                                inputItems.Add(new FunctionCallOutputResponseItem(functionCall.CallId, functionOutput));
                                break;
                            }

                        case nameof(GetCurrentWeather):
                            {
                                // The arguments that the model wants to use to call the function are specified as a
                                // stringified JSON object based on the schema defined in the tool definition. Note that
                                // the model may hallucinate arguments too. Consequently, it is important to do the
                                // appropriate parsing and validation before calling the function.
                                using JsonDocument argumentsJson = JsonDocument.Parse(functionCall.FunctionArguments);
                                bool hasLocation = argumentsJson.RootElement.TryGetProperty("location", out JsonElement location);
                                bool hasUnit = argumentsJson.RootElement.TryGetProperty("unit", out JsonElement unit);

                                if (!hasLocation)
                                {
                                    throw new ArgumentNullException(nameof(location), "The location argument is required.");
                                }

                                string functionOutput = hasUnit
                                    ? GetCurrentWeather(location.GetString(), unit.GetString())
                                    : GetCurrentWeather(location.GetString());
                                inputItems.Add(new FunctionCallOutputResponseItem(functionCall.CallId, functionOutput));
                                break;
                            }

                        default:
                            {
                                // Handle other unexpected calls.
                                throw new NotImplementedException();
                            }
                    }

                    requiresAction = true;
                    break;
                }
            }

            PrintMessageItems(response.OutputItems.OfType<MessageResponseItem>());

        } while (requiresAction);
    }

    private void PrintMessageItems(IEnumerable<ResponseItem> messageItems)
    {
        foreach (MessageResponseItem messageItem in messageItems)
        {
            switch (messageItem.Role)
            {
                case MessageRole.User:
                    Console.WriteLine($"[USER]:");
                    Console.WriteLine($"{messageItem.Content[0].Text}");
                    Console.WriteLine();
                    break;

                case MessageRole.Assistant:
                    Console.WriteLine($"[ASSISTANT]:");
                    Console.WriteLine($"{messageItem.Content[0].Text}");
                    Console.WriteLine();
                    break;

                default:
                    break;
            }
        }
    }
}

#pragma warning restore OPENAI001

examples/Responses/Example03_FunctionCallingAsync.cs

Lines changed: 98 additions & 0 deletions
@@ -0,0 +1,98 @@
using NUnit.Framework;
using OpenAI.Responses;
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text.Json;
using System.Threading.Tasks;

namespace OpenAI.Examples;

// This example uses experimental APIs which are subject to change. To use experimental APIs,
// please acknowledge their experimental status by suppressing the corresponding warning.
#pragma warning disable OPENAI001

public partial class ResponseExamples
{
    // See Example03_FunctionCalling.cs for the tool and function definitions.

    [Test]
    public async Task Example03_FunctionCallingAsync()
    {
        OpenAIResponseClient client = new("gpt-5", Environment.GetEnvironmentVariable("OPENAI_API_KEY"));

        List<ResponseItem> inputItems =
        [
            ResponseItem.CreateUserMessageItem("What's the weather like today for my current location?"),
        ];

        ResponseCreationOptions options = new()
        {
            Tools = { getCurrentLocationTool, getCurrentWeatherTool },
        };

        PrintMessageItems(inputItems.OfType<MessageResponseItem>());

        bool requiresAction;

        do
        {
            requiresAction = false;
            OpenAIResponse response = await client.CreateResponseAsync(inputItems, options);

            inputItems.AddRange(response.OutputItems);

            foreach (ResponseItem outputItem in response.OutputItems)
            {
                if (outputItem is FunctionCallResponseItem functionCall)
                {
                    switch (functionCall.FunctionName)
                    {
                        case nameof(GetCurrentLocation):
                            {
                                string functionOutput = GetCurrentLocation();
                                inputItems.Add(new FunctionCallOutputResponseItem(functionCall.CallId, functionOutput));
                                break;
                            }

                        case nameof(GetCurrentWeather):
                            {
                                // The arguments that the model wants to use to call the function are specified as a
                                // stringified JSON object based on the schema defined in the tool definition. Note that
                                // the model may hallucinate arguments too. Consequently, it is important to do the
                                // appropriate parsing and validation before calling the function.
                                using JsonDocument argumentsJson = JsonDocument.Parse(functionCall.FunctionArguments);
                                bool hasLocation = argumentsJson.RootElement.TryGetProperty("location", out JsonElement location);
                                bool hasUnit = argumentsJson.RootElement.TryGetProperty("unit", out JsonElement unit);

                                if (!hasLocation)
                                {
                                    throw new ArgumentNullException(nameof(location), "The location argument is required.");
                                }

                                string functionOutput = hasUnit
                                    ? GetCurrentWeather(location.GetString(), unit.GetString())
                                    : GetCurrentWeather(location.GetString());
                                inputItems.Add(new FunctionCallOutputResponseItem(functionCall.CallId, functionOutput));
                                break;
                            }

                        default:
                            {
                                // Handle other unexpected calls.
                                throw new NotImplementedException();
                            }
                    }

                    requiresAction = true;
                    break;
                }
            }

            PrintMessageItems(response.OutputItems.OfType<MessageResponseItem>());

        } while (requiresAction);
    }
}

#pragma warning restore OPENAI001
