Skip to content

Commit a632294

Browse files
committed
Merge branch 'main' of https://github.com/openai/openai-dotnet into shreja/AddJsonPath
2 parents 95f6e01 + 2d3f49a commit a632294

18 files changed

+429
-13
lines changed

codegen/generator/src/OpenAI.Library.Plugin.csproj

Lines changed: 2 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -8,7 +8,7 @@
88
</PropertyGroup>
99

1010
<ItemGroup>
11-
<PackageReference Include="Microsoft.TypeSpec.Generator.ClientModel" Version="1.0.0-alpha.20250929.2" />
11+
<PackageReference Include="Microsoft.TypeSpec.Generator.ClientModel" Version="1.0.0-alpha.20251001.2" />
1212
</ItemGroup>
1313

1414
<!-- Copy output to package dist path for local execution and -->
@@ -26,3 +26,4 @@
2626

2727

2828

29+

codegen/package.json

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -30,7 +30,7 @@
3030
"dependencies": {
3131
"@open-ai/plugin": "file:",
3232
"@azure-tools/typespec-client-generator-core": "0.60.0",
33-
"@typespec/http-client-csharp": "1.0.0-alpha.20250929.2",
33+
"@typespec/http-client-csharp": "1.0.0-alpha.20251001.2",
3434
"@typespec/http": "1.4.0",
3535
"@typespec/openapi": "1.4.0"
3636
},
Lines changed: 39 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,39 @@
// SAMPLE: Analyzes image by passing a base64-encoded image through Responses API
// PAGE: https://platform.openai.com/docs/guides/images-vision?api-mode=responses&format=base64-encoded#analyze-images
// GUIDANCE: Instructions to run this code: https://aka.ms/oai/net/start
#pragma warning disable OPENAI001

#:package OpenAI@2.*
#:property PublishAot=false

using OpenAI.Responses;

// Authenticate with the API key from the environment.
string key = Environment.GetEnvironmentVariable("OPENAI_API_KEY")!;
OpenAIResponseClient client = new(model: "gpt-5", apiKey: key);

Uri imageUrl = new("https://openai-documentation.vercel.app/images/cat_and_otter.png");
using HttpClient http = new();

// Download an image as stream
using var stream = await http.GetStreamAsync(imageUrl);

// The script is already async (it awaits the HTTP download above), so use the
// async model call as well instead of the synchronous blocking overload.
OpenAIResponse response1 = (OpenAIResponse)await client.CreateResponseAsync([
    ResponseItem.CreateUserMessageItem([
        ResponseContentPart.CreateInputTextPart("What is in this image?"),
        ResponseContentPart.CreateInputImagePart(BinaryData.FromStream(stream), "image/png")
    ])
]);

Console.WriteLine($"From image stream: {response1.GetOutputText()}");

// Download an image as byte array
byte[] bytes = await http.GetByteArrayAsync(imageUrl);

OpenAIResponse response2 = (OpenAIResponse)await client.CreateResponseAsync([
    ResponseItem.CreateUserMessageItem([
        ResponseContentPart.CreateInputTextPart("What is in this image?"),
        ResponseContentPart.CreateInputImagePart(BinaryData.FromBytes(bytes), "image/png")
    ])
]);

Console.WriteLine($"From byte array: {response2.GetOutputText()}");
Lines changed: 32 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,32 @@
// SAMPLE: Analyzes file from a file upload through Responses API
// PAGE: https://platform.openai.com/docs/guides/images-vision?api-mode=responses&format=file#analyze-images
// GUIDANCE: Instructions to run this code: https://aka.ms/oai/net/start
#pragma warning disable OPENAI001

#:package OpenAI@2.*
#:property PublishAot=false

using OpenAI.Files;
using OpenAI.Responses;

// Authenticate with the API key from the environment.
string key = Environment.GetEnvironmentVariable("OPENAI_API_KEY")!;
OpenAIResponseClient client = new(model: "gpt-5", apiKey: key);

string filename = "cat_and_otter.png";
// BUG FIX: the interpolation placeholder was missing from the URL, so the
// request did not point at the file named above.
Uri imageUrl = new($"https://openai-documentation.vercel.app/images/{filename}");
using var http = new HttpClient();

// Download an image as stream
using var stream = await http.GetStreamAsync(imageUrl);

// Upload the image with the Vision purpose so it can be referenced by file ID.
OpenAIFileClient files = new(key);
OpenAIFile file = await files.UploadFileAsync(BinaryData.FromStream(stream), filename, FileUploadPurpose.Vision);

// Use the async model call to stay async end-to-end with the awaited calls above.
OpenAIResponse response = (OpenAIResponse)await client.CreateResponseAsync([
    ResponseItem.CreateUserMessageItem([
        ResponseContentPart.CreateInputTextPart("what's in this image?"),
        ResponseContentPart.CreateInputImagePart(file.Id)
    ])
]);

Console.WriteLine(response.GetOutputText());
Lines changed: 23 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,23 @@
// SAMPLE: Analyzes image by passing an image URL through Responses API
// PAGE: https://platform.openai.com/docs/guides/images-vision?api-mode=responses&format=url#analyze-images
// GUIDANCE: Instructions to run this code: https://aka.ms/oai/net/start
#pragma warning disable OPENAI001

#:package OpenAI@2.*
#:property PublishAot=false

using OpenAI.Responses;

// Authenticate with the API key from the environment.
string apiKey = Environment.GetEnvironmentVariable("OPENAI_API_KEY")!;
OpenAIResponseClient client = new(model: "gpt-5", apiKey: apiKey);

// Publicly reachable image; the service fetches it directly from this URL.
Uri imageUri = new("https://upload.wikimedia.org/wikipedia/commons/thumb/d/dd/Gfp-wisconsin-madison-the-nature-boardwalk.jpg/2560px-Gfp-wisconsin-madison-the-nature-boardwalk.jpg");

// Build a single user message pairing the question with the image reference.
ResponseItem userMessage = ResponseItem.CreateUserMessageItem([
    ResponseContentPart.CreateInputTextPart("What is in this image?"),
    ResponseContentPart.CreateInputImagePart(imageUri)
]);

OpenAIResponse response = (OpenAIResponse)client.CreateResponse([userMessage]);

Console.WriteLine(response.GetOutputText());
Lines changed: 36 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,36 @@
// SAMPLE: Generate response from remote MCP with approval through Responses API
// PAGE: https://platform.openai.com/docs/guides/tools-connectors-mcp#approvals
// GUIDANCE: Instructions to run this code: https://aka.ms/oai/net/start
#pragma warning disable OPENAI001

#:package OpenAI@2.*
#:property PublishAot=false

using OpenAI.Responses;

// Authenticate with the API key from the environment.
string key = Environment.GetEnvironmentVariable("OPENAI_API_KEY")!;
OpenAIResponseClient client = new(model: "gpt-5", apiKey: key);

// Register the MCP server and require approval before every tool call.
ResponseCreationOptions options = new();
options.Tools.Add(ResponseTool.CreateMcpTool(
    serverLabel: "dmcp",
    serverUri: new Uri("https://dmcp-server.deno.dev/sse"),
    toolCallApprovalPolicy: new McpToolCallApprovalPolicy(GlobalMcpToolCallApprovalPolicy.AlwaysRequireApproval)
));

// STEP 1: Create response that requests tool call approval
OpenAIResponse response1 = (OpenAIResponse)client.CreateResponse([
    ResponseItem.CreateUserMessageItem([
        ResponseContentPart.CreateInputTextPart("Roll 2d4+1")
    ])
], options);

// BUG FIX: the original used `as` plus null-forgiving `!`, which turns an
// unexpected output shape into a NullReferenceException at the `.Id` access.
// Fail with a clear message instead when no approval request was returned.
if (response1.OutputItems.Last() is not McpToolCallApprovalRequestItem approvalRequestItem)
{
    throw new InvalidOperationException("Expected the last output item to be an MCP tool call approval request.");
}

// STEP 2: Approve the tool call request and get final response
options.PreviousResponseId = response1.Id;
OpenAIResponse response2 = (OpenAIResponse)client.CreateResponse([
    ResponseItem.CreateMcpApprovalResponseItem(approvalRequestItem.Id, approved: true),
], options);

Console.WriteLine(response2.GetOutputText());
Lines changed: 28 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,28 @@
// SAMPLE: Generate response from remote MCP with no approval through Responses API
// PAGE: https://platform.openai.com/docs/guides/tools-connectors-mcp#approvals
// GUIDANCE: Instructions to run this code: https://aka.ms/oai/net/start
#pragma warning disable OPENAI001

#:package OpenAI@2.*
#:property PublishAot=false

using OpenAI.Responses;

// Authenticate with the API key from the environment.
string apiKey = Environment.GetEnvironmentVariable("OPENAI_API_KEY")!;
OpenAIResponseClient client = new(model: "gpt-5", apiKey: apiKey);

// Register the DeepWiki MCP server, restricted to two tools, and skip
// per-call approval prompts entirely.
ResponseCreationOptions options = new()
{
    Tools =
    {
        ResponseTool.CreateMcpTool(
            serverLabel: "deepwiki",
            serverUri: new Uri("https://mcp.deepwiki.com/mcp"),
            allowedTools: new McpToolFilter() { ToolNames = { "ask_question", "read_wiki_structure" } },
            toolCallApprovalPolicy: new McpToolCallApprovalPolicy(GlobalMcpToolCallApprovalPolicy.NeverRequireApproval))
    }
};

ResponseItem question = ResponseItem.CreateUserMessageItem([
    ResponseContentPart.CreateInputTextPart("What transport protocols does the 2025-03-26 version of the MCP spec (modelcontextprotocol/modelcontextprotocol) support?")
]);

OpenAIResponse response = (OpenAIResponse)client.CreateResponse([question], options);

Console.WriteLine(response.GetOutputText());
Lines changed: 28 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,28 @@
// SAMPLE: Generate response from remote MCP through Responses API
// PAGE: https://platform.openai.com/docs/guides/tools-connectors-mcp#authentication
// GUIDANCE: Instructions to run this code: https://aka.ms/oai/net/start
#pragma warning disable OPENAI001

#:package OpenAI@2.*
#:property PublishAot=false

using OpenAI.Responses;

// Two secrets come from the environment: the MCP server's OAuth token and
// the OpenAI API key.
string stripeToken = Environment.GetEnvironmentVariable("STRIPE_OAUTH_ACCESS_TOKEN")!;
string apiKey = Environment.GetEnvironmentVariable("OPENAI_API_KEY")!;
OpenAIResponseClient client = new(model: "gpt-5", apiKey: apiKey);

// Register the Stripe MCP server, authenticating with the OAuth token.
ResponseCreationOptions options = new()
{
    Tools =
    {
        ResponseTool.CreateMcpTool(
            serverLabel: "stripe",
            serverUri: new Uri("https://mcp.stripe.com"),
            authorizationToken: stripeToken)
    }
};

ResponseItem request = ResponseItem.CreateUserMessageItem([
    ResponseContentPart.CreateInputTextPart("Create a payment link for $20")
]);

OpenAIResponse response = (OpenAIResponse)client.CreateResponse([request], options);

Console.WriteLine(response.GetOutputText());
Lines changed: 28 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,28 @@
// SAMPLE: Generate response from a specific tool of a remote MCP through Responses API
// PAGE: https://platform.openai.com/docs/guides/tools-connectors-mcp#filtering-tools
// GUIDANCE: Instructions to run this code: https://aka.ms/oai/net/start
#pragma warning disable OPENAI001

#:package OpenAI@2.*
#:property PublishAot=false

using OpenAI.Responses;

// Authenticate with the API key from the environment.
string apiKey = Environment.GetEnvironmentVariable("OPENAI_API_KEY")!;
OpenAIResponseClient client = new(model: "gpt-5", apiKey: apiKey);

// Expose only the "roll" tool from the dice MCP server and do not require
// approval for its calls.
ResponseCreationOptions options = new()
{
    Tools =
    {
        ResponseTool.CreateMcpTool(
            serverLabel: "dmcp",
            serverUri: new Uri("https://dmcp-server.deno.dev/sse"),
            allowedTools: new McpToolFilter() { ToolNames = { "roll" } },
            toolCallApprovalPolicy: new McpToolCallApprovalPolicy(GlobalMcpToolCallApprovalPolicy.NeverRequireApproval))
    }
};

ResponseItem rollRequest = ResponseItem.CreateUserMessageItem([
    ResponseContentPart.CreateInputTextPart("Roll 2d4+1")
]);

OpenAIResponse response = (OpenAIResponse)client.CreateResponse([rollRequest], options);

Console.WriteLine(response.GetOutputText());
Lines changed: 27 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,27 @@
// SAMPLE: Generate response from remote MCP through Responses API
// PAGE: https://platform.openai.com/docs/guides/tools-connectors-mcp?quickstart-panels=remote-mcp#quickstart
// GUIDANCE: Instructions to run this code: https://aka.ms/oai/net/start
#pragma warning disable OPENAI001

#:package OpenAI@2.*
#:property PublishAot=false

using OpenAI.Responses;

// Authenticate with the API key from the environment.
string apiKey = Environment.GetEnvironmentVariable("OPENAI_API_KEY")!;
OpenAIResponseClient client = new(model: "gpt-5", apiKey: apiKey);

// Register the dice MCP server without requiring tool call approval.
ResponseCreationOptions options = new()
{
    Tools =
    {
        ResponseTool.CreateMcpTool(
            serverLabel: "dmcp",
            serverUri: new Uri("https://dmcp-server.deno.dev/sse"),
            toolCallApprovalPolicy: new McpToolCallApprovalPolicy(GlobalMcpToolCallApprovalPolicy.NeverRequireApproval))
    }
};

ResponseItem rollRequest = ResponseItem.CreateUserMessageItem([
    ResponseContentPart.CreateInputTextPart("Roll 2d4+1")
]);

OpenAIResponse response = (OpenAIResponse)client.CreateResponse([rollRequest], options);

Console.WriteLine(response.GetOutputText());

0 commit comments

Comments
 (0)