@@ -10,6 +10,27 @@ public static IResourceBuilder<AIModel> AddAIModel(this IDistributedApplicationB
return builder.CreateResourceBuilder(model);
}

public static IResourceBuilder<AIModel> AddAIModel(this IDistributedApplicationBuilder builder, string name, IResourceBuilder<ParameterResource> endpoint, IResourceBuilder<ParameterResource> accessKey, IResourceBuilder<ParameterResource> model, IResourceBuilder<ParameterResource> provider)
{
var aiModel = new AIModel(name);
var resourceBuilder = builder.CreateResourceBuilder(aiModel);

resourceBuilder.Reset();

// See: https://github.com/dotnet/aspire/issues/7641
var csb = new ReferenceExpressionBuilder();
csb.Append($"Endpoint={endpoint.Resource};");
csb.Append($"AccessKey={accessKey.Resource};");
csb.Append($"Model={model.Resource};");
csb.Append($"Provider={provider.Resource};");
var cs = csb.Build();

// Use the resource as its own underlying resource and store the pre-built connection string;
// Build() will return it as-is because it already contains a Provider.
resourceBuilder.Resource.UnderlyingResource = resourceBuilder.Resource;
resourceBuilder.Resource.ConnectionString = cs;

return resourceBuilder;
}

public static IResourceBuilder<AIModel> RunAsOpenAI(this IResourceBuilder<AIModel> builder, string modelName, IResourceBuilder<ParameterResource> apiKey)
{
if (builder.ApplicationBuilder.ExecutionContext.IsRunMode)
@@ -256,6 +277,11 @@ public ReferenceExpression Build()
{
var connectionString = this.ConnectionString ?? throw new InvalidOperationException("No connection string available.");

// A connection string assembled by the parameter-based AddAIModel overload already embeds a Provider, so return it unchanged.
if (connectionString.ValueExpression.Contains("Provider="))
{
return connectionString;
}

if (this.Provider is null)
{
throw new InvalidOperationException("No provider configured.");
60 changes: 60 additions & 0 deletions dotnet/samples/AgentWebChat/AgentWebChat.AppHost/Program.cs
@@ -4,9 +4,69 @@

var builder = DistributedApplication.CreateBuilder(args);

/* Setup for actual application using existing Azure OpenAI resources
var azOpenAiResource = builder.AddParameterFromConfiguration("AzureOpenAIName", "AzureOpenAI:Name");
var azOpenAiResourceGroup = builder.AddParameterFromConfiguration("AzureOpenAIResourceGroup", "AzureOpenAI:ResourceGroup");
var chatModel = builder.AddAIModel("chat-model").AsAzureOpenAI("gpt-4o", o => o.AsExisting(azOpenAiResource, azOpenAiResourceGroup));
*/

// Setup for local development/testing with multiple providers for Sample application
#pragma warning disable ASPIREINTERACTION001 // Type is for evaluation purposes only and is subject to change or removal in future updates. Suppress this diagnostic to proceed.
var endpoint = builder.AddParameter("Endpoint")
.WithCustomInput(parameter => new InteractionInput()
{
Name = parameter.Name,
InputType = InputType.Text,
Value = "NaN",
Label = parameter.Name,
Placeholder = "The endpoint for the provider if applicable",
Description = "Endpoint for the provider. **Note:** If not applicable, enter 'NaN'",
EnableDescriptionMarkdown = true
});
var accessKey = builder.AddParameter("AccessKey")
.WithCustomInput(parameter => new InteractionInput()
{
Name = parameter.Name,
InputType = InputType.SecretText,
Label = parameter.Name,
Value = "NaN",
Placeholder = "The accessKey or apiKey for the provider if applicable",
Description = "AccessKey or API key for the provider. **Note:** If not applicable, enter 'NaN'",
EnableDescriptionMarkdown = true
});
var model = builder.AddParameter("Model")
.WithCustomInput(parameter => new InteractionInput()
{
Name = parameter.Name,
InputType = InputType.Text,
Label = parameter.Name,
Placeholder = "The AI model"
});
var provider = builder.AddParameter("Provider")
.WithCustomInput(parameter => new InteractionInput()
{
Name = parameter.Name,
InputType = InputType.Choice,
Options = new[]
{
KeyValuePair.Create("AzureOpenAI", "AzureOpenAI"),
KeyValuePair.Create("OpenAI", "OpenAI Compatible"),
KeyValuePair.Create("Ollama", "Ollama"),
},
Label = parameter.Name,
Placeholder = "The provider for the AI model",
Description = """
**OpenAI Compatible**: Requires `Model`, `Endpoint`, and `AccessKey`.

**AzureOpenAI**: Requires `Model`; DefaultAzureCredential is used for authentication.

**Ollama**: Requires `Model` and `Endpoint`.
""",
EnableDescriptionMarkdown = true
});
#pragma warning restore ASPIREINTERACTION001 // Type is for evaluation purposes only and is subject to change or removal in future updates. Suppress this diagnostic to proceed.

var chatModel = builder.AddAIModel("chat-model", endpoint, accessKey, model, provider);

var agentHost = builder.AddProject<Projects.AgentWebChat_AgentHost>("agenthost")
.WithHttpEndpoint(name: "devui")
94 changes: 94 additions & 0 deletions dotnet/samples/AgentWebChat/README.md
@@ -0,0 +1,94 @@
# AgentWebChat (Microsoft Agent Framework sample)

AgentWebChat is a small end-to-end sample that hosts AI agents in an ASP.NET Core service and chats with them from a simple web UI. The whole sample is orchestrated with **.NET Aspire** so you can run everything together and configure the model provider from the Aspire Dashboard.

## What’s in this sample

- **Agent host**: `AgentWebChat.AgentHost`
  - Hosts multiple agents and workflows.
  - Exposes:
    - Agent discovery endpoint at `/agents`
    - A2A endpoints (e.g., `/a2a/pirate`, `/a2a/knights-and-knaves`)
    - OpenAI-compatible endpoints (Responses + Chat Completions)
    - Dev UI at `/devui`
- **Web front end**: `AgentWebChat.Web`
  - Blazor Server UI that can talk to the agent host.
- **Aspire AppHost**: `AgentWebChat.AppHost`
  - Starts everything and surfaces **Provider/Model/Endpoint/AccessKey** as interactive parameters.

## Prerequisites

- The .NET SDK required by this repo (see `global.json` at the repo root).
- An AI provider you can access:
  - **Ollama** running locally, or
  - an **OpenAI-compatible** endpoint (OpenAI or a compatible gateway), or
  - **Azure OpenAI** (see notes below).

## Run it (recommended: via Aspire)

From the `dotnet` folder:

```powershell
dotnet run --project .\samples\AgentWebChat\AgentWebChat.AppHost\AgentWebChat.AppHost.csproj
```

Or set `AgentWebChat.AppHost` as the startup project in your IDE and run it.

Then:

1. Open the **Aspire Dashboard** (a link is printed in the console).
2. When prompted, provide the model configuration parameters (details below).
3. In the Dashboard, open the `webfrontend` resource endpoint to launch the chat UI.
4. Optional: open the `agenthost` resource endpoint to view:
   - `/devui` (Dev UI)
   - `/swagger` (OpenAPI UI)

## Configure model settings in the Aspire Dashboard

This sample uses Aspire **interactive parameters** (custom inputs) defined in `AgentWebChat.AppHost`.

When you run the AppHost, the Dashboard will prompt for:

- **Provider**: `OpenAI` (OpenAI-compatible), `AzureOpenAI`, or `Ollama`
- **Model**: model name (or deployment name, depending on provider)
- **Endpoint**: provider endpoint URL
- **AccessKey**: API key (secret)

Provider guidance:

- **Ollama**
  - Endpoint: `http://localhost:11434`
  - Model: e.g. `llama3.1` or `phi3.5`
  - AccessKey: not used (you can enter any placeholder)

- **OpenAI (OpenAI-compatible)**
  - Endpoint: e.g. `https://api.openai.com/v1/` (or your compatible gateway)
  - AccessKey: your API key
  - Model: e.g. `gpt-4o-mini` (whatever your endpoint supports)

- **Azure OpenAI**
  - Model: deployment name (e.g. `gpt-4o-mini`)
  - Authentication is handled with `DefaultAzureCredential`
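
Under the hood, the AppHost combines these four values into a single connection string for the `chat-model` resource (via the sample's `AddAIModel` helper), roughly `Endpoint=...;AccessKey=...;Model=...;Provider=...`. As a minimal sketch (assuming the AppHost passes `chat-model` to the agent host with `WithReference`, which is an assumption about wiring not shown here), a consuming project could read the value back like this:

```csharp
// Sketch only: read and parse the "chat-model" connection string assembled by the AppHost.
var builder = WebApplication.CreateBuilder(args);

var connectionString = builder.Configuration.GetConnectionString("chat-model")
    ?? throw new InvalidOperationException("Connection string 'chat-model' not found.");

// DbConnectionStringBuilder understands the "Key=Value;" format used here.
var parts = new System.Data.Common.DbConnectionStringBuilder { ConnectionString = connectionString };

var provider = (string)parts["Provider"]; // "OpenAI", "AzureOpenAI", or "Ollama"
var model    = (string)parts["Model"];
var endpoint = (string)parts["Endpoint"]; // may be "NaN" when not applicable
```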

## What this sample demonstrates

- Using Microsoft Agent Framework in a **Web App**
- **Hosting agents** with Microsoft Agent Framework in ASP.NET Core (`builder.AddAIAgent(...)`).
- **Different interaction surfaces for the same agents**:
  - A2A endpoints (`app.MapA2A(...)`)
  - OpenAI-compatible endpoints (`app.MapOpenAIResponses()`, `app.MapOpenAIChatCompletions(...)`; see the client sketch after this list)
  - Agent discovery (`app.MapAgentDiscovery("/agents")`)
- **Tools / function calling** by attaching custom AI tools to an agent.
- **Workflows** with multiple agents:
  - Sequential workflows
  - Concurrent workflows
  - A multi-agent “Knights & Knaves” example.
- **In-memory thread storage** for quick local demos.
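
Because the agent host speaks the OpenAI wire format, a standard OpenAI client can point at it. The sketch below uses the official OpenAI .NET SDK; the base URL and the use of the agent name as the model name are assumptions for illustration only, so check the `agenthost` endpoint in the Aspire Dashboard and the agent host's routes for the real values.

```csharp
// Sketch: call the agent host's OpenAI-compatible Chat Completions endpoint.
// The base URL and the "pirate" model/agent name below are hypothetical.
using System.ClientModel;
using OpenAI;
using OpenAI.Chat;

var options = new OpenAIClientOptions
{
    // Hypothetical; use the agenthost endpoint shown in the Aspire Dashboard.
    Endpoint = new Uri("http://localhost:5000/openai/v1")
};

// The agent host controls API-key handling; a placeholder key may be enough locally.
var chatClient = new ChatClient("pirate", new ApiKeyCredential("placeholder"), options);

ChatCompletion completion = chatClient.CompleteChat("Where be the treasure?");
Console.WriteLine(completion.Content[0].Text);
```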

## If you’re new to Microsoft Agent Framework

A quick mental model:

- An **agent** is an LLM component with instructions (a system prompt), state, and optional tools.
- A **tool** is a function the agent can call to do real work (your code).
- A **workflow** composes multiple agents (sequentially or concurrently) to solve a task.
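
To make the tool idea concrete, here is a minimal sketch using the `Microsoft.Extensions.AI` abstractions that the Agent Framework builds on; the weather function is illustrative and not part of the sample.

```csharp
using System.ComponentModel;
using Microsoft.Extensions.AI;

// Wrap a plain method as an AIFunction so an agent can invoke it through function calling.
AIFunction weatherTool = AIFunctionFactory.Create(WeatherTools.GetWeather);
Console.WriteLine(weatherTool.Description);

static class WeatherTools
{
    // A "tool" is ordinary code with a description the model uses to decide when to call it.
    [Description("Gets the current weather for a city.")]
    public static string GetWeather(string city) => $"It is always sunny in {city}.";
}
```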