Commit 17e5608

Add complete switch model providers article with working code examples
Co-authored-by: luisquintanilla <[email protected]>
1 parent cdfe877 commit 17e5608

14 files changed: 717 additions, 0 deletions
Lines changed: 19 additions & 0 deletions
<Project Sdk="Microsoft.NET.Sdk">

  <PropertyGroup>
    <OutputType>Exe</OutputType>
    <TargetFramework>net8.0</TargetFramework>
    <ImplicitUsings>enable</ImplicitUsings>
    <Nullable>enable</Nullable>
  </PropertyGroup>

  <ItemGroup>
    <PackageReference Include="Microsoft.Extensions.AI" Version="9.0.0-preview.9.24507.7" />
    <PackageReference Include="Microsoft.Extensions.AI.AzureAIInference" Version="9.1.0-preview.1.25064.3" />
    <PackageReference Include="Microsoft.Extensions.AI.OpenAI" Version="9.9.0-preview.1.25458.4" />
    <PackageReference Include="Azure.AI.OpenAI" Version="2.1.0" />
    <PackageReference Include="Azure.Identity" Version="1.13.1" />
    <PackageReference Include="OllamaSharp" Version="5.3.6" />
  </ItemGroup>

</Project>
Lines changed: 122 additions & 0 deletions
using Microsoft.Extensions.AI;
using Azure.AI.OpenAI;
using Azure.Identity;
using OllamaSharp;
using System.Diagnostics;

// Define providers for evaluation
var providers = new Dictionary<string, IChatClient>
{
    ["Ollama-Llama3.1"] = new OllamaApiClient(new Uri("http://localhost:11434"), "llama3.1"),
    ["Azure-GPT4"] = new AzureOpenAIClient(
        new Uri("YOUR-AZURE-OPENAI-ENDPOINT"),
        new DefaultAzureCredential())
        .GetChatClient("gpt-4")
        .AsIChatClient(),
    ["Azure-GPT35"] = new AzureOpenAIClient(
        new Uri("YOUR-AZURE-OPENAI-ENDPOINT"),
        new DefaultAzureCredential())
        .GetChatClient("gpt-35-turbo")
        .AsIChatClient()
};

// Test prompts for evaluation
string[] testPrompts = [
    "Explain quantum computing in simple terms.",
    "Write a haiku about artificial intelligence.",
    "What are the pros and cons of renewable energy?",
    "How does machine learning work?",
    "Describe the water cycle."
];

// Evaluation results
var results = new List<EvaluationResult>();

Console.WriteLine("Starting batch evaluation across providers...\n");

foreach (var prompt in testPrompts)
{
    Console.WriteLine($"Evaluating prompt: \"{prompt}\"");
    Console.WriteLine(new string('-', 50));

    foreach (var (providerName, client) in providers)
    {
        try
        {
            var stopwatch = Stopwatch.StartNew();
            var response = await client.GetResponseAsync(prompt);
            stopwatch.Stop();

            var result = new EvaluationResult
            {
                Prompt = prompt,
                Provider = providerName,
                Response = response.ToString(),
                ResponseTime = stopwatch.Elapsed,
                InputTokens = (int)(response.Usage?.InputTokenCount ?? 0),
                OutputTokens = (int)(response.Usage?.OutputTokenCount ?? 0),
                Success = true
            };

            results.Add(result);

            Console.WriteLine($"{providerName}:");
            Console.WriteLine($"  Response time: {stopwatch.ElapsedMilliseconds}ms");
            Console.WriteLine($"  Tokens: {result.InputTokens} in, {result.OutputTokens} out");
            Console.WriteLine($"  Response: {response.ToString()[..Math.Min(response.ToString().Length, 100)]}...");
        }
        catch (Exception ex)
        {
            var result = new EvaluationResult
            {
                Prompt = prompt,
                Provider = providerName,
                Response = "",
                ResponseTime = TimeSpan.Zero,
                InputTokens = 0,
                OutputTokens = 0,
                Success = false,
                ErrorMessage = ex.Message
            };

            results.Add(result);

            Console.WriteLine($"{providerName}: ERROR - {ex.Message}");
        }
    }

    Console.WriteLine();
}

// Generate summary report
Console.WriteLine("EVALUATION SUMMARY");
Console.WriteLine(new string('=', 50));

var groupedResults = results.GroupBy(r => r.Provider);

foreach (var group in groupedResults)
{
    var successfulResults = group.Where(r => r.Success).ToList();

    Console.WriteLine($"\n{group.Key}:");
    Console.WriteLine($"  Success rate: {successfulResults.Count}/{group.Count()} ({(double)successfulResults.Count / group.Count() * 100:F1}%)");

    if (successfulResults.Any())
    {
        Console.WriteLine($"  Avg response time: {successfulResults.Average(r => r.ResponseTime.TotalMilliseconds):F0}ms");
        Console.WriteLine($"  Avg output tokens: {successfulResults.Average(r => r.OutputTokens):F0}");
    }
}

// Data model for evaluation results
public class EvaluationResult
{
    public string Prompt { get; set; } = "";
    public string Provider { get; set; } = "";
    public string Response { get; set; } = "";
    public TimeSpan ResponseTime { get; set; }
    public int InputTokens { get; set; }
    public int OutputTokens { get; set; }
    public bool Success { get; set; }
    public string? ErrorMessage { get; set; }
}
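A small follow-on sketch, not part of this commit: because the batch run already collects everything in `results`, the list could be persisted with System.Text.Json (adding `using System.Text.Json;` at the top of the file) so separate runs can be compared offline. The output file name is arbitrary, and the statements would go before the EvaluationResult class declaration.

    // Hypothetical addition, not in the commit: persist the evaluation results as JSON.
    var json = JsonSerializer.Serialize(results, new JsonSerializerOptions { WriteIndented = true });
    await File.WriteAllTextAsync("evaluation-results.json", json);
    Console.WriteLine($"Saved {results.Count} results to evaluation-results.json");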
Lines changed: 29 additions & 0 deletions
<Project Sdk="Microsoft.NET.Sdk">

  <PropertyGroup>
    <OutputType>Exe</OutputType>
    <TargetFramework>net8.0</TargetFramework>
    <ImplicitUsings>enable</ImplicitUsings>
    <Nullable>enable</Nullable>
  </PropertyGroup>

  <ItemGroup>
    <PackageReference Include="Microsoft.Extensions.AI" Version="9.0.0-preview.9.24507.7" />
    <PackageReference Include="Microsoft.Extensions.AI.AzureAIInference" Version="9.1.0-preview.1.25064.3" />
    <PackageReference Include="Microsoft.Extensions.AI.OpenAI" Version="9.9.0-preview.1.25458.4" />
    <PackageReference Include="Microsoft.Extensions.Configuration" Version="9.0.0" />
    <PackageReference Include="Microsoft.Extensions.Configuration.Binder" Version="9.0.9" />
    <PackageReference Include="Microsoft.Extensions.Configuration.Json" Version="9.0.0" />
    <PackageReference Include="Microsoft.Extensions.Configuration.EnvironmentVariables" Version="9.0.0" />
    <PackageReference Include="Azure.AI.OpenAI" Version="2.1.0" />
    <PackageReference Include="Azure.Identity" Version="1.13.1" />
    <PackageReference Include="OllamaSharp" Version="5.3.6" />
  </ItemGroup>

  <ItemGroup>
    <None Update="appsettings.json">
      <CopyToOutputDirectory>PreserveNewest</CopyToOutputDirectory>
    </None>
  </ItemGroup>

</Project>
Lines changed: 79 additions & 0 deletions
using Microsoft.Extensions.AI;
using Microsoft.Extensions.Configuration;
using Azure.AI.OpenAI;
using Azure.Identity;
using OllamaSharp;

// Build configuration
var configuration = new ConfigurationBuilder()
    .SetBasePath(Directory.GetCurrentDirectory())
    .AddJsonFile("appsettings.json", optional: false, reloadOnChange: true)
    .AddEnvironmentVariables()
    .Build();

// Get AI provider configuration
var aiConfig = configuration.GetSection("AIProviders");
var defaultProvider = aiConfig["DefaultProvider"] ?? "Ollama";

// Create provider factory
IChatClient CreateChatClient(string providerName)
{
    var providerSection = aiConfig.GetSection($"Providers:{providerName}");
    var providerType = providerSection["Type"];

    return providerType switch
    {
        "Ollama" => new OllamaApiClient(
            new Uri(providerSection["Endpoint"] ?? "http://localhost:11434"),
            providerSection["Model"] ?? "llama3.1"),

        "AzureOpenAI" => new AzureOpenAIClient(
            new Uri(providerSection["Endpoint"] ?? throw new InvalidOperationException("Azure endpoint required")),
            new DefaultAzureCredential())
            .GetChatClient(providerSection["Model"] ?? "gpt-35-turbo")
            .AsIChatClient(),

        "OpenAI" => throw new NotImplementedException("OpenAI provider not implemented in this example"),

        _ => throw new NotSupportedException($"Provider type '{providerType}' is not supported")
    };
}

// Get the configured providers
var enabledProviders = aiConfig.GetSection("EnabledProviders").Get<string[]>() ?? [defaultProvider];

Console.WriteLine($"Default provider: {defaultProvider}");
Console.WriteLine($"Enabled providers: {string.Join(", ", enabledProviders)}");
Console.WriteLine();

// Test with each enabled provider
foreach (var providerName in enabledProviders)
{
    try
    {
        Console.WriteLine($"Testing provider: {providerName}");

        var client = CreateChatClient(providerName);
        var response = await client.GetResponseAsync("What is artificial intelligence?");

        Console.WriteLine($"Response from {providerName}: {response.ToString()[..Math.Min(response.ToString().Length, 100)]}...");
    }
    catch (Exception ex)
    {
        Console.WriteLine($"Error with provider {providerName}: {ex.Message}");
    }

    Console.WriteLine();
}

// Example of switching based on configuration flags
var useAdvancedFeatures = configuration.GetValue<bool>("AIProviders:UseAdvancedFeatures");
var providerForComplexQueries = useAdvancedFeatures ? "AzureOpenAI" : defaultProvider;

Console.WriteLine($"For complex queries, using: {providerForComplexQueries}");

var complexQueryClient = CreateChatClient(providerForComplexQueries);
var complexResponse = await complexQueryClient.GetResponseAsync(
    "Analyze the impact of quantum computing on modern cryptography and suggest mitigation strategies.");

Console.WriteLine($"Complex query response: {complexResponse.ToString()[..Math.Min(complexResponse.ToString().Length, 150)]}...");
Lines changed: 30 additions & 0 deletions
{
  "AIProviders": {
    "DefaultProvider": "Ollama",
    "UseAdvancedFeatures": false,
    "EnabledProviders": [ "Ollama", "AzureOpenAI" ],
    "Providers": {
      "Ollama": {
        "Type": "Ollama",
        "Endpoint": "http://localhost:11434",
        "Model": "llama3.1"
      },
      "AzureOpenAI": {
        "Type": "AzureOpenAI",
        "Endpoint": "https://your-resource.openai.azure.com/",
        "Model": "gpt-35-turbo"
      },
      "AzureOpenAI-Advanced": {
        "Type": "AzureOpenAI",
        "Endpoint": "https://your-resource.openai.azure.com/",
        "Model": "gpt-4"
      }
    }
  },
  "Logging": {
    "LogLevel": {
      "Default": "Information",
      "Microsoft.AspNetCore": "Warning"
    }
  }
}
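Since the project already references Microsoft.Extensions.Configuration.Binder, the "AIProviders" section above could also be bound to a typed options class instead of being read key by key. The class and property names below are a sketch that simply mirrors the JSON shape; they are not part of the commit.

    // Hypothetical sketch: a typed view of the "AIProviders" section shown above.
    // Property names mirror the JSON keys; the classes themselves are not in the commit.
    public class AIProviderOptions
    {
        public string DefaultProvider { get; set; } = "Ollama";
        public bool UseAdvancedFeatures { get; set; }
        public string[] EnabledProviders { get; set; } = [];
        public Dictionary<string, ProviderSettings> Providers { get; set; } = new();
    }

    public class ProviderSettings
    {
        public string Type { get; set; } = "";
        public string? Endpoint { get; set; }
        public string? Model { get; set; }
    }

    // Usage, given the same ConfigurationBuilder setup as in Program.cs:
    // var options = configuration.GetSection("AIProviders").Get<AIProviderOptions>();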
Lines changed: 21 additions & 0 deletions
<Project Sdk="Microsoft.NET.Sdk">

  <PropertyGroup>
    <OutputType>Exe</OutputType>
    <TargetFramework>net8.0</TargetFramework>
    <ImplicitUsings>enable</ImplicitUsings>
    <Nullable>enable</Nullable>
  </PropertyGroup>

  <ItemGroup>
    <PackageReference Include="Microsoft.Extensions.AI" Version="9.0.0-preview.9.24507.7" />
    <PackageReference Include="Microsoft.Extensions.AI.AzureAIInference" Version="9.1.0-preview.1.25064.3" />
    <PackageReference Include="Microsoft.Extensions.AI.OpenAI" Version="9.9.0-preview.1.25458.4" />
    <PackageReference Include="Microsoft.Extensions.Hosting" Version="10.0.0-rc.1.25451.107" />
    <PackageReference Include="Microsoft.Extensions.Logging.Console" Version="10.0.0-rc.1.25451.107" />
    <PackageReference Include="Azure.AI.OpenAI" Version="2.1.0" />
    <PackageReference Include="Azure.Identity" Version="1.13.1" />
    <PackageReference Include="OllamaSharp" Version="5.3.6" />
  </ItemGroup>

</Project>
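This last project file pulls in Microsoft.Extensions.Hosting and console logging, but its Program.cs is not visible in this diff view. As a hedged sketch only, a host-based setup along these lines might register the chat client through dependency injection, assuming the AddChatClient extension from Microsoft.Extensions.AI; none of the following is the commit's own code.

    // Hypothetical sketch, not the commit's code: registering an IChatClient with the
    // generic host so the provider can be swapped in one place.
    using Microsoft.Extensions.AI;
    using Microsoft.Extensions.DependencyInjection;
    using Microsoft.Extensions.Hosting;
    using OllamaSharp;

    var builder = Host.CreateApplicationBuilder(args);

    // Swapping providers means changing only this registration.
    builder.Services.AddChatClient(
        new OllamaApiClient(new Uri("http://localhost:11434"), "llama3.1"));

    using var host = builder.Build();

    var chatClient = host.Services.GetRequiredService<IChatClient>();
    var response = await chatClient.GetResponseAsync("What model provider am I using?");
    Console.WriteLine(response);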
