Skip to content
Draft
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Original file line number Diff line number Diff line change
@@ -0,0 +1,19 @@
<Project Sdk="Microsoft.NET.Sdk">

<!-- Console sample targeting .NET 8 with nullable reference types and implicit usings enabled. -->
<PropertyGroup>
<OutputType>Exe</OutputType>
<TargetFramework>net8.0</TargetFramework>
<ImplicitUsings>enable</ImplicitUsings>
<Nullable>enable</Nullable>
</PropertyGroup>

<!-- Microsoft.Extensions.AI abstractions (preview) plus concrete provider SDKs:
     Azure OpenAI (with Azure.Identity for credential-based auth) and OllamaSharp.
     NOTE(review): the M.E.AI packages span several preview trains
     (9.0.0-preview.9 / 9.1.0-preview.1 / 9.9.0-preview.1) - confirm they restore together. -->
<ItemGroup>
<PackageReference Include="Microsoft.Extensions.AI" Version="9.0.0-preview.9.24507.7" />
<PackageReference Include="Microsoft.Extensions.AI.AzureAIInference" Version="9.1.0-preview.1.25064.3" />
<PackageReference Include="Microsoft.Extensions.AI.OpenAI" Version="9.9.0-preview.1.25458.4" />
<PackageReference Include="Azure.AI.OpenAI" Version="2.1.0" />
<PackageReference Include="Azure.Identity" Version="1.13.1" />
<PackageReference Include="OllamaSharp" Version="5.3.6" />
</ItemGroup>

</Project>
Original file line number Diff line number Diff line change
@@ -0,0 +1,122 @@
using Microsoft.Extensions.AI;
using Azure.AI.OpenAI;
using Azure.Identity;
using OllamaSharp;
using System.Diagnostics;

// Batch evaluation harness: runs each test prompt against every configured
// chat provider, recording latency, token usage, and failures, then prints
// a per-provider summary.

// Read the Azure OpenAI endpoint from the environment instead of hard-coding
// the "YOUR-AZURE-OPENAI-ENDPOINT" placeholder: the placeholder is not a valid
// absolute URI, so new Uri(...) threw UriFormatException while the provider
// dictionary was being built - before even the local Ollama provider could run.
var azureEndpoint = Environment.GetEnvironmentVariable("AZURE_OPENAI_ENDPOINT");

// Providers under evaluation, keyed by a display name used in the report.
var providers = new Dictionary<string, IChatClient>
{
    ["Ollama-Llama3.1"] = new OllamaApiClient(new Uri("http://localhost:11434"), "llama3.1")
};

if (!string.IsNullOrWhiteSpace(azureEndpoint))
{
    providers["Azure-GPT4"] = new AzureOpenAIClient(
            new Uri(azureEndpoint),
            new DefaultAzureCredential())
        .GetChatClient("gpt-4")
        .AsIChatClient();

    providers["Azure-GPT35"] = new AzureOpenAIClient(
            new Uri(azureEndpoint),
            new DefaultAzureCredential())
        .GetChatClient("gpt-35-turbo")
        .AsIChatClient();
}
else
{
    Console.WriteLine("AZURE_OPENAI_ENDPOINT is not set; evaluating with local providers only.\n");
}

// Prompts used to exercise each provider.
string[] testPrompts = [
    "Explain quantum computing in simple terms.",
    "Write a haiku about artificial intelligence.",
    "What are the pros and cons of renewable energy?",
    "How does machine learning work?",
    "Describe the water cycle."
];

// Accumulated per-call measurements (including failures).
var results = new List<EvaluationResult>();

Console.WriteLine("Starting batch evaluation across providers...\n");

foreach (var prompt in testPrompts)
{
    Console.WriteLine($"Evaluating prompt: \"{prompt}\"");
    Console.WriteLine(new string('-', 50));

    foreach (var (providerName, client) in providers)
    {
        try
        {
            var stopwatch = Stopwatch.StartNew();
            var response = await client.GetResponseAsync(prompt);
            stopwatch.Stop();

            // Capture the text once instead of calling ToString() repeatedly.
            var responseText = response.ToString();

            var result = new EvaluationResult
            {
                Prompt = prompt,
                Provider = providerName,
                Response = responseText,
                ResponseTime = stopwatch.Elapsed,
                // Usage may be null when a provider does not report token counts.
                InputTokens = (int)(response.Usage?.InputTokenCount ?? 0),
                OutputTokens = (int)(response.Usage?.OutputTokenCount ?? 0),
                Success = true
            };

            results.Add(result);

            // Only append an ellipsis when the preview is actually truncated.
            var preview = responseText.Length <= 100 ? responseText : responseText[..100] + "...";

            Console.WriteLine($"{providerName}:");
            Console.WriteLine($"  Response time: {stopwatch.ElapsedMilliseconds}ms");
            Console.WriteLine($"  Tokens: {result.InputTokens} in, {result.OutputTokens} out");
            Console.WriteLine($"  Response: {preview}");
        }
        catch (Exception ex)
        {
            // Record the failure so it still counts against the provider's
            // success rate in the summary below.
            results.Add(new EvaluationResult
            {
                Prompt = prompt,
                Provider = providerName,
                Response = "",
                ResponseTime = TimeSpan.Zero,
                InputTokens = 0,
                OutputTokens = 0,
                Success = false,
                ErrorMessage = ex.Message
            });

            Console.WriteLine($"{providerName}: ERROR - {ex.Message}");
        }
    }

    Console.WriteLine();
}

// Per-provider summary: success rate, mean latency, mean output size.
Console.WriteLine("EVALUATION SUMMARY");
Console.WriteLine(new string('=', 50));

foreach (var group in results.GroupBy(r => r.Provider))
{
    var successful = group.Where(r => r.Success).ToList();
    var total = group.Count();

    Console.WriteLine($"\n{group.Key}:");
    Console.WriteLine($"  Success rate: {successful.Count}/{total} ({(double)successful.Count / total * 100:F1}%)");

    if (successful.Count > 0)
    {
        Console.WriteLine($"  Avg response time: {successful.Average(r => r.ResponseTime.TotalMilliseconds):F0}ms");
        Console.WriteLine($"  Avg output tokens: {successful.Average(r => r.OutputTokens):F0}");
    }
}

// Data model for evaluation results
/// <summary>
/// One provider-by-prompt measurement captured during a batch evaluation run.
/// A failed call is recorded with <see cref="Success"/> false and the
/// exception text in <see cref="ErrorMessage"/>.
/// </summary>
public class EvaluationResult
{
    /// <summary>Prompt text that was sent to the provider.</summary>
    public string Prompt { get; set; } = string.Empty;

    /// <summary>Display name of the provider that handled the call.</summary>
    public string Provider { get; set; } = string.Empty;

    /// <summary>Full response text (empty when the call failed).</summary>
    public string Response { get; set; } = string.Empty;

    /// <summary>Wall-clock duration of the call.</summary>
    public TimeSpan ResponseTime { get; set; }

    /// <summary>Prompt tokens reported by the provider (0 if unreported).</summary>
    public int InputTokens { get; set; }

    /// <summary>Completion tokens reported by the provider (0 if unreported).</summary>
    public int OutputTokens { get; set; }

    /// <summary>True when the call completed without throwing.</summary>
    public bool Success { get; set; }

    /// <summary>Exception message for failed calls; null on success.</summary>
    public string? ErrorMessage { get; set; }
}
Original file line number Diff line number Diff line change
@@ -0,0 +1,29 @@
<Project Sdk="Microsoft.NET.Sdk">

<!-- Console sample targeting .NET 8 with nullable reference types and implicit usings enabled. -->
<PropertyGroup>
<OutputType>Exe</OutputType>
<TargetFramework>net8.0</TargetFramework>
<ImplicitUsings>enable</ImplicitUsings>
<Nullable>enable</Nullable>
</PropertyGroup>

<!-- AI abstractions (preview), the configuration stack (JSON + env vars +
     Binder for Get<T>/GetValue<T>), and the concrete provider SDKs.
     NOTE(review): package versions span several preview trains - confirm
     they restore together. -->
<ItemGroup>
<PackageReference Include="Microsoft.Extensions.AI" Version="9.0.0-preview.9.24507.7" />
<PackageReference Include="Microsoft.Extensions.AI.AzureAIInference" Version="9.1.0-preview.1.25064.3" />
<PackageReference Include="Microsoft.Extensions.AI.OpenAI" Version="9.9.0-preview.1.25458.4" />
<PackageReference Include="Microsoft.Extensions.Configuration" Version="9.0.0" />
<PackageReference Include="Microsoft.Extensions.Configuration.Binder" Version="9.0.9" />
<PackageReference Include="Microsoft.Extensions.Configuration.Json" Version="9.0.0" />
<PackageReference Include="Microsoft.Extensions.Configuration.EnvironmentVariables" Version="9.0.0" />
<PackageReference Include="Azure.AI.OpenAI" Version="2.1.0" />
<PackageReference Include="Azure.Identity" Version="1.13.1" />
<PackageReference Include="OllamaSharp" Version="5.3.6" />
</ItemGroup>

<!-- Copy appsettings.json next to the binary so ConfigurationBuilder can load it
     with optional: false at startup. -->
<ItemGroup>
<None Update="appsettings.json">
<CopyToOutputDirectory>PreserveNewest</CopyToOutputDirectory>
</None>
</ItemGroup>

</Project>
Original file line number Diff line number Diff line change
@@ -0,0 +1,79 @@
using Microsoft.Extensions.AI;
using Microsoft.Extensions.Configuration;
using Azure.AI.OpenAI;
using Azure.Identity;
using OllamaSharp;

// Loads AI-provider settings from appsettings.json (overridable via environment
// variables), builds IChatClient instances on demand from that configuration,
// and smoke-tests each enabled provider.

var configuration = new ConfigurationBuilder()
    .SetBasePath(Directory.GetCurrentDirectory())
    .AddJsonFile("appsettings.json", optional: false, reloadOnChange: true)
    .AddEnvironmentVariables()
    .Build();

// Root section holding provider definitions and feature flags.
var aiConfig = configuration.GetSection("AIProviders");
var defaultProvider = aiConfig["DefaultProvider"] ?? "Ollama";

// Truncates long model output for single-line console display; the ellipsis
// is only appended when text was actually cut (the original always added it).
static string Truncate(string text, int maxLength) =>
    text.Length <= maxLength ? text : text[..maxLength] + "...";

// Factory: maps a configured provider name to a concrete IChatClient.
IChatClient CreateChatClient(string providerName)
{
    var providerSection = aiConfig.GetSection($"Providers:{providerName}");
    if (!providerSection.Exists())
    {
        // Fail with a clear message; previously a missing section fell through
        // to the switch and produced "Provider type '' is not supported".
        throw new InvalidOperationException($"No configuration found for provider '{providerName}'.");
    }

    var providerType = providerSection["Type"];

    return providerType switch
    {
        "Ollama" => new OllamaApiClient(
            new Uri(providerSection["Endpoint"] ?? "http://localhost:11434"),
            providerSection["Model"] ?? "llama3.1"),

        "AzureOpenAI" => new AzureOpenAIClient(
            new Uri(providerSection["Endpoint"] ?? throw new InvalidOperationException("Azure endpoint required")),
            new DefaultAzureCredential())
            .GetChatClient(providerSection["Model"] ?? "gpt-35-turbo")
            .AsIChatClient(),

        "OpenAI" => throw new NotImplementedException("OpenAI provider not implemented in this example"),

        _ => throw new NotSupportedException($"Provider type '{providerType}' is not supported")
    };
}

// Providers to exercise; falls back to the default provider when the
// EnabledProviders array is absent from configuration.
var enabledProviders = aiConfig.GetSection("EnabledProviders").Get<string[]>() ?? [defaultProvider];

Console.WriteLine($"Default provider: {defaultProvider}");
Console.WriteLine($"Enabled providers: {string.Join(", ", enabledProviders)}");
Console.WriteLine();

// Smoke-test each enabled provider with a short prompt.
foreach (var providerName in enabledProviders)
{
    try
    {
        Console.WriteLine($"Testing provider: {providerName}");

        var client = CreateChatClient(providerName);
        var response = await client.GetResponseAsync("What is artificial intelligence?");

        Console.WriteLine($"Response from {providerName}: {Truncate(response.ToString(), 100)}");
    }
    catch (Exception ex)
    {
        Console.WriteLine($"Error with provider {providerName}: {ex.Message}");
    }

    Console.WriteLine();
}

// Example of routing complex queries to a stronger provider via a config flag.
var useAdvancedFeatures = configuration.GetValue<bool>("AIProviders:UseAdvancedFeatures");
var providerForComplexQueries = useAdvancedFeatures ? "AzureOpenAI" : defaultProvider;

Console.WriteLine($"For complex queries, using: {providerForComplexQueries}");

try
{
    var complexQueryClient = CreateChatClient(providerForComplexQueries);
    var complexResponse = await complexQueryClient.GetResponseAsync(
        "Analyze the impact of quantum computing on modern cryptography and suggest mitigation strategies.");

    Console.WriteLine($"Complex query response: {Truncate(complexResponse.ToString(), 150)}");
}
catch (Exception ex)
{
    // Previously unhandled: a misconfigured endpoint crashed the app here,
    // unlike the guarded smoke-test loop above.
    Console.WriteLine($"Complex query failed: {ex.Message}");
}
Original file line number Diff line number Diff line change
@@ -0,0 +1,30 @@
{
"AIProviders": {
"DefaultProvider": "Ollama",
"UseAdvancedFeatures": false,
"EnabledProviders": [ "Ollama", "AzureOpenAI" ],
"Providers": {
"Ollama": {
"Type": "Ollama",
"Endpoint": "http://localhost:11434",
"Model": "llama3.1"
},
"AzureOpenAI": {
"Type": "AzureOpenAI",
"Endpoint": "https://your-resource.openai.azure.com/",
"Model": "gpt-35-turbo"
},
"AzureOpenAI-Advanced": {
"Type": "AzureOpenAI",
"Endpoint": "https://your-resource.openai.azure.com/",
"Model": "gpt-4"
}
}
},
"Logging": {
"LogLevel": {
"Default": "Information",
"Microsoft.AspNetCore": "Warning"
}
}
}
Original file line number Diff line number Diff line change
@@ -0,0 +1,21 @@
<Project Sdk="Microsoft.NET.Sdk">

<!-- Console sample targeting .NET 8 with nullable reference types and implicit usings enabled. -->
<PropertyGroup>
<OutputType>Exe</OutputType>
<TargetFramework>net8.0</TargetFramework>
<ImplicitUsings>enable</ImplicitUsings>
<Nullable>enable</Nullable>
</PropertyGroup>

<!-- AI abstractions (preview), generic host + console logging, and provider SDKs.
     NOTE(review): Hosting/Logging.Console are 10.0.0-rc packages while the TFM
     is net8.0 - confirm these rc builds restore and run on net8.0, or align
     them with the 9.x line used by the sibling projects. -->
<ItemGroup>
<PackageReference Include="Microsoft.Extensions.AI" Version="9.0.0-preview.9.24507.7" />
<PackageReference Include="Microsoft.Extensions.AI.AzureAIInference" Version="9.1.0-preview.1.25064.3" />
<PackageReference Include="Microsoft.Extensions.AI.OpenAI" Version="9.9.0-preview.1.25458.4" />
<PackageReference Include="Microsoft.Extensions.Hosting" Version="10.0.0-rc.1.25451.107" />
<PackageReference Include="Microsoft.Extensions.Logging.Console" Version="10.0.0-rc.1.25451.107" />
<PackageReference Include="Azure.AI.OpenAI" Version="2.1.0" />
<PackageReference Include="Azure.Identity" Version="1.13.1" />
<PackageReference Include="OllamaSharp" Version="5.3.6" />
</ItemGroup>

</Project>
Loading