diff --git a/.github/workflows/Build-Test-And-Deploy.yml b/.github/workflows/Build-Test-And-Deploy.yml index 3375413b..86ae8f04 100644 --- a/.github/workflows/Build-Test-And-Deploy.yml +++ b/.github/workflows/Build-Test-And-Deploy.yml @@ -147,14 +147,20 @@ jobs: emailsender-secret=keyvaultref:$KEYVAULTURI/secrets/authmessagesender-secretkey,identityref:$MANAGEDIDENTITYID emailsender-name=keyvaultref:$KEYVAULTURI/secrets/authmessagesender-sendfromname,identityref:$MANAGEDIDENTITYID \ emailsender-email=keyvaultref:$KEYVAULTURI/secrets/authmessagesender-sendfromemail,identityref:$MANAGEDIDENTITYID connectionstring=keyvaultref:$KEYVAULTURI/secrets/connectionstrings-essentialcsharpwebcontextconnection,identityref:$MANAGEDIDENTITYID \ captcha-sitekey=keyvaultref:$KEYVAULTURI/secrets/captcha-sitekey,identityref:$MANAGEDIDENTITYID captcha-secretkey=keyvaultref:$KEYVAULTURI/secrets/captcha-secretkey,identityref:$MANAGEDIDENTITYID \ - appinsights-connectionstring=keyvaultref:$KEYVAULTURI/secrets/applicationinsights-connectionstring,identityref:$MANAGEDIDENTITYID + appinsights-connectionstring=keyvaultref:$KEYVAULTURI/secrets/applicationinsights-connectionstring,identityref:$MANAGEDIDENTITYID \ + ai-endpoint=keyvaultref:$KEYVAULTURI/secrets/AIOptions--Endpoint,identityref:$MANAGEDIDENTITYID ai-apikey=keyvaultref:$KEYVAULTURI/secrets/AIOptions--ApiKey,identityref:$MANAGEDIDENTITYID \ + ai-vectordeployment=keyvaultref:$KEYVAULTURI/secrets/AIOptions--VectorGenerationDeploymentName,identityref:$MANAGEDIDENTITYID ai-chatdeployment=keyvaultref:$KEYVAULTURI/secrets/AIOptions--ChatDeploymentName,identityref:$MANAGEDIDENTITYID \ + ai-systemprompt=keyvaultref:$KEYVAULTURI/secrets/AIOptions--SystemPrompt,identityref:$MANAGEDIDENTITYID \ + postgres-vectorstore-connectionstring=keyvaultref:$KEYVAULTURI/secrets/connectionstrings--PostgresVectorDb,identityref:$MANAGEDIDENTITYID az containerapp update --name $CONTAINER_APP_NAME --resource-group $RESOURCEGROUP --replace-env-vars Authentication__github__clientId=secretref:github-clientid Authentication__github__clientSecret=secretref:github-clientsecret \ Authentication__microsoft__clientId=secretref:msft-clientid Authentication__microsoft__clientSecret=secretref:msft-clientsecret AuthMessageSender__ApiKey=secretref:emailsender-apikey AuthMessageSender__SecretKey=secretref:emailsender-secret \ AuthMessageSender__SendFromName=secretref:emailsender-name AuthMessageSender__SendFromEmail=secretref:emailsender-email ConnectionStrings__EssentialCSharpWebContextConnection=secretref:connectionstring ASPNETCORE_ENVIRONMENT=Staging \ - AZURE_CLIENT_ID=$AZURECLIENTID HCaptcha__SiteKey=secretref:captcha-sitekey HCaptcha__SecretKey=secretref:captcha-secretkey ApplicationInsights__ConnectionString=secretref:appinsights-connectionstring + AZURE_CLIENT_ID=$AZURECLIENTID HCaptcha__SiteKey=secretref:captcha-sitekey HCaptcha__SecretKey=secretref:captcha-secretkey ApplicationInsights__ConnectionString=secretref:appinsights-connectionstring \ + AIOptions__Endpoint=secretref:ai-endpoint AIOptions__ApiKey=secretref:ai-apikey AIOptions__VectorGenerationDeploymentName=secretref:ai-vectordeployment AIOptions__ChatDeploymentName=secretref:ai-chatdeployment \ + AIOptions__SystemPrompt=secretref:ai-systemprompt ConnectionStrings__PostgresVectorStore=secretref:postgres-vectorstore-connectionstring - name: Logout of Azure CLI - if: "always()" + if: always() uses: azure/CLI@v2 with: inlineScript: | @@ -233,14 +239,19 @@ jobs: 
emailsender-secret=keyvaultref:$KEYVAULTURI/secrets/authmessagesender-secretkey,identityref:$MANAGEDIDENTITYID emailsender-name=keyvaultref:$KEYVAULTURI/secrets/authmessagesender-sendfromname,identityref:$MANAGEDIDENTITYID \ emailsender-email=keyvaultref:$KEYVAULTURI/secrets/authmessagesender-sendfromemail,identityref:$MANAGEDIDENTITYID connectionstring=keyvaultref:$KEYVAULTURI/secrets/connectionstrings-essentialcsharpwebcontextconnection,identityref:$MANAGEDIDENTITYID \ captcha-sitekey=keyvaultref:$KEYVAULTURI/secrets/captcha-sitekey,identityref:$MANAGEDIDENTITYID captcha-secretkey=keyvaultref:$KEYVAULTURI/secrets/captcha-secretkey,identityref:$MANAGEDIDENTITYID \ - appinsights-connectionstring=keyvaultref:$KEYVAULTURI/secrets/applicationinsights-connectionstring,identityref:$MANAGEDIDENTITYID + appinsights-connectionstring=keyvaultref:$KEYVAULTURI/secrets/applicationinsights-connectionstring,identityref:$MANAGEDIDENTITYID \ + ai-endpoint=keyvaultref:$KEYVAULTURI/secrets/AIOptions--Endpoint,identityref:$MANAGEDIDENTITYID ai-apikey=keyvaultref:$KEYVAULTURI/secrets/AIOptions--ApiKey,identityref:$MANAGEDIDENTITYID \ + ai-vectordeployment=keyvaultref:$KEYVAULTURI/secrets/AIOptions--VectorGenerationDeploymentName,identityref:$MANAGEDIDENTITYID ai-chatdeployment=keyvaultref:$KEYVAULTURI/secrets/AIOptions--ChatDeploymentName,identityref:$MANAGEDIDENTITYID \ + ai-systemprompt=keyvaultref:$KEYVAULTURI/secrets/AIOptions--SystemPrompt,identityref:$MANAGEDIDENTITYID \ + postgres-vectorstore-connectionstring=keyvaultref:$KEYVAULTURI/secrets/connectionstrings--PostgresVectorDb,identityref:$MANAGEDIDENTITYID az containerapp update --name $CONTAINER_APP_NAME --resource-group $RESOURCEGROUP --replace-env-vars Authentication__github__clientId=secretref:github-clientid Authentication__github__clientSecret=secretref:github-clientsecret \ Authentication__microsoft__clientId=secretref:msft-clientid Authentication__microsoft__clientSecret=secretref:msft-clientsecret AuthMessageSender__ApiKey=secretref:emailsender-apikey AuthMessageSender__SecretKey=secretref:emailsender-secret \ AuthMessageSender__SendFromName=secretref:emailsender-name AuthMessageSender__SendFromEmail=secretref:emailsender-email ConnectionStrings__EssentialCSharpWebContextConnection=secretref:connectionstring ASPNETCORE_ENVIRONMENT=Production \ - AZURE_CLIENT_ID=$AZURECLIENTID HCaptcha__SiteKey=secretref:captcha-sitekey HCaptcha__SecretKey=secretref:captcha-secretkey ApplicationInsights__ConnectionString=secretref:appinsights-connectionstring + AZURE_CLIENT_ID=$AZURECLIENTID HCaptcha__SiteKey=secretref:captcha-sitekey HCaptcha__SecretKey=secretref:captcha-secretkey ApplicationInsights__ConnectionString=secretref:appinsights-connectionstring \ + AIOptions__Endpoint=secretref:ai-endpoint AIOptions__ApiKey=secretref:ai-apikey ConnectionStrings__PostgresVectorStore=secretref:postgres-vectorstore-connectionstring - name: Logout of Azure CLI - if: "always()" + if: always() uses: azure/CLI@v2 with: inlineScript: | diff --git a/Directory.Packages.props b/Directory.Packages.props index 30cf682f..cb713801 100644 --- a/Directory.Packages.props +++ b/Directory.Packages.props @@ -36,12 +36,19 @@ + + + + + + + - + \ No newline at end of file diff --git a/EssentialCSharp.Chat.Shared/EssentialCSharp.Chat.Common.csproj b/EssentialCSharp.Chat.Shared/EssentialCSharp.Chat.Common.csproj new file mode 100644 index 00000000..1bef1967 --- /dev/null +++ b/EssentialCSharp.Chat.Shared/EssentialCSharp.Chat.Common.csproj @@ -0,0 +1,18 @@ + + + + net9.0 + + + + + + + + + 
all + runtime; build; native; contentfiles; analyzers; buildtransitive + + + +
diff --git a/EssentialCSharp.Chat.Shared/Extensions/ServiceCollectionExtensions.cs b/EssentialCSharp.Chat.Shared/Extensions/ServiceCollectionExtensions.cs
new file mode 100644
index 00000000..a9bca7d6
--- /dev/null
+++ b/EssentialCSharp.Chat.Shared/Extensions/ServiceCollectionExtensions.cs
@@ -0,0 +1,85 @@
+using Azure.AI.OpenAI;
+using EssentialCSharp.Chat.Common.Services;
+using Microsoft.Extensions.Configuration;
+using Microsoft.Extensions.DependencyInjection;
+using Microsoft.SemanticKernel;
+
+namespace EssentialCSharp.Chat.Common.Extensions;
+
+public static class ServiceCollectionExtensions
+{
+    /// <summary>
+    /// Adds Azure OpenAI and related AI services to the service collection.
+    /// </summary>
+    /// <param name="services">The service collection to add services to</param>
+    /// <param name="aiOptions">The AI configuration options</param>
+    /// <param name="postgresConnectionString">The PostgreSQL connection string for the vector store</param>
+    /// <returns>The service collection for chaining</returns>
+    public static IServiceCollection AddAzureOpenAIServices(this IServiceCollection services, AIOptions aiOptions, string postgresConnectionString)
+    {
+        // Guard against missing configuration before any clients are registered.
+        if (string.IsNullOrEmpty(aiOptions.Endpoint) ||
+            string.IsNullOrEmpty(aiOptions.ApiKey))
+        {
+            throw new InvalidOperationException("AIOptions Endpoint and ApiKey must be configured.");
+        }
+
+        // Register Azure OpenAI services
+#pragma warning disable SKEXP0010 // Type is for evaluation purposes only and is subject to change or removal in future updates. Suppress this diagnostic to proceed.
+        services.AddAzureOpenAIEmbeddingGenerator(
+            aiOptions.VectorGenerationDeploymentName,
+            aiOptions.Endpoint,
+            aiOptions.ApiKey);
+
+        services.AddAzureOpenAIChatClient(
+            aiOptions.ChatDeploymentName,
+            aiOptions.Endpoint,
+            aiOptions.ApiKey);
+
+        services.AddAzureOpenAIChatCompletion(
+            aiOptions.ChatDeploymentName,
+            aiOptions.Endpoint,
+            aiOptions.ApiKey);
+
+        services.AddSingleton(provider =>
+            new AzureOpenAIClient(new Uri(aiOptions.Endpoint), new Azure.AzureKeyCredential(aiOptions.ApiKey)));
+
+        // Add PostgreSQL vector store
+        services.AddPostgresVectorStore(postgresConnectionString);
+
+#pragma warning restore SKEXP0010
+
+        // Register shared AI services
+        services.AddSingleton<AIChatService>();
+        services.AddSingleton<AISearchService>();
+        services.AddSingleton<EmbeddingService>();
+        services.AddSingleton<MarkdownChunkingService>();
+
+        return services;
+    }
+
+    /// <summary>
+    /// Adds Azure OpenAI and related AI services to the service collection using configuration.
+    /// </summary>
+    /// <param name="services">The service collection to add services to</param>
+    /// <param name="configuration">The configuration to read AIOptions from</param>
+    /// <returns>The service collection for chaining</returns>
+    public static IServiceCollection AddAzureOpenAIServices(this IServiceCollection services, IConfiguration configuration)
+    {
+        // Configure AI options from configuration
+        services.Configure<AIOptions>(configuration.GetSection("AIOptions"));
+
+        var aiOptions = configuration.GetSection("AIOptions").Get<AIOptions>();
+        if (aiOptions == null)
+        {
+            throw new InvalidOperationException("AIOptions section is missing from configuration.");
+        }
+
+        // Get PostgreSQL connection string using the standard method
+        var postgresConnectionString = configuration.GetConnectionString("PostgresVectorStore") ??
+ throw new InvalidOperationException("Connection string 'PostgresVectorStore' not found."); + + return services.AddAzureOpenAIServices(aiOptions, postgresConnectionString); + } +} diff --git a/EssentialCSharp.Chat.Shared/Models/AIOptions.cs b/EssentialCSharp.Chat.Shared/Models/AIOptions.cs new file mode 100644 index 00000000..290b49ab --- /dev/null +++ b/EssentialCSharp.Chat.Shared/Models/AIOptions.cs @@ -0,0 +1,29 @@ +namespace EssentialCSharp.Chat; + +public class AIOptions +{ + /// + /// The Azure OpenAI deployment name for text embedding generation. + /// + public string VectorGenerationDeploymentName { get; set; } = string.Empty; + + /// + /// The Azure OpenAI deployment name for chat completions. + /// + public string ChatDeploymentName { get; set; } = string.Empty; + + /// + /// The system prompt to use for the chat model. + /// + public string SystemPrompt { get; set; } = string.Empty; + + /// + /// The Azure OpenAI endpoint URL. + /// + public string Endpoint { get; set; } = string.Empty; + + /// + /// The API key for accessing Azure OpenAI services. + /// + public string ApiKey { get; set; } = string.Empty; +} diff --git a/EssentialCSharp.Chat.Shared/Models/BookContentChunk.cs b/EssentialCSharp.Chat.Shared/Models/BookContentChunk.cs new file mode 100644 index 00000000..e95625b5 --- /dev/null +++ b/EssentialCSharp.Chat.Shared/Models/BookContentChunk.cs @@ -0,0 +1,54 @@ +using Microsoft.Extensions.VectorData; + +namespace EssentialCSharp.Chat.Common.Models; + +/// +/// Represents a chunk of book content for vector search +/// +public sealed class BookContentChunk +{ + /// + /// Unique identifier for the chunk - serves as the vector store key + /// + [VectorStoreKey] + public string Id { get; set; } = string.Empty; + + /// + /// Original source file name + /// + [VectorStoreData] + public string FileName { get; set; } = string.Empty; + + /// + /// Heading or title of the markdown chunk + /// + [VectorStoreData] + public string Heading { get; set; } = string.Empty; + + /// + /// The actual markdown content text for this chunk + /// + [VectorStoreData] + public string ChunkText { get; set; } = string.Empty; + + /// + /// Chapter number extracted from filename (e.g., "Chapter01.md" -> 1) + /// + [VectorStoreData] + public int? ChapterNumber { get; set; } + + /// + /// SHA256 hash of the chunk content for change detection + /// + [VectorStoreData] + public string ContentHash { get; set; } = string.Empty; + + /// + /// Vector embedding for the chunk text - will be generated by embedding service + /// Using 1536 dimensions for Azure OpenAI text-embedding-3-small-v1 + /// Use CosineSimilarity distance function since we are using text-embedding-3 (https://platform.openai.com/docs/guides/embeddings#which-distance-function-should-i-use) + /// Postgres supports only Hnsw: https://learn.microsoft.com/en-us/semantic-kernel/concepts/vector-store-connectors/out-of-the-box-connectors/postgres-connector?pivots=programming-language-csharp&WT.mc_id=8B97120A00B57354 + /// + [VectorStoreVector(Dimensions: 1536, DistanceFunction = DistanceFunction.CosineSimilarity, IndexKind = IndexKind.Hnsw)] + public ReadOnlyMemory? 
TextEmbedding { get; set; } +} diff --git a/EssentialCSharp.Chat.Shared/Services/AIChatService.cs b/EssentialCSharp.Chat.Shared/Services/AIChatService.cs new file mode 100644 index 00000000..8721d966 --- /dev/null +++ b/EssentialCSharp.Chat.Shared/Services/AIChatService.cs @@ -0,0 +1,334 @@ +using Azure.AI.OpenAI; +using Microsoft.Extensions.Options; +using ModelContextProtocol.Client; +using ModelContextProtocol.Protocol; +using OpenAI.Responses; + +namespace EssentialCSharp.Chat.Common.Services; + +/// +/// Service for handling AI chat completions using the OpenAI Responses API +/// +public class AIChatService +{ + private readonly AIOptions _Options; + private readonly AzureOpenAIClient _AzureClient; + private readonly OpenAIResponseClient _ResponseClient; + private readonly AISearchService _SearchService; + + public AIChatService(IOptions options, AISearchService searchService, AzureOpenAIClient azureClient) + { + _Options = options.Value; + _SearchService = searchService; + + // Initialize Azure OpenAI client and get the Response Client from it + _AzureClient = azureClient; + + _ResponseClient = _AzureClient.GetOpenAIResponseClient(_Options.ChatDeploymentName); + } + + /// + /// Gets a single chat completion response with all optional features + /// + /// The user's input prompt + /// Optional system prompt to override the default + /// Previous response ID to maintain conversation context + /// Optional tools for the AI to use + /// Optional reasoning effort level for reasoning models + /// Enable vector search for contextual information + /// Cancellation token + /// The AI response text and response ID for conversation continuity + public async Task<(string response, string responseId)> GetChatCompletion( + string prompt, + string? systemPrompt = null, + string? previousResponseId = null, + IMcpClient? mcpClient = null, + IEnumerable? tools = null, + ResponseReasoningEffortLevel? reasoningEffortLevel = null, + bool enableContextualSearch = false, + CancellationToken cancellationToken = default) + { + var responseOptions = await CreateResponseOptionsAsync(previousResponseId, tools, reasoningEffortLevel, mcpClient: mcpClient, cancellationToken: cancellationToken); + var enrichedPrompt = await EnrichPromptWithContext(prompt, enableContextualSearch, cancellationToken); + return await GetChatCompletionCore(enrichedPrompt, responseOptions, systemPrompt, cancellationToken); + } + + /// + /// Gets a streaming chat completion response with all optional features + /// + /// The user's input prompt + /// Optional system prompt to override the default + /// Previous response ID to maintain conversation context + /// Optional tools for the AI to use + /// Optional reasoning effort level for reasoning models + /// Enable vector search for contextual information + /// Cancellation token + /// An async enumerable of response text chunks and final response ID + public async IAsyncEnumerable<(string text, string? responseId)> GetChatCompletionStream( + string prompt, + string? systemPrompt = null, + string? previousResponseId = null, + IMcpClient? mcpClient = null, + IEnumerable? tools = null, + ResponseReasoningEffortLevel? 
reasoningEffortLevel = null, + bool enableContextualSearch = false, + [System.Runtime.CompilerServices.EnumeratorCancellation] CancellationToken cancellationToken = default) + { + var responseOptions = await CreateResponseOptionsAsync(previousResponseId, tools, reasoningEffortLevel, mcpClient: mcpClient, cancellationToken: cancellationToken); + var enrichedPrompt = await EnrichPromptWithContext(prompt, enableContextualSearch, cancellationToken); + + // Construct the user input with system context if provided + var systemContext = systemPrompt ?? _Options.SystemPrompt; + + // Create the streaming response using the Responses API + List responseItems = [ResponseItem.CreateUserMessageItem(enrichedPrompt)]; + if (systemContext is not null) + { + responseItems.Add( + ResponseItem.CreateSystemMessageItem(systemContext)); + } + var streamingUpdates = _ResponseClient.CreateResponseStreamingAsync( + responseItems, + options: responseOptions, + cancellationToken: cancellationToken); + + await foreach (var result in ProcessStreamingUpdatesAsync(streamingUpdates, responseOptions, mcpClient, cancellationToken)) + { + yield return result; + } + } + + /// + /// Enriches the user prompt with contextual information from vector search + /// + private async Task EnrichPromptWithContext(string prompt, bool enableContextualSearch, CancellationToken cancellationToken) + { + if (!enableContextualSearch) + { + return prompt; + } + + var searchResults = await _SearchService.ExecuteVectorSearch(prompt); + var contextualInfo = new System.Text.StringBuilder(); + + contextualInfo.AppendLine("## Contextual Information"); + contextualInfo.AppendLine("The following information might be relevant to your question:"); + contextualInfo.AppendLine(); + + await foreach (var result in searchResults) + { + contextualInfo.AppendLine(System.Globalization.CultureInfo.InvariantCulture, $"**From: {result.Record.Heading}**"); + contextualInfo.AppendLine(result.Record.ChunkText); + contextualInfo.AppendLine(); + } + + contextualInfo.AppendLine("## User Question"); + contextualInfo.AppendLine(prompt); + + return contextualInfo.ToString(); + } + + /// + /// Processes streaming updates from the OpenAI Responses API, handling both regular responses and function calls + /// + private async IAsyncEnumerable<(string text, string? responseId)> ProcessStreamingUpdatesAsync( + IAsyncEnumerable streamingUpdates, + ResponseCreationOptions responseOptions, + IMcpClient? mcpClient, + [System.Runtime.CompilerServices.EnumeratorCancellation] CancellationToken cancellationToken = default) + { + await foreach (var update in streamingUpdates.WithCancellation(cancellationToken)) + { + string? 
responseId; + if (update is StreamingResponseCreatedUpdate created) + { + // Remember the response ID for later function calls + responseId = created.Response.Id; + } + else if (update is StreamingResponseOutputItemDoneUpdate itemDone) + { + // Check if this is a function call that needs to be executed + if (itemDone.Item is FunctionCallResponseItem functionCallItem && mcpClient != null) + { + // Execute the function call and stream its response + await foreach (var functionResult in ExecuteFunctionCallAsync(functionCallItem, responseOptions, mcpClient, cancellationToken)) + { + if (functionResult.responseId != null) + { + responseId = functionResult.responseId; + } + yield return functionResult; + } + } + } + else if (update is StreamingResponseOutputTextDeltaUpdate deltaUpdate) + { + yield return (deltaUpdate.Delta.ToString(), null); + } + else if (update is StreamingResponseCompletedUpdate completedUpdate) + { + yield return (string.Empty, responseId: completedUpdate.Response.Id); // Signal completion with response ID + } + } + } + + /// + /// Executes a function call and streams the response + /// + private async IAsyncEnumerable<(string text, string? responseId)> ExecuteFunctionCallAsync( + FunctionCallResponseItem functionCallItem, + ResponseCreationOptions responseOptions, + IMcpClient mcpClient, + [System.Runtime.CompilerServices.EnumeratorCancellation] CancellationToken cancellationToken = default) + { + // A dictionary of arguments to pass to the tool. Each key represents a parameter name, and its associated value represents the argument value. + Dictionary arguments = []; + // example JsonResponse: + // "{\"question\":\"Azure OpenAI Responses API (Preview)\"}" + var jsonResponse = functionCallItem.FunctionArguments.ToString(); + var jsonArguments = System.Text.Json.JsonSerializer.Deserialize>(jsonResponse) ?? new Dictionary(); + + // Convert JsonElement values to their actual types + foreach (var kvp in jsonArguments) + { + if (kvp.Value is System.Text.Json.JsonElement jsonElement) + { + arguments[kvp.Key] = jsonElement.ValueKind switch + { + System.Text.Json.JsonValueKind.String => jsonElement.GetString(), + System.Text.Json.JsonValueKind.Number => jsonElement.GetDecimal(), + System.Text.Json.JsonValueKind.True => true, + System.Text.Json.JsonValueKind.False => false, + System.Text.Json.JsonValueKind.Null => null, + _ => jsonElement.ToString() + }; + } + else + { + arguments[kvp.Key] = kvp.Value; + } + } + + // Execute the function call using the MCP client + var toolResult = await mcpClient.CallToolAsync( + functionCallItem.FunctionName, + arguments: arguments, + cancellationToken: cancellationToken); + + // Create input items with both the function call and the result + // This matches the Python pattern: append both tool_call and result + var inputItems = new List + { + functionCallItem, // The original function call + new FunctionCallOutputResponseItem(functionCallItem.CallId, string.Join("", toolResult.Content.Where(x => x.Type == "text").OfType().Select(x => x.Text))) + }; + + // Stream the function call response using the same processing logic + var functionResponseStream = _ResponseClient.CreateResponseStreamingAsync( + inputItems, + responseOptions, + cancellationToken); + + await foreach (var result in ProcessStreamingUpdatesAsync(functionResponseStream, responseOptions, mcpClient, cancellationToken)) + { + yield return result; + } + } + + /// + /// Creates response options with optional features + /// + private static async Task CreateResponseOptionsAsync( + string? 
previousResponseId = null, + IEnumerable? tools = null, + ResponseReasoningEffortLevel? reasoningEffortLevel = null, + IMcpClient? mcpClient = null, + CancellationToken cancellationToken = default + ) + { + var options = new ResponseCreationOptions(); + + // Add conversation context if available + if (!string.IsNullOrEmpty(previousResponseId)) + { + options.PreviousResponseId = previousResponseId; + } + + // Add tools if provided + if (tools != null) + { + foreach (var tool in tools) + { + options.Tools.Add(tool); + } + } + + if (mcpClient is not null) + { + await foreach (McpClientTool tool in mcpClient.EnumerateToolsAsync(cancellationToken: cancellationToken)) + { + options.Tools.Add(ResponseTool.CreateFunctionTool(tool.Name, tool.Description, BinaryData.FromString(tool.JsonSchema.GetRawText()))); + } + } + + // Add reasoning options if specified + if (reasoningEffortLevel.HasValue) + { + options.ReasoningOptions = new ResponseReasoningOptions() + { + ReasoningEffortLevel = reasoningEffortLevel.Value + }; + } + + return options; + } + + /// + /// Core method for getting chat completions with configurable response options + /// + private async Task<(string response, string responseId)> GetChatCompletionCore( + string prompt, + ResponseCreationOptions responseOptions, + string? systemPrompt = null, + CancellationToken cancellationToken = default) + { + // Construct the user input with system context if provided + var systemContext = systemPrompt ?? _Options.SystemPrompt; + + // Create the streaming response using the Responses API + List responseItems = [ResponseItem.CreateUserMessageItem(prompt)]; + if (systemContext is not null) + { + responseItems.Add( + ResponseItem.CreateSystemMessageItem(systemContext)); + } + + // Create the response using the Responses API + var response = await _ResponseClient.CreateResponseAsync( + responseItems, + options: responseOptions, + cancellationToken: cancellationToken); + + // Extract the message content and response ID + string responseText = string.Empty; + string responseId = response.Value.Id; + + foreach (var outputItem in response.Value.OutputItems) + { + if (outputItem is MessageResponseItem messageItem && + messageItem.Role == MessageRole.Assistant) + { + var textContent = messageItem.Content?.FirstOrDefault()?.Text; + if (!string.IsNullOrEmpty(textContent)) + { + responseText = textContent; + break; + } + } + } + + return (responseText, responseId); + } + + // TODO: Look into using UserSecurityContext (https://learn.microsoft.com/en-us/azure/defender-for-cloud/gain-end-user-context-ai) +} diff --git a/EssentialCSharp.Chat.Shared/Services/AISearchService.cs b/EssentialCSharp.Chat.Shared/Services/AISearchService.cs new file mode 100644 index 00000000..915d1dc4 --- /dev/null +++ b/EssentialCSharp.Chat.Shared/Services/AISearchService.cs @@ -0,0 +1,27 @@ +using EssentialCSharp.Chat.Common.Models; +using Microsoft.Extensions.VectorData; + +namespace EssentialCSharp.Chat.Common.Services; + +public class AISearchService(VectorStore vectorStore, EmbeddingService embeddingService) +{ + // TODO: Implement Hybrid Search functionality, may need to switch db providers to support full text search? + + public async Task>> ExecuteVectorSearch(string query, string? 
collectionName = null) + { + collectionName ??= EmbeddingService.CollectionName; + + VectorStoreCollection collection = vectorStore.GetCollection(collectionName); + + ReadOnlyMemory searchVector = await embeddingService.GenerateEmbeddingAsync(query); + + var vectorSearchOptions = new VectorSearchOptions + { + VectorProperty = x => x.TextEmbedding, + }; + + var searchResults = collection.SearchAsync(searchVector, options: vectorSearchOptions, top: 3); + + return searchResults; + } +} diff --git a/EssentialCSharp.Chat.Shared/Services/ChunkingResultExtensions.cs b/EssentialCSharp.Chat.Shared/Services/ChunkingResultExtensions.cs new file mode 100644 index 00000000..a350823a --- /dev/null +++ b/EssentialCSharp.Chat.Shared/Services/ChunkingResultExtensions.cs @@ -0,0 +1,61 @@ +using System.Security.Cryptography; +using System.Text; +using EssentialCSharp.Chat.Common.Models; + +namespace EssentialCSharp.Chat.Common.Services; + +public static partial class ChunkingResultExtensions +{ + public static List ToBookContentChunks(this FileChunkingResult result) + { + var chunks = new List(); + int? chapterNumber = ExtractChapterNumber(result.FileName); + + foreach (var chunk in result.Chunks) + { + string chunkText = chunk; + string contentHash = ComputeSha256Hash(chunkText); + + chunks.Add(new BookContentChunk + { + Id = Guid.NewGuid().ToString(), + FileName = result.FileName, + Heading = ExtractHeading(chunkText), + ChunkText = chunkText, + ChapterNumber = chapterNumber, + ContentHash = contentHash + }); + } + return chunks; + } + + private static string ExtractHeading(string chunkText) + { + // get characters until the first " - " or newline + var firstLine = chunkText.Split(["\r\n", "\r", "\n"], StringSplitOptions.None)[0]; + var headingParts = firstLine.Split([" - "], StringSplitOptions.None); + return headingParts.Length > 0 ? headingParts[0].Trim() : string.Empty; + } + + private static int ExtractChapterNumber(string fileName) + { + // Example: "Chapter01.md" -> 1 + // Regex: Chapter(?[0-9]{2}) + var match = ChapterNumberRegex().Match(fileName); + if (match.Success && int.TryParse(match.Groups["ChapterNumber"].Value, out int chapterNumber)) + + { + return chapterNumber; + } + throw new InvalidOperationException($"File name '{fileName}' does not contain a valid chapter number in the expected format."); + } + + private static string ComputeSha256Hash(string text) + { + var bytes = SHA256.HashData(Encoding.UTF8.GetBytes(text)); + return Convert.ToHexStringLower(bytes); + } + + [System.Text.RegularExpressions.GeneratedRegex(@"Chapter(?\d{2})")] + private static partial System.Text.RegularExpressions.Regex ChapterNumberRegex(); +} diff --git a/EssentialCSharp.Chat.Shared/Services/EmbeddingService.cs b/EssentialCSharp.Chat.Shared/Services/EmbeddingService.cs new file mode 100644 index 00000000..2d069318 --- /dev/null +++ b/EssentialCSharp.Chat.Shared/Services/EmbeddingService.cs @@ -0,0 +1,59 @@ +using EssentialCSharp.Chat.Common.Models; +using Microsoft.Extensions.AI; +using Microsoft.Extensions.VectorData; + +namespace EssentialCSharp.Chat.Common.Services; + +/// +/// Service for generating embeddings for markdown chunks using Azure OpenAI +/// +public class EmbeddingService(VectorStore vectorStore, IEmbeddingGenerator> embeddingGenerator) +{ + public static string CollectionName { get; } = "markdown_chunks"; + + /// + /// Generate an embedding for the given text. + /// + /// The text to generate an embedding for. + /// The cancellation token. + /// A search vector as ReadOnlyMemory<float>. 
+ public async Task> GenerateEmbeddingAsync(string text, CancellationToken cancellationToken = default) + { + var embedding = await embeddingGenerator.GenerateAsync(text, cancellationToken: cancellationToken); + return embedding.Vector; + } + + /// + /// Generate an embedding for each text paragraph and upload it to the specified collection. + /// + /// The name of the collection to upload the text paragraphs to. + /// An async task. + public async Task GenerateBookContentEmbeddingsAndUploadToVectorStore(IEnumerable bookContents, CancellationToken cancellationToken, string? collectionName = null) + { + collectionName ??= CollectionName; + + var collection = vectorStore.GetCollection(collectionName); + await collection.EnsureCollectionDeletedAsync(cancellationToken); + await collection.EnsureCollectionExistsAsync(cancellationToken); + + ParallelOptions parallelOptions = new() + { + MaxDegreeOfParallelism = 5, + CancellationToken = cancellationToken + }; + + int uploadedCount = 0; + + await Parallel.ForEachAsync(bookContents, parallelOptions, async (chunk, cancellationToken) => + { + // Generate the text embedding using the new method. + chunk.TextEmbedding = await GenerateEmbeddingAsync(chunk.ChunkText, cancellationToken); + + await collection.UpsertAsync(chunk, cancellationToken); + Console.WriteLine($"Uploaded chunk '{chunk.Id}' to collection '{collectionName}' for file '{chunk.FileName}' with heading '{chunk.Heading}'."); + + Interlocked.Increment(ref uploadedCount); + }); + Console.WriteLine($"Successfully generated embeddings and uploaded {uploadedCount} chunks to collection '{collectionName}'."); + } +} diff --git a/EssentialCSharp.Chat.Shared/Services/FileChunkingResult.cs b/EssentialCSharp.Chat.Shared/Services/FileChunkingResult.cs new file mode 100644 index 00000000..e2d0f40e --- /dev/null +++ b/EssentialCSharp.Chat.Shared/Services/FileChunkingResult.cs @@ -0,0 +1,14 @@ +namespace EssentialCSharp.Chat.Common.Services; + +/// +/// Data structure to hold chunking results for a single file +/// +public class FileChunkingResult +{ + public string FileName { get; set; } = string.Empty; + public string FilePath { get; set; } = string.Empty; + public int OriginalCharCount { get; set; } + public int ChunkCount { get; set; } + public List Chunks { get; set; } = []; + public int TotalChunkCharacters { get; set; } +} diff --git a/EssentialCSharp.Chat.Shared/Services/MarkdownChunkingService.cs b/EssentialCSharp.Chat.Shared/Services/MarkdownChunkingService.cs new file mode 100644 index 00000000..d50ee214 --- /dev/null +++ b/EssentialCSharp.Chat.Shared/Services/MarkdownChunkingService.cs @@ -0,0 +1,180 @@ +using System.Text.RegularExpressions; +using Microsoft.Extensions.Logging; +using Microsoft.SemanticKernel.Text; + +namespace EssentialCSharp.Chat.Common.Services; + +/// +/// Markdown chunking service using Semantic Kernel's TextChunker +/// +public partial class MarkdownChunkingService( + ILogger logger, + int maxTokensPerChunk = 256, + int overlapTokens = 25) +{ + private static readonly string[] _NewLineSeparators = ["\r\n", "\n", "\r"]; + private readonly int _MaxTokensPerChunk = maxTokensPerChunk; + private readonly int _OverlapTokens = overlapTokens; + + /// + /// Process markdown files in the specified directory using Semantic Kernel's TextChunker + /// + public async Task> ProcessMarkdownFilesAsync( + DirectoryInfo directory, + string filePattern) + { + // Validate input parameters + if (!directory.Exists) + { + logger.LogError("Error: Directory {DirectoryName} does not exist.", 
directory.FullName); + throw new InvalidOperationException($"Error: Directory '{directory.FullName}' does not exist."); + } + + // Find markdown files + var markdownFiles = directory.GetFiles(filePattern, SearchOption.TopDirectoryOnly); + + if (markdownFiles.Length == 0) + { + throw new InvalidOperationException($"No files matching pattern '{filePattern}' found in '{directory.FullName}'"); + } + + Console.WriteLine($"Processing {markdownFiles.Length} markdown files..."); + + int totalChunks = 0; + var results = new List(); + + foreach (var file in markdownFiles) + { + string[] fileContent = await File.ReadAllLinesAsync(file.FullName); + var result = ProcessSingleMarkdownFile(fileContent, file.Name, file.FullName); + results.Add(result); + totalChunks += result.ChunkCount; + } + Console.WriteLine($"Processed {markdownFiles.Length} markdown files with a total of {totalChunks} chunks."); + + return results; + } + + /// + /// Process a single markdown file using Semantic Kernel's SplitMarkdownParagraphs method + /// + public FileChunkingResult ProcessSingleMarkdownFile( + string[] fileContent, string fileName, string filePath) + { + // Remove all multiple empty lines so there is no more than one empty line between paragraphs + string[] lines = [.. fileContent + .Select(line => line.Trim()) + .Where(line => !string.IsNullOrWhiteSpace(line))]; + + string content = string.Join(Environment.NewLine, lines); + + var sections = MarkdownContentToHeadersAndSection(content); + var allChunks = new List(); + int totalChunkCharacters = 0; + int chunkCount = 0; + + foreach (var (Header, Content) in sections) + { +#pragma warning disable SKEXP0050 + var chunks = TextChunker.SplitMarkdownParagraphs( + lines: Content, + maxTokensPerParagraph: _MaxTokensPerChunk, + overlapTokens: _OverlapTokens, + chunkHeader: Header + " - " + ); +#pragma warning restore SKEXP0050 + allChunks.AddRange(chunks); + chunkCount += chunks.Count; + totalChunkCharacters += chunks.Sum(c => c.Length); + } + + return new FileChunkingResult + { + FileName = fileName, + FilePath = filePath, + OriginalCharCount = content.Length, + ChunkCount = chunkCount, + Chunks = allChunks, + TotalChunkCharacters = totalChunkCharacters + }; + } + + /// + /// Convert markdown content into a list of headers and their associated content sections. 
+ /// + /// + /// + public static List<(string Header, List Content)> MarkdownContentToHeadersAndSection(string content) + { + var lines = content.Split(_NewLineSeparators, StringSplitOptions.None); + var sections = new List<(string Header, List Content)>(); + var headerRegex = HeadingRegex(); + var listingPattern = ListingRegex(); + var headerStack = new List<(int Level, string Text)>(); + int i = 0; + while (i < lines.Length) + { + // Find next header + while (i < lines.Length && !headerRegex.IsMatch(lines[i])) + i++; + if (i >= lines.Length) break; + + var match = headerRegex.Match(lines[i]); + int level = match.Groups[1].Value.Length; + string headerText = match.Groups[2].Value.Trim(); + bool isListing = headerText.StartsWith("Listing", StringComparison.OrdinalIgnoreCase) && listingPattern.IsMatch(headerText); + + // If this is a listing header, append its content to the previous section + if (isListing && sections.Count > 0) + { + i++; // skip the listing header + var listingContent = new List(); + while (i < lines.Length && !headerRegex.IsMatch(lines[i])) + { + if (!string.IsNullOrWhiteSpace(lines[i])) + listingContent.Add(lines[i]); + i++; + } + // Append to previous section's content + var prev = sections[^1]; + prev.Content.AddRange(listingContent); + sections[^1] = prev; + continue; + } + + // Update header stack for non-listing headers + if (headerStack.Count == 0 || level > headerStack.Last().Level) + { + headerStack.Add((level, headerText)); + } + else + { + while (headerStack.Count > 0 && headerStack.Last().Level >= level) + headerStack.RemoveAt(headerStack.Count - 1); + headerStack.Add((level, headerText)); + } + i++; + + // Collect content until next header + var contentLines = new List(); + while (i < lines.Length && !headerRegex.IsMatch(lines[i])) + { + if (!string.IsNullOrWhiteSpace(lines[i])) + contentLines.Add(lines[i]); + i++; + } + + // Compose full header context + var fullHeader = string.Join(": ", headerStack.Select(h => h.Text)); + if (contentLines.Count > 0) + sections.Add((fullHeader, contentLines)); + } + return sections; + } + + [GeneratedRegex(@"^Listing \d+\.\d+(:.*)?$")] + private static partial Regex ListingRegex(); + + [GeneratedRegex(@"^(#{1,6}) +(.+)$")] + private static partial Regex HeadingRegex(); +} diff --git a/EssentialCSharp.Chat.Tests/EssentialCSharp.Chat.Tests.csproj b/EssentialCSharp.Chat.Tests/EssentialCSharp.Chat.Tests.csproj new file mode 100644 index 00000000..f1432132 --- /dev/null +++ b/EssentialCSharp.Chat.Tests/EssentialCSharp.Chat.Tests.csproj @@ -0,0 +1,24 @@ + + + + net9.0 + false + + + + + + + + + + + + + + + + + + + diff --git a/EssentialCSharp.Chat.Tests/MarkdownChunkingServiceTests.cs b/EssentialCSharp.Chat.Tests/MarkdownChunkingServiceTests.cs new file mode 100644 index 00000000..8aab8cb6 --- /dev/null +++ b/EssentialCSharp.Chat.Tests/MarkdownChunkingServiceTests.cs @@ -0,0 +1,192 @@ +using EssentialCSharp.Chat.Common.Services; +using Moq; + +namespace EssentialCSharp.Chat.Tests; +// TODO: Move to editorconfig later, just moving quick +#pragma warning disable CA1707 // Identifiers should not contain underscores +public class MarkdownChunkingServiceTests +{ + #region MarkdownContentToHeadersAndSection + [Fact] + public void MarkdownContentToHeadersAndSection_ParsesSampleMarkdown_CorrectlyCombinesHeadersAndExtractsContent() + { + string markdown = """ +### Beginner Topic +#### What Is a Method? 
+ +Syntactically, a **method** in C# is a named block of code introduced by a method declaration (e.g., `static void Main()`) and (usually) followed by zero or more statements within curly braces. Methods perform computations and/or actions. Like paragraphs in written languages, methods provide a means of structuring and organizing code so that it is more readable. More important, methods can be reused and called from multiple places and so avoid the need to duplicate code. The method declaration introduces the method and defines the method name along with the data passed to and from the method. In Listing 1.8, `Main()` followed by `{ ... }` is an example of a C# method. + +## Main Method + +The location where C# programs begin execution is the **Main method**, which begins with `static void Main()`. When you execute the program by typing `dotnet run` on the terminal, the program starts with the Main method and begins executing the first statement, as identified in Listing 1.8. + + + +### Listing 1.8: Breaking Apart `HelloWorld` +publicclass Program // BEGIN Class definition +{ +publicstaticvoid Main() // Method declaration + { // BEGIN method implementation + Console.WriteLine( // This statement spans 2 lines +"Hello, My name is Inigo Montoya"); + } // END method implementation +} // END class definition +Although the Main method declaration can vary to some degree, `static` and the method name, `Main`, are always required for a program (see “Advanced Topic: Declaration of the Main Method”). + +The **comments**, text that begins with `//` in Listing 1.8, are explained later in the chapter. They are included to identify the various constructs in the listing. + +### Advanced Topic +#### Declaration of the Main Method + +C# requires that the Main method return either `void` or `int` and that it take either no parameters or a single array of strings. Listing 1.9 shows the full declaration of the Main method. The `args` parameter is an array of strings corresponding to the command-line arguments. The executable name is not included in the `args` array (unlike in C and C++). To retrieve the full command used to execute the program, including the program name, use `Environment.CommandLine`. +"""; + + var sections = MarkdownChunkingService.MarkdownContentToHeadersAndSection(markdown); + + Assert.Equal(3, sections.Count); + Assert.Contains(sections, s => s.Header == "Beginner Topic: What Is a Method?" && string.Join("\n", s.Content).Contains("Syntactically, a **method** in C# is a named block of code")); + Assert.Contains(sections, s => s.Header == "Main Method" && string.Join("\n", s.Content).Contains("The location where C# programs begin execution is the **Main method**, which begins with `static void Main()`") + && string.Join("\n", s.Content).Contains("publicclass Program")); + Assert.Contains(sections, s => s.Header == "Main Method: Advanced Topic: Declaration of the Main Method" && string.Join("\n", s.Content).Contains("C# requires that the Main method return either `void` or `int`")); + } + + [Fact] + public void MarkdownContentToHeadersAndSection_AppendsCodeListingToPriorSection() + { + string markdown = """ +## Working with Variables + +Now that you’ve been introduced to the most basic C# program, it’s time to declare a local variable. Once a variable is declared, you can assign it a value, replace that value with a new value, and use it in calculations, output, and so on. However, you cannot change the data type of the variable. In Listing 1.12, `string max` is a variable declaration. 
+ + + +### Listing 1.12: Declaring and Assigning a Variable + +publicclass MiracleMax +{ +publicstaticvoid Main() + { +string max; // "string" identifies the data type +// "max" is the variable + max = "Have fun storming the castle!"; + Console.WriteLine(max); + } +} + +### Beginner Topic +#### Local Variables + +A **variable** is a name that refers to a value that can change over time. Local indicates that the programmer **declared** the variable within a method. + +To declare a variable is to define it, which you do by + +* Specifying the type of data which the variable will contain +* Assigning it an identifier (name) +"""; + + var sections = MarkdownChunkingService.MarkdownContentToHeadersAndSection(markdown); + + Assert.Equal(2, sections.Count); + // The code listing should be appended to the Working with Variables section, not as its own section + var workingWithVariablesSection = sections.FirstOrDefault(s => s.Header == "Working with Variables"); + Assert.True(!string.IsNullOrEmpty(workingWithVariablesSection.Header)); + Assert.Contains("publicclass MiracleMax", string.Join("\n", workingWithVariablesSection.Content)); + Assert.DoesNotContain(sections, s => s.Header == "Listing 1.12: Declaring and Assigning a Variable"); + } + + [Fact] + public void MarkdownContentToHeadersAndSection_KeepsPriorHeadersAppended() + { + string markdown = """ +### Beginner Topic +#### What Is a Data Type? + +The type of data that a variable declaration specifies is called a **data type** (or object type). A data type, or simply **type**, is a classification of things that share similar characteristics and behavior. For example, animal is a type. It classifies all things (monkeys, warthogs, and platypuses) that have animal characteristics (multicellular, capacity for locomotion, and so on). Similarly, in programming languages, a type is a definition for several items endowed with similar qualities. + +## Declaring a Variable + +In Listing 1.12, `string max` is a variable declaration of a string type whose name is `max`. It is possible to declare multiple variables within the same statement by specifying the data type once and separating each identifier with a comma. Listing 1.13 demonstrates such a declaration. + +### Listing 1.13: Declaring Two Variables within One Statement +string message1, message2; + +### Declaring another thing + +Because a multivariable declaration statement allows developers to provide the data type only once within a declaration, all variables will be of the same type. + +In C#, the name of the variable may begin with any letter or an underscore (`_`), followed by any number of letters, numbers, and/or underscores. By convention, however, local variable names are camelCased (the first letter in each word is capitalized, except for the first word) and do not include underscores. + +## Assigning a Variable + +After declaring a local variable, you must assign it a value before reading from it. One way to do this is to use the `=` **operator**, also known as the **simple assignment operator**. Operators are symbols used to identify the function the code is to perform. Listing 1.14 demonstrates how to use the assignment operator to designate the string values to which the variables `miracleMax` and `valerie` will point. 
+ +### Listing 1.14: Changing the Value of a Variable +publicclass StormingTheCastle +{ +publicstaticvoid Main() + { +string valerie; +string miracleMax = "Have fun storming the castle!"; + + valerie = "Think it will work?"; + + Console.WriteLine(miracleMax); + Console.WriteLine(valerie); + + miracleMax = "It would take a miracle."; + Console.WriteLine(miracleMax); + } +} + +### Continued Learning +From this listing, observe that it is possible to assign a variable as part of the variable declaration (as it was for `miracleMax`) or afterward in a separate statement (as with the variable `valerie`). The value assigned must always be on the right side of the declaration. +"""; + + var sections = MarkdownChunkingService.MarkdownContentToHeadersAndSection(markdown); + Assert.Equal(5, sections.Count); + + Assert.Contains(sections, s => s.Header == "Beginner Topic: What Is a Data Type?" && string.Join("\n", s.Content).Contains("The type of data that a variable declaration specifies is called a **data type**")); + Assert.Contains(sections, s => s.Header == "Declaring a Variable" && string.Join("\n", s.Content).Contains("In Listing 1.12, `string max` is a variable declaration")); + Assert.Contains(sections, s => s.Header == "Declaring a Variable: Declaring another thing" && string.Join("\n", s.Content).Contains("Because a multivariable declaration statement allows developers to provide the data type only once")); + Assert.Contains(sections, s => s.Header == "Assigning a Variable" && string.Join("\n", s.Content).Contains("After declaring a local variable, you must assign it a value before reading from it.")); + Assert.Contains(sections, s => s.Header == "Assigning a Variable: Continued Learning" && string.Join("\n", s.Content).Contains("From this listing, observe that it is possible to assign a variable as part of the variable declaration")); + } + #endregion MarkdownContentToHeadersAndSection + + #region ProcessSingleMarkdownFile + [Fact] + public void ProcessSingleMarkdownFile_ProducesExpectedChunksAndHeaders() + { + // Arrange + var logger = new Mock>().Object; + var service = new MarkdownChunkingService(logger); + string[] fileContent = new[] + { + "## Section 1", + "This is the first section.", + "", + "### Listing 1.1: Example Listing", + "Console.WriteLine(\"Hello World\");", + "", + "## Section 2", + "This is the second section." 
+ }; + string fileName = "TestFile.md"; + string filePath = "/path/to/TestFile.md"; + + // Act + var result = service.ProcessSingleMarkdownFile(fileContent, fileName, filePath); + + // Assert + Assert.NotNull(result); + Assert.Equal(fileName, result.FileName); + Assert.Equal(filePath, result.FilePath); + Assert.Contains("This is the first section.", string.Join("\n", result.Chunks)); + Assert.Contains("Console.WriteLine(\"Hello World\");", string.Join("\n", result.Chunks)); + Assert.Contains("This is the second section.", string.Join("\n", result.Chunks)); + Assert.Contains(result.Chunks, c => c.Contains("This is the second section.")); + } + #endregion ProcessSingleMarkdownFile +} + +#pragma warning restore CA1707 // Identifiers should not contain underscores diff --git a/EssentialCSharp.Chat/EssentialCSharp.Chat.csproj b/EssentialCSharp.Chat/EssentialCSharp.Chat.csproj new file mode 100644 index 00000000..1f027998 --- /dev/null +++ b/EssentialCSharp.Chat/EssentialCSharp.Chat.csproj @@ -0,0 +1,36 @@ + + + + Exe + net9.0 + 0.0.1 + + + + + + EssentialCSharp.Chat + true + essentialcsharpchat + + + + + + + + + + all + runtime; build; native; contentfiles; analyzers; buildtransitive + + + + + + + + diff --git a/EssentialCSharp.Chat/Program.cs b/EssentialCSharp.Chat/Program.cs new file mode 100644 index 00000000..d2343950 --- /dev/null +++ b/EssentialCSharp.Chat/Program.cs @@ -0,0 +1,373 @@ +using System.CommandLine; +using System.Text.Json; +using EssentialCSharp.Chat.Common.Extensions; +using EssentialCSharp.Chat.Common.Services; +using Microsoft.Extensions.Configuration; +using Microsoft.Extensions.DependencyInjection; +using Microsoft.Extensions.Logging; +using Microsoft.SemanticKernel; + +namespace EssentialCSharp.Chat; + +public class Program +{ + private static readonly JsonSerializerOptions _JsonOptions = new() { WriteIndented = true }; + + static int Main(string[] args) + { + Option directoryOption = new("--directory") + { + Description = "Directory containing markdown files.", + Required = true + }; + Option filePatternOption = new("--file-pattern") + { + Description = "File pattern to match (e.g. *.md)", + DefaultValueFactory = _ => "*.md" + }; + Option outputDirectoryOption = new("--output-directory") + { + Description = "Directory to write chunked output files. 
If not provided, output is written to console.", + }; + + RootCommand rootCommand = new("EssentialCSharp.Chat Utilities"); + + var chunkMarkdownCommand = new Command("chunk-markdown", "Chunk markdown files in a directory.") + { + directoryOption, + filePatternOption, + outputDirectoryOption + }; + + var buildVectorDbCommand = new Command("build-vector-db", "Build a vector database from markdown chunks.") + { + directoryOption, + filePatternOption, + }; + + var chatCommand = new Command("chat", "Start an interactive AI chat session.") + { + new Option("--stream"), + new Option("--web-search"), + new Option("--contextual-search"), + new Option("--system-prompt") + }; + + buildVectorDbCommand.SetAction(async (ParseResult parseResult, CancellationToken cancellationToken) => + { + var config = CreateConfiguration(); + + var builder = Kernel.CreateBuilder(); + builder.Services.Configure(config.GetRequiredSection("AIOptions")); + + // Use shared extension to register Azure OpenAI services with configuration + builder.Services.AddAzureOpenAIServices(config); + + builder.Services.AddLogging(loggingBuilder => + { + loggingBuilder.AddSimpleConsole(options => + { + options.TimestampFormat = "HH:mm:ss "; + options.SingleLine = true; + }); + }); + + // Build the kernel and get the data uploader. + var kernel = builder.Build(); + var directory = parseResult.GetValue(directoryOption); + var filePattern = parseResult.GetValue(filePatternOption) ?? "*.md"; + var markdownService = kernel.GetRequiredService(); + if (directory is null) + { + Console.Error.WriteLine("Error: Directory is required."); + return; + } + var results = await markdownService.ProcessMarkdownFilesAsync(directory, filePattern); + // Convert results to BookContentChunks + var bookContentChunks = results.SelectMany(result => result.ToBookContentChunks()).ToList(); + // Generate embeddings and upload to vector store + var embeddingService = kernel.GetRequiredService(); + await embeddingService.GenerateBookContentEmbeddingsAndUploadToVectorStore(bookContentChunks, cancellationToken, "markdown_chunks"); + Console.WriteLine($"Successfully processed {bookContentChunks.Count} chunks."); + }); + + chatCommand.SetAction(async (ParseResult parseResult, CancellationToken cancellationToken) => + { + var config = CreateConfiguration(); + + // https://learn.microsoft.com/api/mcp + + //SseClientTransport microsoftLearnMcp = new SseClientTransport( + // new SseClientTransportOptions + // { + // Name = "Microsoft Learn MCP", + // Endpoint = new Uri("https://learn.microsoft.com/api/mcp"), + // }); + + //IMcpClient mcpClient = await McpClientFactory.CreateAsync(clientTransport: microsoftLearnMcp, cancellationToken: cancellationToken); + + var enableStreaming = parseResult.GetValue("--stream"); + var customSystemPrompt = parseResult.GetValue("--system-prompt"); + + + AIOptions aiOptions = config.GetRequiredSection("AIOptions").Get() ?? 
+            throw new InvalidOperationException(
+                "AIOptions section is missing or not configured correctly in appsettings.json or environment variables.");
+
+            // Create service collection and register dependencies
+            var services = new ServiceCollection();
+            services.Configure(config.GetRequiredSection("AIOptions"));
+            services.AddLogging(builder => builder.AddSimpleConsole(options =>
+            {
+                options.TimestampFormat = "HH:mm:ss ";
+                options.SingleLine = true;
+            }));
+
+            // Use shared extension to register Azure OpenAI services with configuration
+            services.AddAzureOpenAIServices(config);
+
+            var serviceProvider = services.BuildServiceProvider();
+            var aiChatService = serviceProvider.GetRequiredService();
+
+            Console.WriteLine("🤖 AI Chat Session Started!");
+            Console.WriteLine("Features enabled:");
+            Console.WriteLine($"  • Streaming: {(enableStreaming ? "✅" : "❌")}");
+            if (!string.IsNullOrEmpty(customSystemPrompt))
+                Console.WriteLine($"  • Custom System Prompt: {customSystemPrompt}");
+            Console.WriteLine();
+            Console.WriteLine("Commands:");
+            Console.WriteLine("  • 'exit' or 'quit' - End the chat session");
+            Console.WriteLine("  • 'clear' - Start a new conversation context");
+            Console.WriteLine("  • 'help' - Show this help message");
+            Console.WriteLine("  • 'history' - Show conversation history");
+            Console.WriteLine("  • Any other text - Chat with the AI");
+            Console.WriteLine("=====================================");
+
+            // Track conversation context with response IDs
+            string? previousResponseId = null;
+            var conversationHistory = new List<(string Role, string Content)>();
+
+            while (!cancellationToken.IsCancellationRequested)
+            {
+                Console.WriteLine();
+                Console.Write("👤 You: ");
+                var userInput = Console.ReadLine();
+
+                if (string.IsNullOrWhiteSpace(userInput))
+                    continue;
+
+                userInput = userInput.Trim();
+
+                if (userInput.Equals("exit", StringComparison.OrdinalIgnoreCase) ||
+                    userInput.Equals("quit", StringComparison.OrdinalIgnoreCase))
+                {
+                    Console.WriteLine("Goodbye! 👋");
+                    break;
+                }
+
+                if (userInput.Equals("clear", StringComparison.OrdinalIgnoreCase))
+                {
+                    // Reset conversation context when PreviousResponseId is implemented
+                    previousResponseId = null;
+                    conversationHistory.Clear();
+                    Console.WriteLine("🧹 Conversation context cleared. Starting fresh!");
+                    continue;
+                }
+
+                if (userInput.Equals("help", StringComparison.OrdinalIgnoreCase))
+                {
+                    Console.WriteLine();
+                    Console.WriteLine("Commands:");
+                    Console.WriteLine("  • 'exit' or 'quit' - End the chat session");
+                    Console.WriteLine("  • 'clear' - Start a new conversation context");
+                    Console.WriteLine("  • 'help' - Show this help message");
+                    Console.WriteLine("  • 'history' - Show conversation history");
+                    Console.WriteLine("  • Any other text - Chat with the AI");
+                    continue;
+                }
+
+                if (userInput.Equals("history", StringComparison.OrdinalIgnoreCase))
+                {
+                    Console.WriteLine();
+                    Console.WriteLine("📜 Conversation History:");
+                    if (conversationHistory.Count == 0)
+                    {
+                        Console.WriteLine("  No conversation history yet.");
+                    }
+                    else
+                    {
+                        for (int i = 0; i < conversationHistory.Count; i++)
+                        {
+                            var (role, content) = conversationHistory[i];
+                            var emoji = role == "User" ? "👤" : "🤖";
+                            Console.WriteLine($"  {i + 1}. {emoji} {role}: {content}");
+                        }
+                    }
+                    continue;
+                }
+
+                conversationHistory.Add(("User", userInput));
+
+                try
+                {
+                    Console.Write("🤖 AI: ");
+
+                    if (enableStreaming)
+                    {
+                        // Use streaming with optional tools and conversation context
+                        var fullResponse = new System.Text.StringBuilder();
+
+                        await foreach (var (text, responseId) in aiChatService.GetChatCompletionStream(
+                            prompt: userInput/*, mcpClient: mcpClient*/, previousResponseId: previousResponseId, systemPrompt: customSystemPrompt, cancellationToken: cancellationToken))
+                        {
+                            if (!string.IsNullOrEmpty(text))
+                            {
+                                Console.Write(text);
+                                fullResponse.Append(text);
+                            }
+                            if (!string.IsNullOrEmpty(responseId))
+                            {
+                                previousResponseId = responseId; // Update for next turn
+                            }
+                        }
+                        Console.WriteLine();
+
+                        conversationHistory.Add(("Assistant", fullResponse.ToString()));
+                    }
+                    else
+                    {
+                        // Non-streaming response with optional tools and conversation context
+                        var (response, responseId) = await aiChatService.GetChatCompletion(
+                            prompt: userInput, previousResponseId: previousResponseId, systemPrompt: customSystemPrompt, cancellationToken: cancellationToken);
+
+                        Console.WriteLine(response);
+                        conversationHistory.Add(("Assistant", response));
+
+                        if (!string.IsNullOrEmpty(responseId))
+                        {
+                            previousResponseId = responseId;
+                        }
+                    }
+
+                    Console.WriteLine();
+                }
+                catch (OperationCanceledException)
+                {
+                    Console.WriteLine();
+                    Console.WriteLine("Operation cancelled. Goodbye! 👋");
+                    break;
+                }
+                catch (Exception ex)
+                {
+                    Console.WriteLine();
+                    Console.WriteLine($"❌ Error: {ex.Message}");
+                    if (ex.InnerException != null)
+                    {
+                        Console.WriteLine($"   Details: {ex.InnerException.Message}");
+                    }
+                }
+            }
+        });
+
+        chunkMarkdownCommand.SetAction(async parseResult =>
+        {
+            var directory = parseResult.GetValue(directoryOption);
+            var filePattern = parseResult.GetValue(filePatternOption) ?? "*.md";
+            var outputDirectory = parseResult.GetValue(outputDirectoryOption);
+
+            using var loggerFactory = LoggerFactory.Create(builder => builder.AddSimpleConsole());
+            var logger = loggerFactory.CreateLogger();
+            var service = new MarkdownChunkingService(logger);
+            try
+            {
+                if (directory is null)
+                {
+                    Console.Error.WriteLine("Error: Directory is required.");
+                    return;
+                }
+                var results = await service.ProcessMarkdownFilesAsync(directory, filePattern);
+
+                int maxChunkLength = 0;
+                int minChunkLength = 0;
+
+                void WriteChunkingResult(FileChunkingResult result, TextWriter writer)
+                {
+                    // Let's build up some stats over the chunking
+                    var chunkAverage = result.Chunks.Average(chunk => chunk.Length);
+                    var chunkMedian = result.Chunks.OrderBy(chunk => chunk.Length).ElementAt(result.Chunks.Count / 2).Length;
+                    var chunkMax = result.Chunks.Max(chunk => chunk.Length);
+                    var chunkMin = result.Chunks.Min(chunk => chunk.Length);
+                    var chunkTotal = result.Chunks.Sum(chunk => chunk.Length);
+                    var chunkStandardDeviation = Math.Sqrt(result.Chunks.Average(chunk => Math.Pow(chunk.Length - chunkAverage, 2)));
+                    var numberOfOutliers = result.Chunks.Count(chunk => chunk.Length > chunkAverage + chunkStandardDeviation);
+
+                    if (chunkMax > maxChunkLength) maxChunkLength = chunkMax;
+                    if (chunkMin < minChunkLength || minChunkLength == 0) minChunkLength = chunkMin;
+
+                    writer.WriteLine($"File: {result.FileName}");
+                    writer.WriteLine($"Number of Chunks: {result.ChunkCount}");
+                    writer.WriteLine($"Average Chunk Length: {chunkAverage}");
+                    writer.WriteLine($"Median Chunk Length: {chunkMedian}");
+                    writer.WriteLine($"Max Chunk Length: {chunkMax}");
+                    writer.WriteLine($"Min Chunk Length: {chunkMin}");
+                    writer.WriteLine($"Total Chunk Characters: {chunkTotal}");
+                    writer.WriteLine($"Standard Deviation: {chunkStandardDeviation}");
+                    writer.WriteLine($"Number of Outliers: {numberOfOutliers}");
+                    writer.WriteLine($"Original Character Count: {result.OriginalCharCount}");
+                    writer.WriteLine($"New Character Count: {result.TotalChunkCharacters}");
+                    foreach (var chunk in result.Chunks)
+                    {
+                        writer.WriteLine();
+                        writer.WriteLine(chunk);
+                    }
+                }
+
+                if (outputDirectory != null)
+                {
+                    if (!outputDirectory.Exists)
+                        outputDirectory.Create();
+                    foreach (var result in results)
+                    {
+                        var outputFile = Path.Combine(outputDirectory.FullName, Path.GetFileNameWithoutExtension(result.FileName) + ".chunks.txt");
+                        using var writer = new StreamWriter(outputFile, false);
+                        WriteChunkingResult(result, writer);
+                        Console.WriteLine($"Wrote: {outputFile}");
+                    }
+                }
+                else
+                {
+                    foreach (var result in results)
+                    {
+                        WriteChunkingResult(result, Console.Out);
+                    }
+                }
+                Console.WriteLine($"Max Chunk Length: {maxChunkLength}");
+                Console.WriteLine($"Min Chunk Length: {minChunkLength}");
+            }
+            catch (Exception ex)
+            {
+                Console.Error.WriteLine($"Error: {ex.Message}");
+                return;
+            }
+        });
+        rootCommand.Subcommands.Add(chunkMarkdownCommand);
+        rootCommand.Subcommands.Add(buildVectorDbCommand);
+        rootCommand.Subcommands.Add(chatCommand);
+
+        return rootCommand.Parse(args).Invoke();
+    }
+
+    ///
+    /// Creates and configures the IConfiguration used by multiple commands.
+    /// This method centralizes the common configuration setup to reduce code duplication.
+    ///
+    /// The configured IConfigurationRoot
+    private static IConfigurationRoot CreateConfiguration()
+    {
+        return new ConfigurationBuilder()
+            .SetBasePath(IntelliTect.Multitool.RepositoryPaths.GetDefaultRepoRoot())
+            .AddJsonFile("EssentialCSharp.Web/appsettings.json")
+            .AddUserSecrets()
+            .AddEnvironmentVariables()
+            .Build();
+    }
+}
diff --git a/EssentialCSharp.Chat/Properties/launchSettings.json b/EssentialCSharp.Chat/Properties/launchSettings.json
new file mode 100644
index 00000000..cc4de81e
--- /dev/null
+++ b/EssentialCSharp.Chat/Properties/launchSettings.json
@@ -0,0 +1,8 @@
+{
+  "profiles": {
+    "EssentialCSharp.Chat": {
+      "commandName": "Project",
+      "commandLineArgs": "chat --stream --web-search"
+    }
+  }
+}
\ No newline at end of file
diff --git a/EssentialCSharp.Chat/packages.config b/EssentialCSharp.Chat/packages.config
deleted file mode 100644
index 61c14644..00000000
--- a/EssentialCSharp.Chat/packages.config
+++ /dev/null
@@ -1,5 +0,0 @@
-
-
-
-
-
\ No newline at end of file
diff --git a/EssentialCSharp.Chat/requirements.txt b/EssentialCSharp.Chat/requirements.txt
deleted file mode 100644
index 1ed83edc..00000000
--- a/EssentialCSharp.Chat/requirements.txt
+++ /dev/null
@@ -1,2 +0,0 @@
-semantic-kernel==0.9.3b1
-mistune==3.0.1
\ No newline at end of file
diff --git a/EssentialCSharp.Web.sln b/EssentialCSharp.Web.sln
index 37de4f0c..17425b1a 100644
--- a/EssentialCSharp.Web.sln
+++ b/EssentialCSharp.Web.sln
@@ -21,6 +21,12 @@ Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "EssentialCSharp.Web", "Esse
 EndProject
 Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "EssentialCSharp.Web.Tests", "EssentialCSharp.Web.Tests\EssentialCSharp.Web.Tests.csproj", "{5717B439-2CFF-4BC5-A1DC-48BBF0FBE50F}"
 EndProject
+Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "EssentialCSharp.Chat", "EssentialCSharp.Chat\EssentialCSharp.Chat.csproj", "{5D3487A4-F414-1A54-17CE-866AE6298BBD}"
+EndProject
+Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "EssentialCSharp.Chat.Common", "EssentialCSharp.Chat.Shared\EssentialCSharp.Chat.Common.csproj", "{1B9082D5-D325-42DB-9EC3-03A3953EA8EE}"
+EndProject
+Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "EssentialCSharp.Chat.Tests", "EssentialCSharp.Chat.Tests\EssentialCSharp.Chat.Tests.csproj", "{05CC9D8A-D928-4537-AD09-737C43DFC00D}"
+EndProject
 Global
 	GlobalSection(SolutionConfigurationPlatforms) = preSolution
 		Debug|Any CPU = Debug|Any CPU
@@ -35,6 +41,18 @@ Global
 		{5717B439-2CFF-4BC5-A1DC-48BBF0FBE50F}.Debug|Any CPU.Build.0 = Debug|Any CPU
 		{5717B439-2CFF-4BC5-A1DC-48BBF0FBE50F}.Release|Any CPU.ActiveCfg = Release|Any CPU
 		{5717B439-2CFF-4BC5-A1DC-48BBF0FBE50F}.Release|Any CPU.Build.0 = Release|Any CPU
+		{5D3487A4-F414-1A54-17CE-866AE6298BBD}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
+		{5D3487A4-F414-1A54-17CE-866AE6298BBD}.Debug|Any CPU.Build.0 = Debug|Any CPU
+		{5D3487A4-F414-1A54-17CE-866AE6298BBD}.Release|Any CPU.ActiveCfg = Release|Any CPU
+		{5D3487A4-F414-1A54-17CE-866AE6298BBD}.Release|Any CPU.Build.0 = Release|Any CPU
+		{1B9082D5-D325-42DB-9EC3-03A3953EA8EE}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
+		{1B9082D5-D325-42DB-9EC3-03A3953EA8EE}.Debug|Any CPU.Build.0 = Debug|Any CPU
+		{1B9082D5-D325-42DB-9EC3-03A3953EA8EE}.Release|Any CPU.ActiveCfg = Release|Any CPU
+		{1B9082D5-D325-42DB-9EC3-03A3953EA8EE}.Release|Any CPU.Build.0 = Release|Any CPU
+		{05CC9D8A-D928-4537-AD09-737C43DFC00D}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
+		{05CC9D8A-D928-4537-AD09-737C43DFC00D}.Debug|Any CPU.Build.0 = Debug|Any CPU
+		{05CC9D8A-D928-4537-AD09-737C43DFC00D}.Release|Any CPU.ActiveCfg = Release|Any CPU
+		{05CC9D8A-D928-4537-AD09-737C43DFC00D}.Release|Any CPU.Build.0 = Release|Any CPU
 	EndGlobalSection
 	GlobalSection(SolutionProperties) = preSolution
 		HideSolutionNode = FALSE
diff --git a/EssentialCSharp.Web/Controllers/ChatController.cs b/EssentialCSharp.Web/Controllers/ChatController.cs
new file mode 100644
index 00000000..30459a86
--- /dev/null
+++ b/EssentialCSharp.Web/Controllers/ChatController.cs
@@ -0,0 +1,100 @@
+using System.Text.Json;
+using EssentialCSharp.Chat.Common.Services;
+using Microsoft.AspNetCore.Authorization;
+using Microsoft.AspNetCore.Mvc;
+using Microsoft.AspNetCore.RateLimiting;
+
+namespace EssentialCSharp.Web.Controllers;
+
+[ApiController]
+[Route("api/[controller]")]
+[Authorize]
+[EnableRateLimiting("ChatEndpoint")]
+public class ChatController : ControllerBase
+{
+    private readonly AIChatService _AiChatService;
+    private readonly ILogger _Logger;
+
+    public ChatController(ILogger logger, AIChatService aiChatService)
+    {
+        _AiChatService = aiChatService;
+        _Logger = logger;
+    }
+
+    [HttpPost("message")]
+    public async Task SendMessage([FromBody] ChatMessageRequest request, CancellationToken cancellationToken = default)
+    {
+        if (string.IsNullOrWhiteSpace(request.Message))
+        {
+            return BadRequest(new { error = "Message cannot be empty." });
+        }
+
+        // Require user authentication for chat
+        if (!User.Identity?.IsAuthenticated ?? true)
+        {
+            return Unauthorized(new { error = "User must be logged in to use chat." });
+        }
+
+        var (response, responseId) = await _AiChatService.GetChatCompletion(
+            prompt: request.Message,
+            systemPrompt: request.SystemPrompt,
+            previousResponseId: request.PreviousResponseId,
+            enableContextualSearch: request.EnableContextualSearch,
+            cancellationToken: cancellationToken);
+
+        return Ok(new ChatMessageResponse
+        {
+            Response = response,
+            ResponseId = responseId,
+            Timestamp = DateTime.UtcNow
+        });
+    }
+
+    [HttpPost("stream")]
+    public async Task StreamMessage([FromBody] ChatMessageRequest request, CancellationToken cancellationToken = default)
+    {
+        if (string.IsNullOrWhiteSpace(request.Message))
+        {
+            Response.StatusCode = 400;
+            await Response.WriteAsync(JsonSerializer.Serialize(new { error = "Message cannot be empty." }), cancellationToken);
+            return;
+        }
+
+        // Require user authentication for chat
+        if (!User.Identity?.IsAuthenticated ?? true)
+        {
+            Response.StatusCode = 401;
+            await Response.WriteAsync(JsonSerializer.Serialize(new { error = "User must be logged in to use chat." }), cancellationToken);
+            return;
+        }
+
+        Response.ContentType = "text/event-stream";
+        Response.Headers.CacheControl = "no-cache";
+        Response.Headers.Connection = "keep-alive";
+
+        await foreach (var (text, responseId) in _AiChatService.GetChatCompletionStream(
+            prompt: request.Message,
+            systemPrompt: request.SystemPrompt,
+            previousResponseId: request.PreviousResponseId,
+            enableContextualSearch: request.EnableContextualSearch,
+            cancellationToken: cancellationToken))
+        {
+            if (!string.IsNullOrEmpty(text))
+            {
+                var eventData = JsonSerializer.Serialize(new { type = "text", data = text });
+                await Response.WriteAsync($"data: {eventData}\n\n", cancellationToken);
+                await Response.Body.FlushAsync(cancellationToken);
+            }
+
+            if (!string.IsNullOrEmpty(responseId))
+            {
+                var eventData = JsonSerializer.Serialize(new { type = "responseId", data = responseId });
+                await Response.WriteAsync($"data: {eventData}\n\n", cancellationToken);
+                await Response.Body.FlushAsync(cancellationToken);
+            }
+        }
+
+        await Response.WriteAsync("data: [DONE]\n\n", cancellationToken);
+        await Response.Body.FlushAsync(cancellationToken);
+    }
+}
diff --git a/EssentialCSharp.Web/Controllers/ChatMessageRequest.cs b/EssentialCSharp.Web/Controllers/ChatMessageRequest.cs
new file mode 100644
index 00000000..5d7b3bea
--- /dev/null
+++ b/EssentialCSharp.Web/Controllers/ChatMessageRequest.cs
@@ -0,0 +1,10 @@
+namespace EssentialCSharp.Web.Controllers;
+
+public class ChatMessageRequest
+{
+    public string Message { get; set; } = string.Empty;
+    public string? SystemPrompt { get; set; }
+    public string? PreviousResponseId { get; set; }
+    public bool EnableContextualSearch { get; set; } = true;
+    public string? CaptchaResponse { get; set; } // For future captcha implementation
+}
diff --git a/EssentialCSharp.Web/Controllers/ChatMessageResponse.cs b/EssentialCSharp.Web/Controllers/ChatMessageResponse.cs
new file mode 100644
index 00000000..d1c237b1
--- /dev/null
+++ b/EssentialCSharp.Web/Controllers/ChatMessageResponse.cs
@@ -0,0 +1,8 @@
+namespace EssentialCSharp.Web.Controllers;
+
+public class ChatMessageResponse
+{
+    public string Response { get; set; } = string.Empty;
+    public string ResponseId { get; set; } = string.Empty;
+    public DateTime Timestamp { get; set; }
+}
diff --git a/EssentialCSharp.Web/EssentialCSharp.Web.csproj b/EssentialCSharp.Web/EssentialCSharp.Web.csproj
index 2c2b6426..29e435b7 100644
--- a/EssentialCSharp.Web/EssentialCSharp.Web.csproj
+++ b/EssentialCSharp.Web/EssentialCSharp.Web.csproj
@@ -57,6 +57,10 @@ RemoveIdentityAssets
+
+
+
+
diff --git a/EssentialCSharp.Web/Program.cs b/EssentialCSharp.Web/Program.cs
index f0f807b6..1b83f672 100644
--- a/EssentialCSharp.Web/Program.cs
+++ b/EssentialCSharp.Web/Program.cs
@@ -1,4 +1,6 @@
+using System.Threading.RateLimiting;
 using Azure.Monitor.OpenTelemetry.AspNetCore;
+using EssentialCSharp.Chat.Common.Extensions;
 using EssentialCSharp.Web.Areas.Identity.Data;
 using EssentialCSharp.Web.Areas.Identity.Services.PasswordValidators;
 using EssentialCSharp.Web.Data;
@@ -11,6 +13,7 @@ using Microsoft.AspNetCore.HttpOverrides;
 using Microsoft.AspNetCore.Identity;
 using Microsoft.AspNetCore.Identity.UI.Services;
+using Microsoft.AspNetCore.RateLimiting;
 using Microsoft.EntityFrameworkCore;
 
 namespace EssentialCSharp.Web;
@@ -39,8 +42,8 @@ private static void Main(string[] args)
         builder.Logging.AddConsole();
         builder.Services.AddHealthChecks();
 
-        // Create a temporary logger for startup logging
-        using var loggerFactory = LoggerFactory.Create(loggingBuilder =>
+        // Create a logger that's accessible throughout the entire method
+        var loggerFactory = LoggerFactory.Create(loggingBuilder =>
             loggingBuilder.AddConsole().SetMinimumLevel(LogLevel.Information));
         var initialLogger = loggerFactory.CreateLogger();
@@ -93,6 +96,7 @@ private static void Main(string[] args)
         builder.Configuration
             .AddJsonFile("appsettings.json", optional: false, reloadOnChange: true)
+            .AddUserSecrets()
             .AddEnvironmentVariables();
 
         builder.Services.ConfigureApplicationCookie(options =>
@@ -151,6 +155,91 @@ private static void Main(string[] args)
         builder.Services.AddHostedService();
         builder.Services.AddScoped();
 
+        // Add AI Chat services
+        if (!builder.Environment.IsDevelopment())
+        {
+            builder.Services.AddAzureOpenAIServices(configuration);
+        }
+
+        // Add Rate Limiting for API endpoints
+        builder.Services.AddRateLimiter(options =>
+        {
+            // Global rate limiter for authenticated users by username, anonymous by IP
+            options.GlobalLimiter = PartitionedRateLimiter.Create(httpContext =>
+            {
+                var partitionKey = httpContext.User.Identity?.IsAuthenticated == true
+                    ? httpContext.User.Identity.Name ?? "unknown-user"
+                    : httpContext.Connection.RemoteIpAddress?.ToString() ?? "unknown-ip";
+
+                return RateLimitPartition.GetFixedWindowLimiter(
+                    partitionKey: partitionKey,
+                    factory: _ => new FixedWindowRateLimiterOptions
+                    {
+                        PermitLimit = 30, // requests per window
+                        Window = TimeSpan.FromMinutes(1), // minute window
+                        QueueProcessingOrder = QueueProcessingOrder.OldestFirst,
+                        QueueLimit = 0 // No queuing - immediate rejection for better UX
+                    });
+            });
+
+            options.AddFixedWindowLimiter("ChatEndpoint", rateLimiterOptions =>
+            {
+                rateLimiterOptions.PermitLimit = 15; // chat messages per window (reasonable limit)
+                rateLimiterOptions.Window = TimeSpan.FromMinutes(1); // minute window
+                rateLimiterOptions.QueueProcessingOrder = QueueProcessingOrder.OldestFirst;
+                rateLimiterOptions.QueueLimit = 0; // No queuing to make rate limiting immediate
+            });
+
+            options.AddFixedWindowLimiter("Anonymous", rateLimiterOptions =>
+            {
+                rateLimiterOptions.PermitLimit = 5; // requests per window for anonymous users
+                rateLimiterOptions.Window = TimeSpan.FromMinutes(1);
+                rateLimiterOptions.QueueProcessingOrder = QueueProcessingOrder.OldestFirst;
+                rateLimiterOptions.QueueLimit = 0; // No queuing for anonymous users
+            });
+
+            // Custom response when rate limit is exceeded
+            options.OnRejected = async (context, cancellationToken) =>
+            {
+                if (context.HttpContext.Request.Path.StartsWithSegments("/.well-known"))
+                {
+                    return;
+                }
+                context.HttpContext.Response.StatusCode = StatusCodes.Status429TooManyRequests;
+                context.HttpContext.Response.Headers.RetryAfter = "60";
+                if (context.HttpContext.Request.Path.StartsWithSegments("/api/chat"))
+                {
+                    // Custom rejection handling logic
+                    context.HttpContext.Response.ContentType = "application/json";
+
+                    var errorResponse = new
+                    {
+                        error = "Rate limit exceeded. Please wait before sending another message.",
+                        retryAfter = 60,
+                        requiresCaptcha = true,
+                        statusCode = 429
+                    };
+
+                    await context.HttpContext.Response.WriteAsync(
+                        System.Text.Json.JsonSerializer.Serialize(errorResponse),
+                        cancellationToken);
+
+                    // Optional logging
+                    initialLogger.LogWarning("Rate limit exceeded for user: {User}, IP: {IpAddress}",
+                        context.HttpContext.User.Identity?.Name ?? "anonymous",
+                        context.HttpContext.Connection.RemoteIpAddress);
+                    return;
+                }
+
+                await context.HttpContext.Response.WriteAsync("Rate limit exceeded. Please try again later.", cancellationToken);
+
+                // Optional logging
+                initialLogger.LogWarning("Rate limit exceeded for user: {User}, IP: {IpAddress}",
+                    context.HttpContext.User.Identity?.Name ?? "anonymous",
+                    context.HttpContext.Connection.RemoteIpAddress);
+            };
+        });
+
         if (!builder.Environment.IsDevelopment())
         {
             builder.Services.AddHttpClient(client =>
@@ -181,7 +270,7 @@ private static void Main(string[] args)
             });
         }
-
+        loggerFactory.Dispose();
         WebApplication app = builder.Build();
 
         // Configure the HTTP request pipeline.
@@ -208,8 +297,10 @@ private static void Main(string[] args)
         app.UseAuthentication();
         app.UseAuthorization();
 
-        app.UseMiddleware();
+        app.UseRateLimiter();
+
+        app.UseMiddleware();
 
         app.MapRazorPages();
         app.MapDefaultControllerRoute();
@@ -230,7 +321,7 @@ private static void Main(string[] args)
             var routeConfigurationService = app.Services.GetRequiredService();
             SitemapXmlHelpers.EnsureSitemapHealthy(siteMappingService.SiteMappings.ToList());
-            SitemapXmlHelpers.GenerateAndSerializeSitemapXml(wwwrootDirectory, siteMappingService.SiteMappings.ToList(), logger, routeConfigurationService, baseUrl);
+            SitemapXmlHelpers.GenerateAndSerializeSitemapXml(wwwrootDirectory, siteMappingService.SiteMappings.ToList(), initialLogger, routeConfigurationService, baseUrl);
             logger.LogInformation("Sitemap.xml generation completed successfully during application startup");
         }
         catch (Exception ex)
diff --git a/EssentialCSharp.Web/Services/CaptchaService.cs b/EssentialCSharp.Web/Services/CaptchaService.cs
index 603e5a32..165a9611 100644
--- a/EssentialCSharp.Web/Services/CaptchaService.cs
+++ b/EssentialCSharp.Web/Services/CaptchaService.cs
@@ -24,8 +24,12 @@ public class CaptchaService(IHttpClientFactory clientFactory, IOptions
-    public async Task VerifyAsync(string response)
+    public async Task VerifyAsync(string? response)
     {
+        if (string.IsNullOrWhiteSpace(response))
+        {
+            return null;
+        }
         string secret = Options.SecretKey ?? throw new InvalidOperationException($"{CaptchaOptions.CaptchaSender} {nameof(Options.SecretKey)} is unexpectedly null");
         string sitekey = Options.SiteKey ?? throw new InvalidOperationException($"{CaptchaOptions.CaptchaSender} {nameof(Options.SiteKey)} is unexpectedly null");
diff --git a/EssentialCSharp.Web/Services/ICaptchaService.cs b/EssentialCSharp.Web/Services/ICaptchaService.cs
index 39c31b10..e3721c16 100644
--- a/EssentialCSharp.Web/Services/ICaptchaService.cs
+++ b/EssentialCSharp.Web/Services/ICaptchaService.cs
@@ -5,5 +5,5 @@ namespace EssentialCSharp.Web.Services;
 public interface ICaptchaService
 {
     Task VerifyAsync(string secret, string response, string sitekey);
-    Task VerifyAsync(string response);
+    Task VerifyAsync(string? response);
 }
diff --git a/EssentialCSharp.Web/Views/Shared/_Layout.cshtml b/EssentialCSharp.Web/Views/Shared/_Layout.cshtml
index 4f7d1198..e92bdb0c 100644
--- a/EssentialCSharp.Web/Views/Shared/_Layout.cshtml
+++ b/EssentialCSharp.Web/Views/Shared/_Layout.cshtml
@@ -3,20 +3,27 @@
 @using EssentialCSharp.Web.Services
 @using IntelliTect.Multitool
 @using EssentialCSharp.Common
+@using Microsoft.AspNetCore.Identity
+@using EssentialCSharp.Web.Areas.Identity.Data
+@using Microsoft.Extensions.Options
 @inject ISiteMappingService _SiteMappings
+@inject SignInManager SignInManager
+@inject IOptions CaptchaOptions
 @using Microsoft.AspNetCore.Components
 @{
     var prodMap = new ImportMapDefinition(
         new Dictionary
         {
-            { "vue", "./lib/vue/dist/vue.esm-browser.prod.js" },
+            { "vue", "https://cdn.jsdelivr.net/npm/vue@3.5.12/dist/vue.esm-browser.prod.js" },
             { "vue-window-size", "./lib/vue-window-size/composition-api/dist/index.js" },
+            { "vuetify", "https://cdn.jsdelivr.net/npm/vuetify@3.9.2/dist/vuetify.esm.js" },
         },
         null, null);
     var devMap = new ImportMapDefinition(
         new Dictionary
         {
-            { "vue", "./lib/vue/dist/vue.esm-browser.js" },
+            { "vue", "https://cdn.jsdelivr.net/npm/vue@3.5.12/dist/vue.esm-browser.js" },
            { "vue-window-size", "./lib/vue-window-size/composition-api/dist/index.js" },
+            { "vuetify", "https://cdn.jsdelivr.net/npm/vuetify@3.9.2/dist/vuetify.esm.js" },
         },
         null, null);
 }
@@ -43,8 +50,13 @@
+
+
+
+
+
 @*Font Family*@
@@ -74,6 +86,9 @@
     })(window, document, "clarity", "script", "g4keetzd2o");
+
+
+
 @await RenderSectionAsync("HeadAppend", required: false)
@@ -248,6 +265,238 @@
     {{ snackbarMessage }}
+
+    @if (!Context.Request.Path.StartsWithSegments("/Identity"))
+    {
+        [chat widget markup not recoverable from this diff]
+    }
+
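Usage sketch (not part of the diff): the request fields and SSE event shapes below mirror ChatMessageRequest and the events ChatController writes to /api/chat/stream; the host URL, authentication handling, and all local names are illustrative assumptions.

using System;
using System.IO;
using System.Net.Http;
using System.Net.Http.Json;
using System.Text.Json;

// Assumes an already-authenticated HttpClient (the endpoint is [Authorize]); cookie/token setup is omitted.
using var client = new HttpClient { BaseAddress = new Uri("https://localhost:5001") }; // illustrative host

using var request = new HttpRequestMessage(HttpMethod.Post, "api/chat/stream")
{
    // Mirrors ChatMessageRequest: Message plus optional SystemPrompt, PreviousResponseId, EnableContextualSearch.
    Content = JsonContent.Create(new { Message = "What is a record struct?", EnableContextualSearch = true })
};

using var response = await client.SendAsync(request, HttpCompletionOption.ResponseHeadersRead);
response.EnsureSuccessStatusCode();

using var stream = await response.Content.ReadAsStreamAsync();
using var reader = new StreamReader(stream);

string? responseId = null;
while (await reader.ReadLineAsync() is { } line)
{
    if (!line.StartsWith("data: ")) continue;   // SSE payloads are written as "data: {...}" lines
    var payload = line["data: ".Length..];
    if (payload == "[DONE]") break;             // the controller ends the stream with [DONE]

    using var doc = JsonDocument.Parse(payload);
    var type = doc.RootElement.GetProperty("type").GetString();
    var data = doc.RootElement.GetProperty("data").GetString();

    if (type == "text") Console.Write(data);              // streamed chunk of the answer
    else if (type == "responseId") responseId = data;     // pass back as PreviousResponseId on the next turn
}
Console.WriteLine();
Console.WriteLine($"responseId for the next request: {responseId}");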