diff --git a/src/api/Elastic.Documentation.Api.Infrastructure/Adapters/AskAi/LlmGatewayAskAiGateway.cs b/src/api/Elastic.Documentation.Api.Infrastructure/Adapters/AskAi/LlmGatewayAskAiGateway.cs
index 3430ecec5..e4f87cd88 100644
--- a/src/api/Elastic.Documentation.Api.Infrastructure/Adapters/AskAi/LlmGatewayAskAiGateway.cs
+++ b/src/api/Elastic.Documentation.Api.Infrastructure/Adapters/AskAi/LlmGatewayAskAiGateway.cs
@@ -25,7 +25,20 @@ public async Task<Stream> AskAi(AskAiRequest askAiRequest, Cancel ctx = default)
 		request.Headers.Add("User-Agent", "elastic-docs-proxy/1.0");
 		request.Headers.Add("Accept", "text/event-stream");
 		request.Content.Headers.ContentType = new System.Net.Http.Headers.MediaTypeHeaderValue("application/json");
+
+		// Use HttpCompletionOption.ResponseHeadersRead to get headers immediately
+		// This allows us to start streaming as soon as headers are received
 		var response = await httpClient.SendAsync(request, HttpCompletionOption.ResponseHeadersRead, ctx);
+
+		// Ensure the response is successful before streaming
+		if (!response.IsSuccessStatusCode)
+		{
+			var errorContent = await response.Content.ReadAsStringAsync(ctx);
+			throw new HttpRequestException($"LLM Gateway returned {response.StatusCode}: {errorContent}");
+		}
+
+		// Return the response stream directly - this enables true streaming
+		// The stream will be consumed as data arrives from the LLM Gateway
 		return await response.Content.ReadAsStreamAsync(ctx);
 	}
 }
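Why ResponseHeadersRead matters in the hunk above: the stream returned by AskAi can be copied to the HTTP client as bytes arrive from the LLM Gateway, instead of only after the full body is buffered. A minimal consumer sketch, assuming an ASP.NET Core HttpResponse on the caller's side; the AskAiStreamingSketch and ForwardAsync names are illustrative and not part of this diff:

// Illustrative sketch only (not part of this diff): forwards the stream returned by
// AskAi to an HTTP client chunk by chunk, so SSE events reach the browser while the
// LLM Gateway is still generating.
using System.IO;
using System.Threading;
using System.Threading.Tasks;
using Microsoft.AspNetCore.Http;

internal static class AskAiStreamingSketch
{
	public static async Task ForwardAsync(Stream upstream, HttpResponse response, CancellationToken ct)
	{
		response.ContentType = "text/event-stream";     // same content type the gateway request asks for via Accept
		await upstream.CopyToAsync(response.Body, ct);  // copies each chunk as it is read from the gateway
		await response.Body.FlushAsync(ct);             // push the final chunk to the client
	}
}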
logger?.LogInformation("LlmGatewayOptions registered successfully"); + + _ = services.AddScoped(); + logger?.LogInformation("AskAiUsecase registered successfully"); + + _ = services.AddScoped, LlmGatewayAskAiGateway>(); + logger?.LogInformation("LlmGatewayAskAiGateway registered successfully"); + } + catch (Exception ex) + { + logger?.LogError(ex, "Failed to configure AskAi use case for environment {AppEnvironment}", appEnv); + throw; + } } private static void AddSearchUsecase(IServiceCollection services, AppEnv appEnv) { diff --git a/src/api/Elastic.Documentation.Api.Lambda/Program.cs b/src/api/Elastic.Documentation.Api.Lambda/Program.cs index 425438fb7..873aa435b 100644 --- a/src/api/Elastic.Documentation.Api.Lambda/Program.cs +++ b/src/api/Elastic.Documentation.Api.Lambda/Program.cs @@ -10,20 +10,52 @@ using Elastic.Documentation.Api.Infrastructure; using Elastic.Documentation.ServiceDefaults; -var builder = WebApplication.CreateSlimBuilder(args); +try +{ + var process = System.Diagnostics.Process.GetCurrentProcess(); + Console.WriteLine($"Starting Lambda application... Memory: {process.WorkingSet64 / 1024 / 1024} MB"); -builder.AddDocumentationServiceDefaults(ref args); + var builder = WebApplication.CreateSlimBuilder(args); + process.Refresh(); + Console.WriteLine($"WebApplication builder created. Memory: {process.WorkingSet64 / 1024 / 1024} MB"); -builder.Services.AddAWSLambdaHosting(LambdaEventSource.RestApi, new SourceGeneratorLambdaJsonSerializer()); -builder.Services.AddElasticDocsApiUsecases(Environment.GetEnvironmentVariable("ENVIRONMENT")); -builder.WebHost.UseKestrelHttpsConfiguration(); + _ = builder.AddDocumentationServiceDefaults(ref args); + process.Refresh(); + Console.WriteLine($"Documentation service defaults added. Memory: {process.WorkingSet64 / 1024 / 1024} MB"); -var app = builder.Build(); + _ = builder.Services.AddAWSLambdaHosting(LambdaEventSource.RestApi, new SourceGeneratorLambdaJsonSerializer()); + process.Refresh(); + Console.WriteLine($"AWS Lambda hosting configured. Memory: {process.WorkingSet64 / 1024 / 1024} MB"); -var v1 = app.MapGroup("/docs/_api/v1"); -v1.MapElasticDocsApiEndpoints(); + var environment = Environment.GetEnvironmentVariable("ENVIRONMENT"); + Console.WriteLine($"Environment: {environment}"); -app.Run(); + builder.Services.AddElasticDocsApiUsecases(environment); + process.Refresh(); + Console.WriteLine($"Elastic docs API use cases added. Memory: {process.WorkingSet64 / 1024 / 1024} MB"); + + _ = builder.WebHost.UseKestrelHttpsConfiguration(); + process.Refresh(); + Console.WriteLine($"Kestrel HTTPS configuration applied. Memory: {process.WorkingSet64 / 1024 / 1024} MB"); + + var app = builder.Build(); + process.Refresh(); + Console.WriteLine($"Application built successfully. Memory: {process.WorkingSet64 / 1024 / 1024} MB"); + + var v1 = app.MapGroup("/docs/_api/v1"); + v1.MapElasticDocsApiEndpoints(); + Console.WriteLine("API endpoints mapped"); + + Console.WriteLine("Application startup completed successfully"); + app.Run(); +} +catch (Exception ex) +{ + Console.WriteLine($"FATAL ERROR during startup: {ex}"); + Console.WriteLine($"Exception type: {ex.GetType().Name}"); + Console.WriteLine($"Stack trace: {ex.StackTrace}"); + throw; +} [JsonSerializable(typeof(APIGatewayProxyRequest))] [JsonSerializable(typeof(APIGatewayProxyResponse))]