diff --git a/Cargo.lock b/Cargo.lock index 3f8e9b3..5dc129d 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -153,6 +153,19 @@ dependencies = [ "wait-timeout", ] +[[package]] +name = "async-compression" +version = "0.4.33" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "93c1f86859c1af3d514fa19e8323147ff10ea98684e6c7b307912509f50e67b2" +dependencies = [ + "compression-codecs", + "compression-core", + "futures-core", + "pin-project-lite", + "tokio", +] + [[package]] name = "atoi_simd" version = "0.16.1" @@ -573,6 +586,24 @@ dependencies = [ "memchr", ] +[[package]] +name = "compression-codecs" +version = "0.4.32" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "680dc087785c5230f8e8843e2e57ac7c1c90488b6a91b88caa265410568f441b" +dependencies = [ + "brotli", + "compression-core", + "flate2", + "memchr", +] + +[[package]] +name = "compression-core" +version = "0.4.30" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3a9b614a5787ef0c8802a55766480563cb3a93b435898c422ed2a359cf811582" + [[package]] name = "console" version = "0.15.11" @@ -1377,9 +1408,11 @@ name = "http-nu" version = "0.5.1" dependencies = [ "assert_cmd", + "brotli", "bytes", "clap", "ctrlc", + "flate2", "http", "http-body-util", "http-serde", @@ -4426,6 +4459,7 @@ version = "0.6.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "adc82fd73de2a9722ac5da747f12383d2bfdb93591ee6c58486e0097890f05f2" dependencies = [ + "async-compression", "bitflags 2.9.4", "bytes", "futures-core", diff --git a/Cargo.toml b/Cargo.toml index 8ffa6be..8110025 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -47,7 +47,7 @@ tokio-rustls = "0.26.0" scru128 = { version = "3", features = ["serde"] } tracing = "0.1.41" tracing-subscriber = { version = "0.3.19", features = ["env-filter"] } -tower-http = { version = "0.6.6", features = ["fs"] } +tower-http = { version = "0.6.6", features = ["fs", "compression-br", "compression-gzip"] } tower = { version = "0.5.2", features = ["util"] } ureq = { version = "2.12.1", default-features = false, features = ["tls"] } @@ -58,6 +58,8 @@ tempfile = "3.10.1" assert_cmd = "2.0" nix = { version = "0.30", features = ["signal", "process"] } sysinfo = "0.30.12" +brotli = "8" +flate2 = "1.0" diff --git a/src/main.rs b/src/main.rs index a2d1336..d0567b5 100644 --- a/src/main.rs +++ b/src/main.rs @@ -6,9 +6,11 @@ use std::sync::{ }; use clap::Parser; -use hyper::service::service_fn; use hyper_util::rt::TokioIo; +use hyper_util::service::TowerToHyperService; use tokio::signal; +use tower::ServiceBuilder; +use tower_http::compression::CompressionLayer; use tracing_subscriber::{layer::SubscriberExt, util::SubscriberInitExt}; use http_nu::{ @@ -93,9 +95,15 @@ async fn serve( let engine = engine.clone(); tokio::task::spawn(async move { - let service = service_fn(move |req| { - handle(engine.clone(), remote_addr, req) - }); + // Configure compression layer for automatic content-encoding negotiation + let compression = CompressionLayer::new() + .quality(tower_http::CompressionLevel::Default); + let tower_service = ServiceBuilder::new() + .layer(compression) + .service_fn(move |req| { + handle(engine.clone(), remote_addr, req) + }); + let service = TowerToHyperService::new(tower_service); if let Err(err) = hyper::server::conn::http1::Builder::new() .serve_connection(io, service) .await diff --git a/tests/server_test.rs b/tests/server_test.rs index d0a729b..2478cec 100644 --- a/tests/server_test.rs +++ b/tests/server_test.rs @@ -1,6 
+1,33 @@ mod common; use common::TestServer; +/// Helper function to parse HTTP response and verify compression encoding. +/// Returns the body bytes for manual decompression. +fn parse_and_verify_encoding<'a>(response_bytes: &'a [u8], expected_encoding: &str) -> &'a [u8] { + // Find the end of headers + let mut headers_end = 0; + for i in 0..response_bytes.len() - 3 { + if &response_bytes[i..i + 4] == b"\r\n\r\n" { + headers_end = i + 4; + break; + } + } + assert!(headers_end > 0, "Failed to find end of headers"); + + let headers = String::from_utf8_lossy(&response_bytes[..headers_end]); + let body = &response_bytes[headers_end..]; + + // Verify Content-Encoding header + let expected = format!("content-encoding: {}", expected_encoding); + assert!( + headers.contains(&expected), + "Expected {} header, got: {headers}", + expected + ); + + body +} + #[tokio::test] async fn test_server_startup_and_shutdown() { let _server = TestServer::new("127.0.0.1:0", "{|req| $req.method}", false).await; @@ -378,3 +405,244 @@ async fn test_server_unix_graceful_shutdown() { let status = server.wait_for_exit().await; assert!(status.success()); } + +#[tokio::test] +async fn test_brotli_compression_basic() { + // Use a larger response body to ensure compression is applied (tower-http has a min size threshold) + let large_text = "A".repeat(2000); + let closure = format!(r#"{{|req| "{}" }}"#, large_text); + let server = TestServer::new("127.0.0.1:0", &closure, false).await; + tokio::time::sleep(std::time::Duration::from_millis(500)).await; + + // Request with Accept-Encoding: br + let mut cmd = tokio::process::Command::new("curl"); + cmd.arg("-s") + .arg("-i") // Include headers in output + .arg("-H") + .arg("Accept-Encoding: br") + .arg(format!("http://{}", server.address)); + let output = cmd.output().await.expect("Failed to execute curl"); + assert!(output.status.success()); + + let body = parse_and_verify_encoding(&output.stdout, "br"); + + let mut decompressed = Vec::new(); + brotli::BrotliDecompress(&mut &body[..], &mut decompressed) + .expect("Failed to decompress brotli data"); + + let decompressed_text = + String::from_utf8(decompressed).expect("Decompressed data is not valid UTF-8"); + + assert_eq!( + decompressed_text, large_text, + "Decompressed content does not match expected text" + ); +} + +#[tokio::test] +async fn test_gzip_compression_basic() { + // Use a larger response body to ensure compression is applied + let large_text = "B".repeat(2000); + let closure = format!(r#"{{|req| "{}" }}"#, large_text); + let server = TestServer::new("127.0.0.1:0", &closure, false).await; + tokio::time::sleep(std::time::Duration::from_millis(500)).await; + + // Request with Accept-Encoding: gzip + let mut cmd = tokio::process::Command::new("curl"); + cmd.arg("-s") + .arg("-i") // Include headers in output + .arg("-H") + .arg("Accept-Encoding: gzip") + .arg(format!("http://{}", server.address)); + let output = cmd.output().await.expect("Failed to execute curl"); + assert!(output.status.success()); + + let body = parse_and_verify_encoding(&output.stdout, "gzip"); + + use flate2::read::GzDecoder; + use std::io::Read; + + let mut decoder = GzDecoder::new(body); + let mut decompressed = Vec::new(); + decoder + .read_to_end(&mut decompressed) + .expect("Failed to decompress gzip data"); + + let decompressed_text = + String::from_utf8(decompressed).expect("Decompressed data is not valid UTF-8"); + + assert_eq!( + decompressed_text, large_text, + "Decompressed content does not match expected text" + ); +} + 
+#[tokio::test] +async fn test_no_compression_without_accept_encoding() { + let server = TestServer::new("127.0.0.1:0", r#"{|req| "Hello, World!"}"#, false).await; + tokio::time::sleep(std::time::Duration::from_millis(500)).await; + + // Request without Accept-Encoding header + let mut cmd = tokio::process::Command::new("curl"); + cmd.arg("-s") + .arg("-i") // Include headers in output + .arg(format!("http://{}", server.address)); + let output = cmd.output().await.expect("Failed to execute curl"); + assert!(output.status.success()); + + let response = String::from_utf8_lossy(&output.stdout); + + // Verify Content-Encoding header is NOT set + assert!( + !response.contains("content-encoding:"), + "Expected no content-encoding header, got: {response}" + ); + + // Verify body is uncompressed + assert!( + response.contains("Hello, World!"), + "Expected uncompressed body to contain 'Hello, World!'" + ); +} + +#[tokio::test] +async fn test_compression_with_json_response() { + let server = TestServer::new( + "127.0.0.1:0", + r#"{|req| {message: "Hello", data: [1, 2, 3]} | to json}"#, + false, + ) + .await; + tokio::time::sleep(std::time::Duration::from_millis(500)).await; + + let mut cmd = tokio::process::Command::new("curl"); + cmd.arg("-s") + .arg("-i") + .arg("-H") + .arg("Accept-Encoding: br") + .arg(format!("http://{}", server.address)); + let output = cmd.output().await.expect("Failed to execute curl"); + assert!(output.status.success()); + + let body = parse_and_verify_encoding(&output.stdout, "br"); + + let mut decompressed = Vec::new(); + brotli::BrotliDecompress(&mut &body[..], &mut decompressed) + .expect("Failed to decompress brotli data"); + + let decompressed_text = + String::from_utf8(decompressed).expect("Decompressed data is not valid UTF-8"); + + assert!( + decompressed_text.contains(r#""message":"Hello""#) + || decompressed_text.contains(r#""message": "Hello""#), + "Expected decompressed JSON to contain message field" + ); +} + +#[tokio::test] +async fn test_compression_with_streaming_response() { + let server = TestServer::new( + "127.0.0.1:0", + r#"{|req| + .response {status: 200} + 1..5 | each {|i| $"Line ($i)\n"} + }"#, + false, + ) + .await; + tokio::time::sleep(std::time::Duration::from_millis(500)).await; + + let mut cmd = tokio::process::Command::new("curl"); + cmd.arg("-s") + .arg("-i") + .arg("-H") + .arg("Accept-Encoding: br") + .arg(format!("http://{}", server.address)); + let output = cmd.output().await.expect("Failed to execute curl"); + assert!(output.status.success()); + + let body = parse_and_verify_encoding(&output.stdout, "br"); + + let mut decompressed = Vec::new(); + brotli::BrotliDecompress(&mut &body[..], &mut decompressed) + .expect("Failed to decompress brotli data"); + + let decompressed_text = + String::from_utf8(decompressed).expect("Decompressed data is not valid UTF-8"); + + assert!(decompressed_text.contains("Line 1")); + assert!(decompressed_text.contains("Line 2")); + assert!(decompressed_text.contains("Line 3")); + assert!(decompressed_text.contains("Line 4")); +} + +#[tokio::test] +async fn test_compression_with_static_files() { + let tmp = tempfile::tempdir().unwrap(); + let file_path = tmp.path().join("test.txt"); + std::fs::write( + &file_path, + "This is a static file that should be compressed.", + ) + .unwrap(); + + let closure = format!( + "{{|req| .static '{}' $req.path }}", + tmp.path().to_str().unwrap() + ); + let server = TestServer::new("127.0.0.1:0", &closure, false).await; + 
    tokio::time::sleep(std::time::Duration::from_millis(500)).await;
+
+    let mut cmd = tokio::process::Command::new("curl");
+    cmd.arg("-s")
+        .arg("-i")
+        .arg("-H")
+        .arg("Accept-Encoding: br")
+        .arg(format!("http://{}/test.txt", server.address));
+    let output = cmd.output().await.expect("Failed to execute curl");
+    assert!(output.status.success());
+
+    let body = parse_and_verify_encoding(&output.stdout, "br");
+
+    let mut decompressed = Vec::new();
+    brotli::BrotliDecompress(&mut &body[..], &mut decompressed)
+        .expect("Failed to decompress brotli data");
+
+    let decompressed_text =
+        String::from_utf8(decompressed).expect("Decompressed data is not valid UTF-8");
+
+    assert!(
+        decompressed_text.contains("This is a static file that should be compressed."),
+        "Expected decompressed file content"
+    );
+}
+
+#[tokio::test]
+async fn test_compression_with_multiple_accepted_encodings() {
+    // Use a larger response body to ensure compression is applied
+    let large_text = "C".repeat(2000);
+    let closure = format!(r#"{{|req| "{}" }}"#, large_text);
+    let server = TestServer::new("127.0.0.1:0", &closure, false).await;
+    tokio::time::sleep(std::time::Duration::from_millis(500)).await;
+
+    // Request with both br and gzip in Accept-Encoding
+    let mut cmd = tokio::process::Command::new("curl");
+    cmd.arg("-s")
+        .arg("-i")
+        .arg("-H")
+        .arg("Accept-Encoding: br, gzip")
+        .arg("--compressed")
+        .arg(format!("http://{}", server.address));
+    let output = cmd.output().await.expect("Failed to execute curl");
+    assert!(output.status.success());
+
+    let response = String::from_utf8_lossy(&output.stdout);
+
+    // tower-http negotiates one of the client's accepted encodings; the tie-break
+    // between equally weighted encodings is an implementation detail, so only
+    // verify that one of the two advertised encodings was applied.
+    assert!(
+        response.contains("content-encoding: br") || response.contains("content-encoding: gzip"),
+        "Expected either br or gzip encoding"
+    );
+}
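
Note: the compression tests pad their bodies to ~2000 bytes because tower-http's default predicate skips compressing very small responses. If that threshold ever needs to be made explicit, something along these lines could replace the plain CompressionLayer::new() call in serve — a hedged sketch only, not part of this patch; it assumes tower-http 0.6's compress_when, SizeAbove, and NotForContentType predicate API:

    // Sketch: an alternative layer constructor that makes the size threshold explicit.
    // Assumes tower-http 0.6 with the "compression-br" / "compression-gzip" features enabled.
    use tower_http::compression::predicate::{NotForContentType, Predicate, SizeAbove};
    use tower_http::compression::CompressionLayer;
    use tower_http::CompressionLevel;

    fn compression_layer() -> CompressionLayer<impl Predicate + Clone> {
        CompressionLayer::new()
            .quality(CompressionLevel::Default)
            // Only compress bodies above 1 KiB, and never compress SSE streams.
            .compress_when(SizeAbove::new(1024).and(NotForContentType::new("text/event-stream")))
    }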