diff --git a/Cargo.lock b/Cargo.lock index 443014a3..93848eb0 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -17,6 +17,12 @@ version = "1.0.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f26201604c87b1e01bd3d98f8d5d9a8fcbb815e8cedb41ffccbeb4bf593a35fe" +[[package]] +name = "adler2" +version = "2.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "320119579fcad9c21884f5c4861d16174d0e06250625266f50fe6898340abefa" + [[package]] name = "android_system_properties" version = "0.1.5" @@ -44,6 +50,19 @@ dependencies = [ "pin-project-lite", ] +[[package]] +name = "async-compression" +version = "0.4.32" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5a89bce6054c720275ac2432fbba080a66a2106a44a1b804553930ca6909f4e0" +dependencies = [ + "compression-codecs", + "compression-core", + "futures-core", + "pin-project-lite", + "tokio", +] + [[package]] name = "async-stream" version = "0.3.6" @@ -110,7 +129,7 @@ dependencies = [ "cc", "cfg-if", "libc", - "miniz_oxide", + "miniz_oxide 0.7.1", "object", "rustc-demangle", ] @@ -207,6 +226,23 @@ dependencies = [ "windows-link 0.2.0", ] +[[package]] +name = "compression-codecs" +version = "0.4.31" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ef8a506ec4b81c460798f572caead636d57d3d7e940f998160f52bd254bf2d23" +dependencies = [ + "compression-core", + "flate2", + "memchr", +] + +[[package]] +name = "compression-core" +version = "0.4.29" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e47641d3deaf41fb1538ac1f54735925e275eaf3bf4d55c81b137fba797e5cbb" + [[package]] name = "concurrent-queue" version = "2.5.0" @@ -254,6 +290,15 @@ dependencies = [ "libc", ] +[[package]] +name = "crc32fast" +version = "1.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9481c1c90cbf2ac953f07c8d4a58aa3945c425b7185c9154d67a65e4230da511" +dependencies = [ + "cfg-if", +] + 
[[package]] name = "crossbeam-channel" version = "0.5.1" @@ -345,6 +390,7 @@ version = "0.16.4" dependencies = [ "anyhow", "async-channel", + "async-compression", "async-stream", "async-trait", "base64", @@ -355,6 +401,7 @@ dependencies = [ "debug-ignore", "dropshot_endpoint", "expectorate", + "flate2", "form_urlencoded", "futures", "hostname 0.4.0", @@ -398,6 +445,7 @@ dependencies = [ "tokio", "tokio-rustls 0.25.0", "tokio-tungstenite", + "tokio-util", "toml", "trybuild", "usdt", @@ -509,6 +557,16 @@ version = "2.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e8c02a5121d4ea3eb16a80748c74f5549a5665e4c21333c6098f283870fbdea6" +[[package]] +name = "flate2" +version = "1.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4a3d7db9596fecd151c5f638c0ee5d5bd487b6e0ea232e5dc96d5250f6f94b1d" +dependencies = [ + "crc32fast", + "miniz_oxide 0.8.9", +] + [[package]] name = "fnv" version = "1.0.7" @@ -1229,6 +1287,15 @@ dependencies = [ "adler", ] +[[package]] +name = "miniz_oxide" +version = "0.8.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1fa76a2c86f704bdb222d66965fb3d63269ce38518b83cb0575fca855ebb6316" +dependencies = [ + "adler2", +] + [[package]] name = "mio" version = "1.0.3" @@ -2515,6 +2582,7 @@ checksum = "5427d89453009325de0d8f342c9490009f76e999cb7672d77e46267448f7e6b2" dependencies = [ "bytes", "futures-core", + "futures-io", "futures-sink", "pin-project-lite", "tokio", diff --git a/dropshot/Cargo.toml b/dropshot/Cargo.toml index f30ebcdb..9f8383e1 100644 --- a/dropshot/Cargo.toml +++ b/dropshot/Cargo.toml @@ -14,6 +14,7 @@ categories = ["network-programming", "web-programming::http-server"] workspace = true [dependencies] +async-compression = { version = "0.4", features = ["tokio", "gzip"] } async-stream = "0.3.6" async-trait = "0.1.89" base64 = "0.22.1" @@ -77,6 +78,10 @@ features = [ "derive" ] version = "1.47" features = [ "full" ] +[dependencies.tokio-util] 
+version = "0.7" +features = [ "io", "compat" ] + [dependencies.usdt] version = "0.6.0" optional = true @@ -95,6 +100,7 @@ anyhow = "1.0.100" async-channel = "2.5.0" buf-list = "1.0.3" expectorate = "1.2.0" +flate2 = "1.0" hyper-rustls = "0.26.0" hyper-staticfile = "0.10" lazy_static = "1.5.0" diff --git a/dropshot/src/compression.rs b/dropshot/src/compression.rs new file mode 100644 index 00000000..70c3433b --- /dev/null +++ b/dropshot/src/compression.rs @@ -0,0 +1,858 @@ +// Copyright 2025 Oxide Computer Company + +//! Response compression support for Dropshot. + +use crate::body::Body; +use async_compression::tokio::bufread::GzipEncoder; +use futures::{StreamExt, TryStreamExt}; +use http::{HeaderMap, HeaderValue, Response}; +use hyper::body::{Body as HttpBodyTrait, Frame}; +use tokio_util::io::{ReaderStream, StreamReader}; + +/// Marker type for disabling compression on a response. +/// Insert this into response extensions to prevent compression: +/// ```ignore +/// response.extensions_mut().insert(NoCompression); +/// ``` +#[derive(Debug, Clone, Copy)] +pub struct NoCompression; + +/// Parses the `Accept-Encoding` header into a list of encodings and their +/// associated quality factors. Returns the encoding names in lowercase for +/// easier comparisons. 
+fn parse_accept_encoding(header: &HeaderValue) -> Vec<(String, f32)> { + const DEFAULT_QUALITY: f32 = 1.0; + + let Ok(header_value) = header.to_str() else { + return Vec::new(); + }; + + header_value + .split(',') + .filter_map(|directive| { + let mut parts = directive.trim().split(';'); + let encoding = parts.next()?.trim(); + if encoding.is_empty() { + return None; + } + + let mut quality = DEFAULT_QUALITY; + for param in parts { + let mut param = param.splitn(2, '='); + let name = param.next()?.trim(); + let value = param.next()?.trim(); + + if name.eq_ignore_ascii_case("q") { + if let Ok(parsed) = value.parse::<f32>() { + quality = parsed.clamp(0.0, 1.0); + } + } + } + + Some((encoding.to_ascii_lowercase(), quality)) + }) + .collect() +} + +/// Checks if the request accepts gzip encoding based on the Accept-Encoding header. +/// Handles quality values (q parameter) using RFC-compliant preference rules. +pub fn accepts_gzip_encoding(headers: &HeaderMap) -> bool { + let Some(accept_encoding) = headers.get(http::header::ACCEPT_ENCODING) + else { + return false; + }; + + let mut best_gzip_quality: Option<f32> = None; + let mut best_wildcard_quality: Option<f32> = None; + + // RFC 9110 §12.5.3 specifies that the most preferred (highest quality) + // representation wins, so we retain the maximum q-value we see for each + // relevant coding. + for (encoding, quality) in parse_accept_encoding(accept_encoding) { + match encoding.as_str() { + "gzip" => { + best_gzip_quality = Some( + best_gzip_quality + .map_or(quality, |current| current.max(quality)), + ); + } + "*" => { + best_wildcard_quality = Some( + best_wildcard_quality + .map_or(quality, |current| current.max(quality)), + ); + } + _ => {} + } + } + + if let Some(quality) = best_gzip_quality { + return quality > 0.0; + } + + if let Some(quality) = best_wildcard_quality { + return quality > 0.0; + } + + false +} + +/// Determines if a response should be compressed with gzip.
+pub fn should_compress_response( + request_method: &http::Method, + request_headers: &HeaderMap, + response_status: http::StatusCode, + response_headers: &HeaderMap, + response_extensions: &http::Extensions, +) -> bool { + // Responses that must not have a body per HTTP spec + if response_status.is_informational() + || response_status == http::StatusCode::NO_CONTENT + || response_status == http::StatusCode::NOT_MODIFIED + { + return false; + } + + // HEAD responses have no body + if request_method == http::Method::HEAD { + return false; + } + + // Compressing partial content changes the meaning for clients + if response_status == http::StatusCode::PARTIAL_CONTENT { + return false; + } + + if response_headers.contains_key(http::header::CONTENT_RANGE) { + return false; + } + + if !accepts_gzip_encoding(request_headers) { + return false; + } + + if response_headers.contains_key(http::header::CONTENT_ENCODING) { + return false; + } + + if response_extensions.get::<NoCompression>().is_some() { + return false; + } + + if let Some(content_length) = + response_headers.get(http::header::CONTENT_LENGTH) + { + if let Ok(length_str) = content_length.to_str() { + if let Ok(length) = length_str.parse::<u64>() { + if length < MIN_COMPRESS_SIZE { + return false; + } + } + } + } + + // Only compress when we know the content type + let Some(content_type) = response_headers.get(http::header::CONTENT_TYPE) + else { + return false; + }; + let Ok(ct_str) = content_type.to_str() else { + return false; + }; + + let ct_lower = ct_str.to_ascii_lowercase(); + + // SSE streams prioritize latency over compression + if ct_lower.starts_with("text/event-stream") { + return false; + } + + let is_compressible = ct_lower.starts_with("application/json") + || ct_lower.starts_with("text/") + || ct_lower.starts_with("application/xml") + || ct_lower.starts_with("application/javascript") + || ct_lower.starts_with("application/x-javascript"); + + // RFC 6839 structured syntax suffixes (+json, +xml) + let 
has_compressible_suffix = + ct_lower.contains("+json") || ct_lower.contains("+xml"); + + is_compressible || has_compressible_suffix +} + +/// Minimum size in bytes for a response to be compressed. +/// Responses smaller than this won't benefit from compression and may actually get larger. +const MIN_COMPRESS_SIZE: u64 = 512; + +/// Applies gzip compression to a response using streaming compression. +/// This function wraps the response body in a gzip encoder that compresses data +/// as it's being sent, avoiding the need to buffer the entire response in memory. +/// If the body has a known exact size smaller than MIN_COMPRESS_SIZE, compression is skipped. +pub fn apply_gzip_compression(response: Response<Body>) -> Response<Body> { + let (mut parts, body) = response.into_parts(); + + let size_hint = body.size_hint(); + if let Some(exact_size) = size_hint.exact() { + if exact_size == 0 || exact_size < MIN_COMPRESS_SIZE { + return Response::from_parts(parts, body); + } + } + + // Transform body into a compressed stream: + // Body -> Stream -> AsyncRead -> GzipEncoder -> Stream -> Body + let data_stream = body.into_data_stream(); + let io_stream = data_stream + .map_err(|e| std::io::Error::new(std::io::ErrorKind::Other, e)); + let async_read = StreamReader::new(io_stream); + let gzip_encoder = GzipEncoder::new(tokio::io::BufReader::new(async_read)); + let compressed_stream = ReaderStream::new(gzip_encoder); + + let compressed_body = Body::wrap(http_body_util::StreamBody::new( + compressed_stream.map(|result| { + result.map(Frame::data).map_err(|e| { + Box::new(e) as Box<dyn std::error::Error + Send + Sync> + }) + }), + )); + + parts.headers.insert( + http::header::CONTENT_ENCODING, + HeaderValue::from_static("gzip"), + ); + + // Vary header is critical for caching - prevents serving compressed + // responses to clients that don't accept gzip + let vary_has_accept_encoding = parts + .headers + .get_all(http::header::VARY) + .iter() + .any(header_value_contains_accept_encoding); + + if !vary_has_accept_encoding { + 
parts.headers.append( + http::header::VARY, + HeaderValue::from_static("Accept-Encoding"), + ); + } + + parts.headers.remove(http::header::ACCEPT_RANGES); + parts.headers.remove(http::header::CONTENT_LENGTH); + + Response::from_parts(parts, compressed_body) +} + +fn header_value_contains_accept_encoding(value: &HeaderValue) -> bool { + value.to_str().is_ok_and(|vary| { + vary.split(',') + .any(|v| v.trim().eq_ignore_ascii_case("accept-encoding")) + }) +} + +#[cfg(test)] +mod tests { + use super::*; + use http::Extensions; + + #[test] + fn test_accepts_gzip_encoding_basic() { + let mut headers = HeaderMap::new(); + headers.insert( + http::header::ACCEPT_ENCODING, + HeaderValue::from_static("gzip"), + ); + assert!(accepts_gzip_encoding(&headers)); + } + + #[test] + fn test_should_compress_response_rejects_content_range() { + let request_method = http::Method::GET; + let mut request_headers = HeaderMap::new(); + request_headers.insert( + http::header::ACCEPT_ENCODING, + HeaderValue::from_static("gzip"), + ); + + let response_status = http::StatusCode::OK; + let mut response_headers = HeaderMap::new(); + response_headers.insert( + http::header::CONTENT_TYPE, + HeaderValue::from_static("application/json"), + ); + response_headers.insert( + http::header::CONTENT_RANGE, + HeaderValue::from_static("bytes 0-100/200"), + ); + + let response_extensions = Extensions::new(); + + assert!(!should_compress_response( + &request_method, + &request_headers, + response_status, + &response_headers, + &response_extensions, + )); + } + + #[test] + fn test_should_compress_response_respects_content_length_threshold() { + let request_method = http::Method::GET; + let mut request_headers = HeaderMap::new(); + request_headers.insert( + http::header::ACCEPT_ENCODING, + HeaderValue::from_static("gzip"), + ); + + let response_status = http::StatusCode::OK; + let mut response_headers = HeaderMap::new(); + response_headers.insert( + http::header::CONTENT_TYPE, + 
HeaderValue::from_static("application/json"), + ); + response_headers.insert( + http::header::CONTENT_LENGTH, + HeaderValue::from_str(&(MIN_COMPRESS_SIZE - 1).to_string()) + .unwrap(), + ); + + let response_extensions = Extensions::new(); + + assert!(!should_compress_response( + &request_method, + &request_headers, + response_status, + &response_headers, + &response_extensions, + )); + } + + #[test] + fn test_apply_gzip_compression_removes_accept_ranges_and_sets_vary() { + let body = "x".repeat((MIN_COMPRESS_SIZE + 10) as usize); + let response = Response::builder() + .header(http::header::CONTENT_TYPE, "application/json") + .header(http::header::ACCEPT_RANGES, "bytes") + .body(Body::from(body)) + .unwrap(); + + let compressed = apply_gzip_compression(response); + let headers = compressed.headers(); + + let gzip = HeaderValue::from_static("gzip"); + assert_eq!(headers.get(http::header::CONTENT_ENCODING), Some(&gzip)); + assert!(!headers.contains_key(http::header::ACCEPT_RANGES)); + + let vary_values: Vec<_> = headers + .get_all(http::header::VARY) + .iter() + .map(|value| value.to_str().unwrap().to_string()) + .collect(); + assert!(vary_values + .iter() + .any(|value| value.eq_ignore_ascii_case("accept-encoding"))); + } + + #[test] + fn test_apply_gzip_compression_avoids_duplicate_vary_entries() { + let body = "x".repeat((MIN_COMPRESS_SIZE + 10) as usize); + let response = Response::builder() + .header(http::header::CONTENT_TYPE, "application/json") + .header(http::header::VARY, "Accept-Encoding, Accept-Language") + .body(Body::from(body)) + .unwrap(); + + let compressed = apply_gzip_compression(response); + let mut accept_encoding_count = 0; + for value in compressed.headers().get_all(http::header::VARY).iter() { + let text = value.to_str().unwrap(); + accept_encoding_count += text + .split(',') + .filter(|v| v.trim().eq_ignore_ascii_case("accept-encoding")) + .count(); + } + + assert_eq!(accept_encoding_count, 1); + } + + #[test] + fn 
test_accepts_gzip_encoding_with_positive_quality() { + let mut headers = HeaderMap::new(); + headers.insert( + http::header::ACCEPT_ENCODING, + HeaderValue::from_static("gzip;q=0.8"), + ); + assert!(accepts_gzip_encoding(&headers)); + } + + #[test] + fn test_accepts_gzip_encoding_rejects_zero_quality() { + let mut headers = HeaderMap::new(); + headers.insert( + http::header::ACCEPT_ENCODING, + HeaderValue::from_static("gzip;q=0"), + ); + assert!(!accepts_gzip_encoding(&headers)); + } + + #[test] + fn test_accepts_gzip_encoding_wildcard() { + let mut headers = HeaderMap::new(); + headers.insert( + http::header::ACCEPT_ENCODING, + HeaderValue::from_static("*"), + ); + assert!(accepts_gzip_encoding(&headers)); + } + + #[test] + fn test_accepts_gzip_encoding_wildcard_with_quality() { + let mut headers = HeaderMap::new(); + headers.insert( + http::header::ACCEPT_ENCODING, + HeaderValue::from_static("*;q=0.5"), + ); + assert!(accepts_gzip_encoding(&headers)); + } + + #[test] + fn test_accepts_gzip_encoding_wildcard_rejected() { + let mut headers = HeaderMap::new(); + headers.insert( + http::header::ACCEPT_ENCODING, + HeaderValue::from_static("*;q=0"), + ); + assert!(!accepts_gzip_encoding(&headers)); + } + + #[test] + fn test_accepts_gzip_encoding_multiple_encodings() { + let mut headers = HeaderMap::new(); + headers.insert( + http::header::ACCEPT_ENCODING, + HeaderValue::from_static("deflate, gzip, br"), + ); + assert!(accepts_gzip_encoding(&headers)); + } + + #[test] + fn test_accepts_gzip_encoding_gzip_takes_precedence_over_wildcard() { + // Explicit gzip rejection should override wildcard acceptance + let mut headers = HeaderMap::new(); + headers.insert( + http::header::ACCEPT_ENCODING, + HeaderValue::from_static("*;q=1.0, gzip;q=0"), + ); + assert!(!accepts_gzip_encoding(&headers)); + } + + #[test] + fn test_accepts_gzip_encoding_gzip_acceptance_overrides_wildcard_rejection() + { + // Explicit gzip acceptance should work even if wildcard is rejected + let mut 
headers = HeaderMap::new(); + headers.insert( + http::header::ACCEPT_ENCODING, + HeaderValue::from_static("*;q=0, gzip;q=1.0"), + ); + assert!(accepts_gzip_encoding(&headers)); + } + + #[test] + fn test_accepts_gzip_encoding_prefers_highest_quality() { + let mut headers = HeaderMap::new(); + headers.insert( + http::header::ACCEPT_ENCODING, + HeaderValue::from_static("gzip;q=0, gzip;q=0.5"), + ); + assert!(accepts_gzip_encoding(&headers)); + + let mut headers = HeaderMap::new(); + headers.insert( + http::header::ACCEPT_ENCODING, + HeaderValue::from_static("gzip;q=0.8, gzip;q=0"), + ); + assert!(accepts_gzip_encoding(&headers)); + + let mut headers = HeaderMap::new(); + headers.insert( + http::header::ACCEPT_ENCODING, + HeaderValue::from_static("gzip;q=0, *;q=1"), + ); + assert!(!accepts_gzip_encoding(&headers)); + } + + #[test] + fn test_accepts_gzip_encoding_case_insensitive() { + let mut headers = HeaderMap::new(); + headers.insert( + http::header::ACCEPT_ENCODING, + HeaderValue::from_static("GZIP"), + ); + assert!(accepts_gzip_encoding(&headers)); + + let mut headers = HeaderMap::new(); + headers.insert( + http::header::ACCEPT_ENCODING, + HeaderValue::from_static("GzIp"), + ); + assert!(accepts_gzip_encoding(&headers)); + } + + #[test] + fn test_accepts_gzip_encoding_no_header() { + let headers = HeaderMap::new(); + assert!(!accepts_gzip_encoding(&headers)); + } + + #[test] + fn test_accepts_gzip_encoding_with_spaces() { + let mut headers = HeaderMap::new(); + headers.insert( + http::header::ACCEPT_ENCODING, + HeaderValue::from_static("deflate , gzip ; q=0.8 , br"), + ); + assert!(accepts_gzip_encoding(&headers)); + } + + #[test] + fn test_accepts_gzip_encoding_malformed_quality() { + // If quality parsing fails, should default to 1.0 + let mut headers = HeaderMap::new(); + headers.insert( + http::header::ACCEPT_ENCODING, + HeaderValue::from_static("gzip;q=invalid"), + ); + assert!(accepts_gzip_encoding(&headers)); + } + + #[test] + fn 
test_should_compress_response_basic() { + let method = http::Method::GET; + let mut request_headers = HeaderMap::new(); + request_headers.insert( + http::header::ACCEPT_ENCODING, + HeaderValue::from_static("gzip"), + ); + let status = http::StatusCode::OK; + let mut response_headers = HeaderMap::new(); + response_headers.insert( + http::header::CONTENT_TYPE, + HeaderValue::from_static("application/json"), + ); + let extensions = http::Extensions::new(); + + assert!(should_compress_response( + &method, + &request_headers, + status, + &response_headers, + &extensions + )); + } + + #[test] + fn test_should_compress_response_head_method() { + let method = http::Method::HEAD; + let mut request_headers = HeaderMap::new(); + request_headers.insert( + http::header::ACCEPT_ENCODING, + HeaderValue::from_static("gzip"), + ); + let status = http::StatusCode::OK; + let mut response_headers = HeaderMap::new(); + response_headers.insert( + http::header::CONTENT_TYPE, + HeaderValue::from_static("application/json"), + ); + let extensions = http::Extensions::new(); + + assert!(!should_compress_response( + &method, + &request_headers, + status, + &response_headers, + &extensions + )); + } + + #[test] + fn test_should_compress_response_no_content() { + let method = http::Method::GET; + let mut request_headers = HeaderMap::new(); + request_headers.insert( + http::header::ACCEPT_ENCODING, + HeaderValue::from_static("gzip"), + ); + let status = http::StatusCode::NO_CONTENT; + let response_headers = HeaderMap::new(); + let extensions = http::Extensions::new(); + + assert!(!should_compress_response( + &method, + &request_headers, + status, + &response_headers, + &extensions + )); + } + + #[test] + fn test_should_compress_response_not_modified() { + let method = http::Method::GET; + let mut request_headers = HeaderMap::new(); + request_headers.insert( + http::header::ACCEPT_ENCODING, + HeaderValue::from_static("gzip"), + ); + let status = http::StatusCode::NOT_MODIFIED; + let 
response_headers = HeaderMap::new(); + let extensions = http::Extensions::new(); + + assert!(!should_compress_response( + &method, + &request_headers, + status, + &response_headers, + &extensions + )); + } + + #[test] + fn test_should_compress_response_partial_content() { + let method = http::Method::GET; + let mut request_headers = HeaderMap::new(); + request_headers.insert( + http::header::ACCEPT_ENCODING, + HeaderValue::from_static("gzip"), + ); + let status = http::StatusCode::PARTIAL_CONTENT; + let mut response_headers = HeaderMap::new(); + response_headers.insert( + http::header::CONTENT_TYPE, + HeaderValue::from_static("application/json"), + ); + let extensions = http::Extensions::new(); + + assert!(!should_compress_response( + &method, + &request_headers, + status, + &response_headers, + &extensions + )); + } + + #[test] + fn test_should_compress_response_no_accept_encoding() { + let method = http::Method::GET; + let request_headers = HeaderMap::new(); + let status = http::StatusCode::OK; + let mut response_headers = HeaderMap::new(); + response_headers.insert( + http::header::CONTENT_TYPE, + HeaderValue::from_static("application/json"), + ); + let extensions = http::Extensions::new(); + + assert!(!should_compress_response( + &method, + &request_headers, + status, + &response_headers, + &extensions + )); + } + + #[test] + fn test_should_compress_response_already_encoded() { + let method = http::Method::GET; + let mut request_headers = HeaderMap::new(); + request_headers.insert( + http::header::ACCEPT_ENCODING, + HeaderValue::from_static("gzip"), + ); + let status = http::StatusCode::OK; + let mut response_headers = HeaderMap::new(); + response_headers.insert( + http::header::CONTENT_TYPE, + HeaderValue::from_static("application/json"), + ); + response_headers.insert( + http::header::CONTENT_ENCODING, + HeaderValue::from_static("br"), + ); + let extensions = http::Extensions::new(); + + assert!(!should_compress_response( + &method, + &request_headers, + 
status, + &response_headers, + &extensions + )); + } + + #[test] + fn test_should_compress_response_no_compression_extension() { + let method = http::Method::GET; + let mut request_headers = HeaderMap::new(); + request_headers.insert( + http::header::ACCEPT_ENCODING, + HeaderValue::from_static("gzip"), + ); + let status = http::StatusCode::OK; + let mut response_headers = HeaderMap::new(); + response_headers.insert( + http::header::CONTENT_TYPE, + HeaderValue::from_static("application/json"), + ); + let mut extensions = http::Extensions::new(); + extensions.insert(NoCompression); + + assert!(!should_compress_response( + &method, + &request_headers, + status, + &response_headers, + &extensions + )); + } + + #[test] + fn test_should_compress_response_no_content_type() { + let method = http::Method::GET; + let mut request_headers = HeaderMap::new(); + request_headers.insert( + http::header::ACCEPT_ENCODING, + HeaderValue::from_static("gzip"), + ); + let status = http::StatusCode::OK; + let response_headers = HeaderMap::new(); + let extensions = http::Extensions::new(); + + assert!(!should_compress_response( + &method, + &request_headers, + status, + &response_headers, + &extensions + )); + } + + #[test] + fn test_should_compress_response_sse() { + let method = http::Method::GET; + let mut request_headers = HeaderMap::new(); + request_headers.insert( + http::header::ACCEPT_ENCODING, + HeaderValue::from_static("gzip"), + ); + let status = http::StatusCode::OK; + let mut response_headers = HeaderMap::new(); + response_headers.insert( + http::header::CONTENT_TYPE, + HeaderValue::from_static("TEXT/EVENT-STREAM"), + ); + let extensions = http::Extensions::new(); + + assert!(!should_compress_response( + &method, + &request_headers, + status, + &response_headers, + &extensions + )); + } + + #[test] + fn test_should_compress_response_compressible_content_types() { + let method = http::Method::GET; + let mut request_headers = HeaderMap::new(); + request_headers.insert( + 
http::header::ACCEPT_ENCODING, + HeaderValue::from_static("gzip"), + ); + let status = http::StatusCode::OK; + let extensions = http::Extensions::new(); + + // Test various compressible content types + let compressible_types = vec![ + "application/json", + "APPLICATION/JSON", + "text/plain", + "text/html", + "text/css", + "application/xml", + "application/javascript", + "application/x-javascript", + "application/problem+json", + "application/problem+JSON", + "application/hal+json", + "application/soap+xml", + "application/SOAP+XML", + ]; + + for content_type in compressible_types { + let mut response_headers = HeaderMap::new(); + response_headers.insert( + http::header::CONTENT_TYPE, + HeaderValue::from_str(content_type).unwrap(), + ); + + assert!( + should_compress_response( + &method, + &request_headers, + status, + &response_headers, + &extensions + ), + "Expected {} to be compressible", + content_type + ); + } + } + + #[test] + fn test_should_compress_response_non_compressible_content_types() { + let method = http::Method::GET; + let mut request_headers = HeaderMap::new(); + request_headers.insert( + http::header::ACCEPT_ENCODING, + HeaderValue::from_static("gzip"), + ); + let status = http::StatusCode::OK; + let extensions = http::Extensions::new(); + + // Test various non-compressible content types + let non_compressible_types = vec![ + "image/png", + "image/jpeg", + "video/mp4", + "application/pdf", + "application/zip", + "application/gzip", + "application/octet-stream", + ]; + + for content_type in non_compressible_types { + let mut response_headers = HeaderMap::new(); + response_headers.insert( + http::header::CONTENT_TYPE, + HeaderValue::from_str(content_type).unwrap(), + ); + + assert!( + !should_compress_response( + &method, + &request_headers, + status, + &response_headers, + &extensions + ), + "Expected {} to not be compressible", + content_type + ); + } + } +} diff --git a/dropshot/src/lib.rs b/dropshot/src/lib.rs index 01c6c10a..2efcb4bd 100644 --- 
a/dropshot/src/lib.rs +++ b/dropshot/src/lib.rs @@ -866,6 +866,7 @@ mod dtrace; mod api_description; mod body; +mod compression; mod config; mod error; mod error_status_code; @@ -905,6 +906,7 @@ pub use api_description::TagConfig; pub use api_description::TagDetails; pub use api_description::TagExternalDocs; pub use body::Body; +pub use compression::NoCompression; pub use config::ConfigDropshot; pub use config::ConfigTls; pub use config::HandlerTaskMode; diff --git a/dropshot/src/server.rs b/dropshot/src/server.rs index 40f49c77..f982974b 100644 --- a/dropshot/src/server.rs +++ b/dropshot/src/server.rs @@ -901,7 +901,7 @@ async fn http_request_handle( // this to take forever. // TODO-correctness: Do we need to dump the body on errors? let request = request.map(crate::Body::wrap); - let method = request.method(); + let method = request.method().clone(); let uri = request.uri(); let found_version = server.version_policy.request_version(&request, &request_log)?; @@ -915,8 +915,9 @@ async fn http_request_handle( request: RequestInfo::new(&request, remote_addr), endpoint: lookup_result.endpoint, request_id: request_id.to_string(), - log: request_log, + log: request_log.clone(), }; + let request_headers = rqctx.request.headers().clone(); let handler = lookup_result.handler; let mut response = match server.config.default_handler_task_mode { @@ -930,7 +931,7 @@ async fn http_request_handle( // Spawn the handler so if we're cancelled, the handler still runs // to completion. 
let (tx, rx) = oneshot::channel(); - let request_log = rqctx.log.clone(); + let request_log = request_log.clone(); let worker = server.handler_waitgroup_worker.clone(); let handler_task = tokio::spawn(async move { let request_log = rqctx.log.clone(); @@ -981,6 +982,17 @@ async fn http_request_handle( } } }; + + if crate::compression::should_compress_response( + &method, + &request_headers, + response.status(), + response.headers(), + response.extensions(), + ) { + response = crate::compression::apply_gzip_compression(response); + } + response.headers_mut().insert( HEADER_REQUEST_ID, http::header::HeaderValue::from_str(&request_id).unwrap(), diff --git a/dropshot/src/test_util.rs b/dropshot/src/test_util.rs index 8690c094..d3353aa0 100644 --- a/dropshot/src/test_util.rs +++ b/dropshot/src/test_util.rs @@ -56,11 +56,13 @@ pub const TEST_HEADER_2: &str = "x-dropshot-test-header-2"; // List of allowed HTTP headers in responses. // Used to make sure we don't leak headers unexpectedly. -const ALLOWED_HEADERS: [AllowedHeader<'static>; 8] = [ +const ALLOWED_HEADERS: [AllowedHeader<'static>; 10] = [ + AllowedHeader::new("content-encoding"), AllowedHeader::new("content-length"), AllowedHeader::new("content-type"), AllowedHeader::new("date"), AllowedHeader::new("location"), + AllowedHeader::new("vary"), AllowedHeader::new("x-request-id"), AllowedHeader { name: "transfer-encoding", diff --git a/dropshot/tests/integration-tests/gzip.rs b/dropshot/tests/integration-tests/gzip.rs new file mode 100644 index 00000000..671d041b --- /dev/null +++ b/dropshot/tests/integration-tests/gzip.rs @@ -0,0 +1,674 @@ +// Copyright 2025 Oxide Computer Company + +//! Test cases for gzip response compression. 
+ +use dropshot::endpoint; +use dropshot::ApiDescription; +use dropshot::HttpError; +use dropshot::HttpResponseOk; +use dropshot::RequestContext; +use http::{header, Method, StatusCode}; +use hyper::{Request, Response}; +use serde::{Deserialize, Serialize}; + +use crate::common; + +extern crate slog; + +// Test payload that's large enough to benefit from compression +#[derive(Deserialize, Serialize, schemars::JsonSchema)] +struct LargeTestData { + message: String, + repeated_data: Vec, +} + +// Tiny test payload for testing size threshold +#[derive(Deserialize, Serialize, schemars::JsonSchema)] +struct TinyData { + x: u8, +} + +fn api() -> ApiDescription { + let mut api = ApiDescription::new(); + api.register(api_large_response).unwrap(); + api.register(api_image_response).unwrap(); + api.register(api_small_response).unwrap(); + api.register(api_disable_compression_response).unwrap(); + api.register(api_json_suffix_response).unwrap(); + api.register(api_xml_suffix_response).unwrap(); + api.register(api_no_content_response).unwrap(); + api.register(api_not_modified_response).unwrap(); + api +} + +/// Returns a large JSON response that should compress well +#[endpoint { + method = GET, + path = "/large-response", +}] +async fn api_large_response( + _rqctx: RequestContext, +) -> Result, HttpError> { + // Create a response with repeated data that will compress well + let repeated_text = "This is some repetitive text that should compress very well with gzip compression. 
".repeat(50); + let repeated_data = vec![repeated_text; 100]; // Make it quite large + + Ok(HttpResponseOk(LargeTestData { + message: "This is a large response for testing gzip compression" + .to_string(), + repeated_data, + })) +} + +/// Returns a binary response (image) that should not be compressed +#[endpoint { + method = GET, + path = "/image-response", +}] +async fn api_image_response( + _rqctx: RequestContext, +) -> Result, HttpError> { + // Create a fake image response (just random bytes, but large enough) + let image_data = vec![0u8; 2048]; // 2KB of binary data + + Response::builder() + .status(StatusCode::OK) + .header(header::CONTENT_TYPE, "image/png") + .body(dropshot::Body::from(image_data)) + .map_err(|e| HttpError::for_internal_error(e.to_string())) +} + +/// Returns a tiny JSON response (under 512 bytes) that should not be compressed +#[endpoint { + method = GET, + path = "/small-response", +}] +async fn api_small_response( + _rqctx: RequestContext, +) -> Result, HttpError> { + // Tiny response under 512 bytes threshold: {"x":0} is only 7 bytes + Ok(HttpResponseOk(TinyData { x: 0 })) +} + +/// Returns a large response with compression disabled +#[endpoint { + method = GET, + path = "/disable-compression-response", +}] +async fn api_disable_compression_response( + _rqctx: RequestContext, +) -> Result, HttpError> { + // Create a large response + let repeated_text = "This is some repetitive text. 
".repeat(100); + let data = LargeTestData { + message: "Large response with compression disabled".to_string(), + repeated_data: vec![repeated_text; 10], + }; + + let json_body = serde_json::to_vec(&data) + .map_err(|e| HttpError::for_internal_error(e.to_string()))?; + + let mut response = Response::builder() + .status(StatusCode::OK) + .header(header::CONTENT_TYPE, "application/json") + .body(dropshot::Body::from(json_body)) + .map_err(|e| HttpError::for_internal_error(e.to_string()))?; + + // Disable compression using the NoCompression extension + response.extensions_mut().insert(dropshot::NoCompression); + + Ok(response) +} + +/// Returns a response with application/problem+json content type +#[endpoint { + method = GET, + path = "/json-suffix-response", +}] +async fn api_json_suffix_response( + _rqctx: RequestContext, +) -> Result, HttpError> { + let data = LargeTestData { + message: "Testing +json suffix".to_string(), + repeated_data: vec!["data".to_string(); 100], + }; + + let json_body = serde_json::to_vec(&data) + .map_err(|e| HttpError::for_internal_error(e.to_string()))?; + + Response::builder() + .status(StatusCode::OK) + .header(header::CONTENT_TYPE, "application/problem+json") + .body(dropshot::Body::from(json_body)) + .map_err(|e| HttpError::for_internal_error(e.to_string())) +} + +/// Returns a response with application/soap+xml content type +#[endpoint { + method = GET, + path = "/xml-suffix-response", +}] +async fn api_xml_suffix_response( + _rqctx: RequestContext, +) -> Result, HttpError> { + let xml_body = "".repeat(100).into_bytes(); + + Response::builder() + .status(StatusCode::OK) + .header(header::CONTENT_TYPE, "application/soap+xml") + .body(dropshot::Body::from(xml_body)) + .map_err(|e| HttpError::for_internal_error(e.to_string())) +} + +/// Returns a 204 No Content response +#[endpoint { + method = GET, + path = "/no-content-response", +}] +async fn api_no_content_response( + _rqctx: RequestContext, +) -> Result, HttpError> { + 
Response::builder() + .status(StatusCode::NO_CONTENT) + .body(dropshot::Body::empty()) + .map_err(|e| HttpError::for_internal_error(e.to_string())) +} + +/// Returns a 304 Not Modified response +#[endpoint { + method = GET, + path = "/not-modified-response", +}] +async fn api_not_modified_response( + _rqctx: RequestContext, +) -> Result, HttpError> { + Response::builder() + .status(StatusCode::NOT_MODIFIED) + .body(dropshot::Body::empty()) + .map_err(|e| HttpError::for_internal_error(e.to_string())) +} + +async fn get_response_bytes( + response: &mut Response, +) -> Vec { + use http_body_util::BodyExt; + + let body_bytes = response + .body_mut() + .collect() + .await + .expect("Error reading response body") + .to_bytes(); + + body_bytes.to_vec() +} + +fn decompress_gzip(compressed_data: &[u8]) -> Vec { + use std::io::Read; + + let mut decoder = flate2::read::GzDecoder::new(compressed_data); + let mut decompressed = Vec::new(); + decoder + .read_to_end(&mut decompressed) + .expect("Failed to decompress gzip data"); + decompressed +} + +#[tokio::test] +async fn test_gzip_compression_with_accept_encoding() { + let api = api(); + let testctx = common::test_setup("gzip_compression_accept_encoding", api); + let client = &testctx.client_testctx; + + // Make request WITHOUT Accept-Encoding: gzip header + let uri = client.url("/large-response"); + let request_no_gzip = Request::builder() + .method(Method::GET) + .uri(&uri) + .body(dropshot::Body::empty()) + .expect("Failed to construct request"); + + let mut response_no_gzip = client + .make_request_with_request(request_no_gzip, StatusCode::OK) + .await + .expect("Request without gzip should succeed"); + + // Make request WITH Accept-Encoding: gzip header + let request_with_gzip = Request::builder() + .method(Method::GET) + .uri(&uri) + .header(header::ACCEPT_ENCODING, "gzip") + .body(dropshot::Body::empty()) + .expect("Failed to construct request"); + + let mut response_with_gzip = client + 
.make_request_with_request(request_with_gzip, StatusCode::OK) + .await + .expect("Request with gzip should succeed"); + + // Get response bodies + let uncompressed_body = get_response_bytes(&mut response_no_gzip).await; + let compressed_body = get_response_bytes(&mut response_with_gzip).await; + + // When gzip is implemented, the gzipped response should: + // 1. Have Content-Encoding: gzip header + assert_eq!( + response_with_gzip.headers().get(header::CONTENT_ENCODING), + Some(&header::HeaderValue::from_static("gzip")), + "Response with Accept-Encoding: gzip should have Content-Encoding: gzip header" + ); + + // 2. Be smaller than the uncompressed response + assert!( + compressed_body.len() < uncompressed_body.len(), + "Gzipped response ({} bytes) should be smaller than uncompressed response ({} bytes)", + compressed_body.len(), + uncompressed_body.len() + ); + + // 3. When decompressed, should match the original response + let decompressed_body = decompress_gzip(&compressed_body); + assert_eq!( + decompressed_body, uncompressed_body, + "Decompressed gzip response should match uncompressed response" + ); + + // The response without Accept-Encoding should NOT have Content-Encoding header + assert_eq!( + response_no_gzip.headers().get(header::CONTENT_ENCODING), + None, + "Response without Accept-Encoding: gzip should not have Content-Encoding header" + ); + + testctx.teardown().await; +} + +#[tokio::test] +async fn test_gzip_compression_accepts_multiple_encodings() { + let api = api(); + let testctx = + common::test_setup("gzip_compression_multiple_encodings", api); + let client = &testctx.client_testctx; + + // Test that gzip works when client accepts multiple encodings including gzip + let uri = client.url("/large-response"); + let request = Request::builder() + .method(Method::GET) + .uri(&uri) + .header(header::ACCEPT_ENCODING, "deflate, gzip, br") + .body(dropshot::Body::empty()) + .expect("Failed to construct request"); + + let mut response = client + 
.make_request_with_request(request, StatusCode::OK) + .await + .expect("Request with multiple accept encodings should succeed"); + + // Should still use gzip compression + assert_eq!( + response.headers().get(header::CONTENT_ENCODING), + Some(&header::HeaderValue::from_static("gzip")), + "Response should use gzip when it's one of multiple accepted encodings" + ); + + // Verify the response can be decompressed + let compressed_body = get_response_bytes(&mut response).await; + let _decompressed = decompress_gzip(&compressed_body); // Should not panic + + testctx.teardown().await; +} + +#[tokio::test] +async fn test_no_gzip_without_accept_encoding() { + let api = api(); + let testctx = common::test_setup("no_gzip_without_accept", api); + let client = &testctx.client_testctx; + + // Request without any Accept-Encoding header should not get compressed response + let response = client + .make_request_no_body(Method::GET, "/large-response", StatusCode::OK) + .await + .expect("Request without accept encoding should succeed"); + + // Should not have Content-Encoding header + assert_eq!( + response.headers().get(header::CONTENT_ENCODING), + None, + "Response without Accept-Encoding should not be compressed" + ); + + testctx.teardown().await; +} + +#[tokio::test] +async fn test_no_compression_for_streaming_responses() { + // Test that streaming responses are not compressed even when client accepts gzip + let api = crate::streaming::api(); + let testctx = common::test_setup("no_compression_streaming", api); + let client = &testctx.client_testctx; + + // Make request with Accept-Encoding: gzip header + // Note: We can't use make_request_no_body because it doesn't let us set custom headers + // So we'll use the RequestBuilder pattern used by the client internally + let uri = client.url("/streaming"); + let request = hyper::Request::builder() + .method(http::Method::GET) + .uri(&uri) + .header(http::header::ACCEPT_ENCODING, "gzip") + .body(dropshot::Body::empty()) + 
.expect("Failed to construct request"); + + let mut response = client + .make_request_with_request(request, http::StatusCode::OK) + .await + .expect("Streaming request with gzip accept should succeed"); + + // Should have chunked transfer encoding + let transfer_encoding_header = response.headers().get("transfer-encoding"); + assert_eq!( + Some(&http::HeaderValue::from_static("chunked")), + transfer_encoding_header, + "Streaming response should have transfer-encoding: chunked" + ); + + // Should NOT have gzip content encoding even though client accepts it + assert_eq!( + response.headers().get(http::header::CONTENT_ENCODING), + None, + "Streaming response should not be compressed even with Accept-Encoding: gzip" + ); + + // Consume the body to verify it works (and to allow teardown to proceed) + let body_bytes = get_response_bytes(&mut response).await; + assert!(!body_bytes.is_empty(), "Streaming response should have content"); + + testctx.teardown().await; +} + +#[tokio::test] +async fn test_no_compression_for_non_compressible_content_types() { + let api = api(); + let testctx = common::test_setup("no_compression_non_compressible", api); + let client = &testctx.client_testctx; + + // Request an image with Accept-Encoding: gzip + let uri = client.url("/image-response"); + let request = Request::builder() + .method(Method::GET) + .uri(&uri) + .header(header::ACCEPT_ENCODING, "gzip") + .body(dropshot::Body::empty()) + .expect("Failed to construct request"); + + let response = client + .make_request_with_request(request, StatusCode::OK) + .await + .expect("Image request should succeed"); + + // Binary content (images) should NOT be compressed + assert_eq!( + response.headers().get(header::CONTENT_ENCODING), + None, + "Binary content (image/png) should not be compressed even with Accept-Encoding: gzip" + ); + + // Verify content-type is correct + assert_eq!( + response.headers().get(header::CONTENT_TYPE), + Some(&header::HeaderValue::from_static("image/png")), + 
"Content-Type should be image/png" + ); + + testctx.teardown().await; +} + +#[tokio::test] +async fn test_compression_disabled_with_extension() { + let api = api(); + let testctx = common::test_setup("compression_disabled_extension", api); + let client = &testctx.client_testctx; + + // Request with Accept-Encoding: gzip, but response has NoCompression extension + let uri = client.url("/disable-compression-response"); + let request = Request::builder() + .method(Method::GET) + .uri(&uri) + .header(header::ACCEPT_ENCODING, "gzip") + .body(dropshot::Body::empty()) + .expect("Failed to construct request"); + + let response = client + .make_request_with_request(request, StatusCode::OK) + .await + .expect("Request should succeed"); + + // Should NOT be compressed due to NoCompression extension + assert_eq!( + response.headers().get(header::CONTENT_ENCODING), + None, + "Response with NoCompression extension should not be compressed" + ); + + testctx.teardown().await; +} + +#[tokio::test] +async fn test_no_compression_below_size_threshold() { + let api = api(); + let testctx = common::test_setup("no_compression_small_response", api); + let client = &testctx.client_testctx; + + // Request a tiny response (under 512 bytes) with Accept-Encoding: gzip + let uri = client.url("/small-response"); + let request = Request::builder() + .method(Method::GET) + .uri(&uri) + .header(header::ACCEPT_ENCODING, "gzip") + .body(dropshot::Body::empty()) + .expect("Failed to construct request"); + + let response = client + .make_request_with_request(request, StatusCode::OK) + .await + .expect("Small response request should succeed"); + + // Tiny responses (under 512 bytes) should NOT be compressed + assert_eq!( + response.headers().get(header::CONTENT_ENCODING), + None, + "Responses under 512 bytes should not be compressed" + ); + + testctx.teardown().await; +} + +#[tokio::test] +async fn test_reject_gzip_with_quality_zero() { + let api = api(); + let testctx = 
common::test_setup("reject_gzip_quality_zero", api); + let client = &testctx.client_testctx; + + // Request with gzip explicitly rejected (q=0) + let uri = client.url("/large-response"); + let request = Request::builder() + .method(Method::GET) + .uri(&uri) + .header(header::ACCEPT_ENCODING, "gzip;q=0, deflate") + .body(dropshot::Body::empty()) + .expect("Failed to construct request"); + + let response = client + .make_request_with_request(request, StatusCode::OK) + .await + .expect("Request should succeed"); + + // Should NOT be compressed since gzip has q=0 + assert_eq!( + response.headers().get(header::CONTENT_ENCODING), + None, + "Response should not use gzip when client sets q=0 for gzip" + ); + + testctx.teardown().await; +} + +#[tokio::test] +async fn test_vary_header_is_set() { + let api = api(); + let testctx = common::test_setup("vary_header_set", api); + let client = &testctx.client_testctx; + + // Request with Accept-Encoding: gzip + let uri = client.url("/large-response"); + let request = Request::builder() + .method(Method::GET) + .uri(&uri) + .header(header::ACCEPT_ENCODING, "gzip") + .body(dropshot::Body::empty()) + .expect("Failed to construct request"); + + let response = client + .make_request_with_request(request, StatusCode::OK) + .await + .expect("Request should succeed"); + + // Should have Vary: Accept-Encoding header + assert!( + response.headers().contains_key(header::VARY), + "Response should have Vary header" + ); + + let vary_value = + response.headers().get(header::VARY).unwrap().to_str().unwrap(); + assert!( + vary_value.to_lowercase().contains("accept-encoding"), + "Vary header should include Accept-Encoding, got: {}", + vary_value + ); + + testctx.teardown().await; +} + +#[tokio::test] +async fn test_json_suffix_is_compressed() { + let api = api(); + let testctx = common::test_setup("json_suffix_compressed", api); + let client = &testctx.client_testctx; + + // Request with Accept-Encoding: gzip for application/problem+json + let uri 
= client.url("/json-suffix-response"); + let request = Request::builder() + .method(Method::GET) + .uri(&uri) + .header(header::ACCEPT_ENCODING, "gzip") + .body(dropshot::Body::empty()) + .expect("Failed to construct request"); + + let response = client + .make_request_with_request(request, StatusCode::OK) + .await + .expect("Request should succeed"); + + // Should be compressed since application/problem+json has +json suffix + assert_eq!( + response.headers().get(header::CONTENT_ENCODING), + Some(&header::HeaderValue::from_static("gzip")), + "Response with +json suffix should be compressed" + ); + + testctx.teardown().await; +} + +#[tokio::test] +async fn test_xml_suffix_is_compressed() { + let api = api(); + let testctx = common::test_setup("xml_suffix_compressed", api); + let client = &testctx.client_testctx; + + // Request with Accept-Encoding: gzip for application/soap+xml + let uri = client.url("/xml-suffix-response"); + let request = Request::builder() + .method(Method::GET) + .uri(&uri) + .header(header::ACCEPT_ENCODING, "gzip") + .body(dropshot::Body::empty()) + .expect("Failed to construct request"); + + let response = client + .make_request_with_request(request, StatusCode::OK) + .await + .expect("Request should succeed"); + + // Should be compressed since application/soap+xml has +xml suffix + assert_eq!( + response.headers().get(header::CONTENT_ENCODING), + Some(&header::HeaderValue::from_static("gzip")), + "Response with +xml suffix should be compressed" + ); + + testctx.teardown().await; +} + +#[tokio::test] +async fn test_no_compression_for_204_no_content() { + let api = api(); + let testctx = common::test_setup("no_compression_204", api); + let client = &testctx.client_testctx; + + // Request with Accept-Encoding: gzip for 204 response + let uri = client.url("/no-content-response"); + let request = Request::builder() + .method(Method::GET) + .uri(&uri) + .header(header::ACCEPT_ENCODING, "gzip") + .body(dropshot::Body::empty()) + .expect("Failed to 
construct request"); + + let response = client + .make_request_with_request(request, StatusCode::NO_CONTENT) + .await + .expect("Request should succeed"); + + // Should NOT be compressed (204 must not have body) + assert_eq!( + response.headers().get(header::CONTENT_ENCODING), + None, + "204 No Content should not have Content-Encoding header" + ); + + testctx.teardown().await; +} + +#[tokio::test] +async fn test_no_compression_for_304_not_modified() { + let api = api(); + let testctx = common::test_setup("no_compression_304", api); + let client = &testctx.client_testctx; + + // Request with Accept-Encoding: gzip for 304 response + let uri = client.url("/not-modified-response"); + let request = Request::builder() + .method(Method::GET) + .uri(&uri) + .header(header::ACCEPT_ENCODING, "gzip") + .body(dropshot::Body::empty()) + .expect("Failed to construct request"); + + let response = client + .make_request_with_request(request, StatusCode::NOT_MODIFIED) + .await + .expect("Request should succeed"); + + // Should NOT be compressed (304 must not have body) + assert_eq!( + response.headers().get(header::CONTENT_ENCODING), + None, + "304 Not Modified should not have Content-Encoding header" + ); + + testctx.teardown().await; +} + +// Note: HEAD request test is omitted from integration tests because Dropshot +// requires explicit HEAD endpoint registration. The HEAD logic is tested via +// unit tests in should_compress_response. 
diff --git a/dropshot/tests/integration-tests/main.rs b/dropshot/tests/integration-tests/main.rs index ec49604c..fe10b99a 100644 --- a/dropshot/tests/integration-tests/main.rs +++ b/dropshot/tests/integration-tests/main.rs @@ -16,6 +16,7 @@ mod config; mod custom_errors; mod demo; mod detached_shutdown; +mod gzip; mod multipart; mod openapi; mod pagination; diff --git a/dropshot/tests/integration-tests/streaming.rs b/dropshot/tests/integration-tests/streaming.rs index 718fd8f2..c56af265 100644 --- a/dropshot/tests/integration-tests/streaming.rs +++ b/dropshot/tests/integration-tests/streaming.rs @@ -12,7 +12,7 @@ use crate::common; extern crate slog; -fn api() -> ApiDescription { +pub fn api() -> ApiDescription { let mut api = ApiDescription::new(); api.register(api_streaming).unwrap(); api.register(api_not_streaming).unwrap();