Commit abd96f3

fix: forward browser user-agent in OpenRTB device.ua field (#276)
1 parent 7216320 · commit abd96f3

5 files changed: +62 -130 lines


crates/common/src/auction/formats.rs

Lines changed: 5 additions & 5 deletions
@@ -108,7 +108,7 @@ pub fn convert_tsjs_to_auction_request(
             }
 
             // Extract bidder params from the bids array
-            let mut bidders = std::collections::HashMap::new();
+            let mut bidders = HashMap::new();
             if let Some(bids) = &unit.bids {
                 for bid in bids {
                     bidders.insert(bid.bidder.clone(), bid.params.clone());
@@ -119,20 +119,20 @@ convert_tsjs_to_auction_request(
                     id: unit.code.clone(),
                     formats,
                     floor_price: None,
-                    targeting: std::collections::HashMap::new(),
+                    targeting: HashMap::new(),
                     bidders,
                 });
             }
         }
     }
 
-    // Get geo info if available
-    let device = GeoInfo::from_request(req).map(|geo| DeviceInfo {
+    // Build device info with user-agent (always) and geo (if available)
+    let device = Some(DeviceInfo {
         user_agent: req
             .get_header_str("user-agent")
             .map(std::string::ToString::to_string),
         ip: req.get_client_ip_addr().map(|ip| ip.to_string()),
-        geo: Some(geo),
+        geo: GeoInfo::from_request(req),
     });
 
     Ok(AuctionRequest {
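
Previously `device` was `None` whenever `GeoInfo::from_request` returned nothing, so the browser's user-agent never made it into the auction request. With this hunk the device is always built and only `geo` stays optional. A minimal sketch of the resulting value for a request with no usable geo data (the header and IP strings are purely illustrative):

    let device = Some(DeviceInfo {
        user_agent: Some("Mozilla/5.0 (X11; Linux x86_64) Firefox/128.0".to_string()),
        ip: Some("203.0.113.7".to_string()),
        geo: None, // GeoInfo::from_request(req) found nothing for this request
    });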

crates/common/src/integrations/prebid.rs

Lines changed: 10 additions & 11 deletions
@@ -302,7 +302,7 @@ fn transform_prebid_response(
 
 fn rewrite_ad_markup(markup: &str, request_host: &str, request_scheme: &str) -> String {
     let mut content = markup.to_string();
-    let cdn_patterns = vec![
+    let cdn_patterns = [
         ("https://cdn.adsrvr.org", "adsrvr"),
         ("https://ib.adnxs.com", "adnxs"),
         ("https://rtb.openx.net", "openx"),
@@ -451,16 +451,15 @@ impl PrebidAuctionProvider {
             }),
         });
 
-        // Build device object with geo if available
-        let device = request.device.as_ref().and_then(|d| {
-            d.geo.as_ref().map(|geo| Device {
-                geo: Some(Geo {
-                    geo_type: 2, // IP address per OpenRTB spec
-                    country: Some(geo.country.clone()),
-                    city: Some(geo.city.clone()),
-                    region: geo.region.clone(),
-                }),
-            })
+        // Build device object with user-agent and geo if available
+        let device = request.device.as_ref().map(|d| Device {
+            ua: d.user_agent.clone(),
+            geo: d.geo.as_ref().map(|geo| Geo {
+                geo_type: 2, // IP address per OpenRTB spec
+                country: Some(geo.country.clone()),
+                city: Some(geo.city.clone()),
+                region: geo.region.clone(),
+            }),
         });
 
         // Build regs object if Sec-GPC header is present
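
The switch from `and_then` to `map` matters for the same reason: the old closure only produced a `Device` when `d.geo` was present, so a `DeviceInfo` carrying just a user-agent was dropped entirely. A small sketch of the new outcome for that case, using the field names from this hunk (the UA value is hypothetical):

    // DeviceInfo with a user-agent but no geo lookup result.
    // Old mapping: `device` ends up as None and the UA is lost.
    // New mapping: the Device is still built, so device.ua reaches Prebid Server.
    let device = Some(Device {
        ua: Some("Mozilla/5.0 (X11; Linux x86_64) Firefox/128.0".to_string()),
        geo: None,
    });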

crates/common/src/openrtb.rs

Lines changed: 2 additions & 0 deletions
@@ -66,6 +66,8 @@ pub struct UserExt {
 
 #[derive(Debug, Serialize, Default)]
 pub struct Device {
+    #[serde(skip_serializing_if = "Option::is_none")]
+    pub ua: Option<String>,
     #[serde(skip_serializing_if = "Option::is_none")]
     pub geo: Option<Geo>,
 }
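
For illustration, with the derived `Serialize` impl and the `skip_serializing_if` attributes, a `Device` carrying only a user-agent serializes to just the `ua` member. A sketch assuming `serde_json` is available alongside the serde derives (the UA string is illustrative):

    let device = Device {
        ua: Some("Mozilla/5.0 (X11; Linux x86_64) Firefox/128.0".to_string()),
        geo: None,
    };
    // geo is skipped because it is None, so only device.ua appears in the JSON.
    assert_eq!(
        serde_json::to_string(&device).unwrap(),
        r#"{"ua":"Mozilla/5.0 (X11; Linux x86_64) Firefox/128.0"}"#
    );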

crates/common/src/proxy.rs

Lines changed: 20 additions & 36 deletions
@@ -361,6 +361,22 @@ fn finalize_proxied_response_streaming(
     beresp
 }
 
+/// Finalize a proxied response, choosing between streaming passthrough and full
+/// content processing based on the `stream_passthrough` flag.
+fn finalize_response(
+    settings: &Settings,
+    req: &Request,
+    url: &str,
+    beresp: Response,
+    stream_passthrough: bool,
+) -> Result<Response, Report<TrustedServerError>> {
+    if stream_passthrough {
+        Ok(finalize_proxied_response_streaming(req, url, beresp))
+    } else {
+        finalize_proxied_response(settings, req, url, beresp)
+    }
+}
+
 /// Proxy a request to a clear target URL while reusing creative rewrite logic.
 ///
 /// This forwards a curated header set, follows redirects when enabled, and can append
@@ -501,15 +517,7 @@ async fn proxy_with_redirects(
         })?;
 
         if !follow_redirects {
-            return if stream_passthrough {
-                Ok(finalize_proxied_response_streaming(
-                    req,
-                    &current_url,
-                    beresp,
-                ))
-            } else {
-                finalize_proxied_response(settings, req, &current_url, beresp)
-            };
+            return finalize_response(settings, req, &current_url, beresp, stream_passthrough);
         }
 
         let status = beresp.get_status();
@@ -523,31 +531,15 @@
         );
 
         if !is_redirect {
-            return if stream_passthrough {
-                Ok(finalize_proxied_response_streaming(
-                    req,
-                    &current_url,
-                    beresp,
-                ))
-            } else {
-                finalize_proxied_response(settings, req, &current_url, beresp)
-            };
+            return finalize_response(settings, req, &current_url, beresp, stream_passthrough);
         }
 
         let Some(location) = beresp
            .get_header(header::LOCATION)
            .and_then(|h| h.to_str().ok())
            .filter(|value| !value.is_empty())
         else {
-            return if stream_passthrough {
-                Ok(finalize_proxied_response_streaming(
-                    req,
-                    &current_url,
-                    beresp,
-                ))
-            } else {
-                finalize_proxied_response(settings, req, &current_url, beresp)
-            };
+            return finalize_response(settings, req, &current_url, beresp, stream_passthrough);
         };
 
         if redirect_attempt == MAX_REDIRECTS {
@@ -568,15 +560,7 @@
 
         let next_scheme = next_url.scheme().to_ascii_lowercase();
         if next_scheme != "http" && next_scheme != "https" {
-            return if stream_passthrough {
-                Ok(finalize_proxied_response_streaming(
-                    req,
-                    &current_url,
-                    beresp,
-                ))
-            } else {
-                finalize_proxied_response(settings, req, &current_url, beresp)
-            };
+            return finalize_response(settings, req, &current_url, beresp, stream_passthrough);
         }
 
         log::info!(

crates/common/src/streaming_processor.rs

Lines changed: 25 additions & 78 deletions
@@ -224,36 +224,34 @@ impl<P: StreamProcessor> StreamingPipeline<P> {
         Ok(())
     }
 
-    /// Process gzip compressed input to uncompressed output (decompression only)
-    fn process_gzip_to_none<R: Read, W: Write>(
+    /// Decompress input, process content, and write uncompressed output.
+    fn decompress_and_process<R: Read, W: Write>(
         &mut self,
-        input: R,
+        mut decoder: R,
         mut output: W,
+        codec_name: &str,
     ) -> Result<(), Report<TrustedServerError>> {
-        use flate2::read::GzDecoder;
-
-        // Decompress input
-        let mut decoder = GzDecoder::new(input);
         let mut decompressed = Vec::new();
         decoder
             .read_to_end(&mut decompressed)
             .change_context(TrustedServerError::Proxy {
-                message: "Failed to decompress gzip".to_string(),
+                message: format!("Failed to decompress {codec_name}"),
             })?;
 
-        log::info!("Decompressed size: {} bytes", decompressed.len());
+        log::info!(
+            "{codec_name} decompressed size: {} bytes",
+            decompressed.len()
+        );
 
-        // Process the decompressed content
         let processed = self
             .processor
             .process_chunk(&decompressed, true)
             .change_context(TrustedServerError::Proxy {
                 message: "Failed to process content".to_string(),
             })?;
 
-        log::info!("Processed size: {} bytes", processed.len());
+        log::info!("{codec_name} processed size: {} bytes", processed.len());
 
-        // Write uncompressed output
         output
             .write_all(&processed)
             .change_context(TrustedServerError::Proxy {
@@ -263,6 +261,17 @@ impl<P: StreamProcessor> StreamingPipeline<P> {
         Ok(())
     }
 
+    /// Process gzip compressed input to uncompressed output (decompression only)
+    fn process_gzip_to_none<R: Read, W: Write>(
+        &mut self,
+        input: R,
+        output: W,
+    ) -> Result<(), Report<TrustedServerError>> {
+        use flate2::read::GzDecoder;
+
+        self.decompress_and_process(GzDecoder::new(input), output, "gzip")
+    }
+
     /// Process deflate compressed stream
     fn process_deflate_to_deflate<R: Read, W: Write>(
         &mut self,
@@ -283,42 +292,11 @@
     fn process_deflate_to_none<R: Read, W: Write>(
         &mut self,
         input: R,
-        mut output: W,
+        output: W,
     ) -> Result<(), Report<TrustedServerError>> {
         use flate2::read::ZlibDecoder;
 
-        // Decompress input
-        let mut decoder = ZlibDecoder::new(input);
-        let mut decompressed = Vec::new();
-        decoder
-            .read_to_end(&mut decompressed)
-            .change_context(TrustedServerError::Proxy {
-                message: "Failed to decompress deflate".to_string(),
-            })?;
-
-        log::info!(
-            "Deflate->None decompressed size: {} bytes",
-            decompressed.len()
-        );
-
-        // Process the decompressed content
-        let processed = self
-            .processor
-            .process_chunk(&decompressed, true)
-            .change_context(TrustedServerError::Proxy {
-                message: "Failed to process content".to_string(),
-            })?;
-
-        log::info!("Deflate->None processed size: {} bytes", processed.len());
-
-        // Write uncompressed output
-        output
-            .write_all(&processed)
-            .change_context(TrustedServerError::Proxy {
-                message: "Failed to write output".to_string(),
-            })?;
-
-        Ok(())
+        self.decompress_and_process(ZlibDecoder::new(input), output, "deflate")
     }
 
     /// Process brotli compressed stream
@@ -346,42 +324,11 @@
     fn process_brotli_to_none<R: Read, W: Write>(
         &mut self,
         input: R,
-        mut output: W,
+        output: W,
     ) -> Result<(), Report<TrustedServerError>> {
         use brotli::Decompressor;
 
-        // Decompress input
-        let mut decoder = Decompressor::new(input, 4096);
-        let mut decompressed = Vec::new();
-        decoder
-            .read_to_end(&mut decompressed)
-            .change_context(TrustedServerError::Proxy {
-                message: "Failed to decompress brotli".to_string(),
-            })?;
-
-        log::info!(
-            "Brotli->None decompressed size: {} bytes",
-            decompressed.len()
-        );
-
-        // Process the decompressed content
-        let processed = self
-            .processor
-            .process_chunk(&decompressed, true)
-            .change_context(TrustedServerError::Proxy {
-                message: "Failed to process content".to_string(),
-            })?;
-
-        log::info!("Brotli->None processed size: {} bytes", processed.len());
-
-        // Write uncompressed output
-        output
-            .write_all(&processed)
-            .change_context(TrustedServerError::Proxy {
-                message: "Failed to write output".to_string(),
-            })?;
-
-        Ok(())
+        self.decompress_and_process(Decompressor::new(input, 4096), output, "brotli")
     }
 
     /// Generic processing through compression layers
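
The new helper takes an already-constructed decoder (anything implementing `Read`) plus a label used in the log lines, so each codec-specific method collapses to a one-line delegation. Purely as a hypothetical illustration of extending that pattern, a zstd variant could follow the same shape; the `zstd` crate is not part of this change, and its `Decoder::new` constructor and the error handling below are assumptions:

    /// Hypothetical: process zstd compressed input to uncompressed output.
    fn process_zstd_to_none<R: Read, W: Write>(
        &mut self,
        input: R,
        output: W,
    ) -> Result<(), Report<TrustedServerError>> {
        // Unlike GzDecoder::new, zstd's reader-style constructor is fallible.
        let decoder = zstd::stream::read::Decoder::new(input).change_context(
            TrustedServerError::Proxy {
                message: "Failed to create zstd decoder".to_string(),
            },
        )?;
        self.decompress_and_process(decoder, output, "zstd")
    }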
