Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
1 change: 1 addition & 0 deletions Cargo.toml
Original file line number Diff line number Diff line change
Expand Up @@ -19,6 +19,7 @@ license = "BSD-3-Clause"
readme = "README.md"
homepage = "https://github.com/cloudflare/daphne"
repository = "https://github.com/cloudflare/daphne"
rust-version = "1.80"

[profile.release]
opt-level = "s"
Expand Down
2 changes: 1 addition & 1 deletion crates/dapf/src/http_client.rs
Original file line number Diff line number Diff line change
Expand Up @@ -13,7 +13,7 @@ pub struct HttpClient {
inner: HttpClientInner,
}

#[expect(clippy::large_enum_variant)]
#[allow(clippy::large_enum_variant)]
enum HttpClientInner {
/// Never reuse the same reqwest client for two different http requests. Useful for specific
/// debugging or load testing scenarios.
Expand Down
2 changes: 1 addition & 1 deletion crates/dapf/src/main.rs
Original file line number Diff line number Diff line change
Expand Up @@ -214,7 +214,7 @@ enum HpkeAction {
}

#[derive(Debug, Subcommand)]
#[expect(clippy::large_enum_variant)]
#[allow(clippy::large_enum_variant)]
enum TestAction {
/// Add an hpke config to a test-utils enabled `daphne-server`.
AddHpkeConfig {
Expand Down
2 changes: 1 addition & 1 deletion crates/daphne-server/docker/example-service.Dockerfile
Original file line number Diff line number Diff line change
@@ -1,7 +1,7 @@
# Copyright (c) 2024 Cloudflare, Inc. All rights reserved.
# SPDX-License-Identifier: BSD-3-Clause

FROM rust:1.82-bookworm AS builder
FROM rust:1.80-bookworm AS builder

RUN apt update && \
apt install -y \
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -76,7 +76,7 @@ impl Cache {
);
}

#[expect(dead_code)]
#[allow(dead_code)]
pub fn delete<P>(&mut self, key: &str) -> CacheResult<P::Value>
where
P: KvPrefix,
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -487,8 +487,10 @@ mod test {
Ok(Some(Marc::new(fut.await)))
})
.await;
let Ok(s) = result.map(|opt| *opt.unwrap()).map_err(|e| *e);
s
match result.map(|opt| *opt.unwrap()).map_err(|e| *e) {
Ok(s) => s,
Err(infallible) => match infallible {},
}
}
}
}
4 changes: 2 additions & 2 deletions crates/daphne-server/src/storage_proxy_connection/mod.rs
Original file line number Diff line number Diff line change
Expand Up @@ -42,7 +42,7 @@ impl<'h> Do<'h> {
}
}

#[expect(dead_code)]
#[allow(dead_code)]
pub fn with_retry(self) -> Self {
Self {
retry: true,
Expand Down Expand Up @@ -126,7 +126,7 @@ impl Do<'_> {
}
}

#[expect(dead_code)]
#[allow(dead_code)]
pub fn request_with_id<B: DurableMethod + Copy>(
&self,
path: B,
Expand Down
2 changes: 1 addition & 1 deletion crates/daphne-server/tests/e2e/e2e.rs
Original file line number Diff line number Diff line change
Expand Up @@ -1437,7 +1437,7 @@ async fn leader_collect_taskprov_ok(version: DapVersion) {
.poll_collection_url_using_token(client, &collect_uri, DAP_TASKPROV_COLLECTOR_TOKEN)
.await
.unwrap();
#[expect(clippy::format_in_format_args)]
#[allow(clippy::format_in_format_args)]
{
assert_eq!(
resp.status(),
Expand Down
2 changes: 1 addition & 1 deletion crates/daphne-server/tests/e2e/test_runner.rs
Original file line number Diff line number Diff line change
Expand Up @@ -419,7 +419,7 @@ impl TestRunner {
Ok(())
}

#[expect(clippy::too_many_arguments)]
#[allow(clippy::too_many_arguments)]
pub async fn leader_request_expect_abort(
&self,
client: &reqwest::Client,
Expand Down
4 changes: 2 additions & 2 deletions crates/daphne-worker-test/docker/aggregator.Dockerfile
Original file line number Diff line number Diff line change
@@ -1,12 +1,12 @@
# Copyright (c) 2025 Cloudflare, Inc. All rights reserved.
# SPDX-License-Identifier: BSD-3-Clause

FROM rust:1.83-bookworm AS builder
FROM rust:1.80-bookworm AS builder
RUN apt update && apt install -y capnproto clang cmake

# Pre-install worker-build and Rust's wasm32 target to speed up our custom build command
RUN rustup target add wasm32-unknown-unknown
RUN cargo install --git https://github.com/cloudflare/workers-rs
RUN cargo install worker-build@0.1.1 --locked

# Build the worker.
WORKDIR /tmp/dap_test
Expand Down
2 changes: 1 addition & 1 deletion crates/daphne-worker-test/docker/runtests.Dockerfile
Original file line number Diff line number Diff line change
@@ -1,7 +1,7 @@
# Copyright (c) 2024 Cloudflare, Inc. All rights reserved.
# SPDX-License-Identifier: BSD-3-Clause

FROM rust:1.82-bookworm
FROM rust:1.80-bookworm

WORKDIR /tmp/dap_test

Expand Down
4 changes: 2 additions & 2 deletions crates/daphne-worker-test/docker/storage-proxy.Dockerfile
Original file line number Diff line number Diff line change
@@ -1,12 +1,12 @@
# Copyright (c) 2024 Cloudflare, Inc. All rights reserved.
# SPDX-License-Identifier: BSD-3-Clause

FROM rust:1.82-bookworm AS builder
FROM rust:1.80-bookworm AS builder
RUN apt update && apt install -y capnproto clang cmake

# Pre-install worker-build and Rust's wasm32 target to speed up our custom build command
RUN rustup target add wasm32-unknown-unknown
RUN cargo install --git https://github.com/cloudflare/workers-rs
RUN cargo install worker-build@0.1.1 --locked

# Build the storage proxy.
WORKDIR /tmp/dap_test
Expand Down
7 changes: 5 additions & 2 deletions crates/daphne-worker/src/aggregator/router/mod.rs
Original file line number Diff line number Diff line change
Expand Up @@ -118,7 +118,7 @@ pub async fn handle_dap_request(app: App, req: HttpRequest) -> Response {
}

let aggregator = Arc::new(app);
let Ok(response) = router
let response = router
.with_state(aggregator.clone())
.layer(
tower::ServiceBuilder::new().layer(axum::middleware::from_fn_with_state(
Expand All @@ -129,7 +129,10 @@ pub async fn handle_dap_request(app: App, req: HttpRequest) -> Response {
.oneshot(req)
.await;

response
match response {
Ok(response) => response,
Err(infallible) => match infallible {},
}
}

struct AxumDapResponse(axum::response::Response);
Expand Down
2 changes: 1 addition & 1 deletion crates/daphne-worker/src/storage/kv/cache.rs
Original file line number Diff line number Diff line change
Expand Up @@ -74,7 +74,7 @@ impl Cache {
);
}

#[expect(dead_code)]
#[allow(dead_code)]
pub fn delete<P>(&mut self, key: &str) -> CacheResult<P::Value>
where
P: KvPrefix,
Expand Down
6 changes: 3 additions & 3 deletions crates/daphne-worker/src/storage/kv/mod.rs
Original file line number Diff line number Diff line change
Expand Up @@ -349,7 +349,7 @@ impl<'h> Kv<'h> {
self.put_internal::<P>(key, value, Some(expiration)).await
}

#[cfg_attr(not(feature = "test-utils"), expect(dead_code))]
#[cfg_attr(not(feature = "test-utils"), allow(dead_code))]
pub async fn put<P>(&self, key: &P::Key, value: P::Value) -> Result<Marc<P::Value>, Error>
where
P: KvPrefix,
Expand Down Expand Up @@ -395,7 +395,7 @@ impl<'h> Kv<'h> {
}
}

#[cfg_attr(not(feature = "test-utils"), expect(dead_code))]
#[cfg_attr(not(feature = "test-utils"), allow(dead_code))]
pub async fn put_if_not_exists_with_expiration<P>(
&self,
key: &P::Key,
Expand All @@ -411,7 +411,7 @@ impl<'h> Kv<'h> {
.await
}

#[cfg_attr(not(feature = "test-utils"), expect(dead_code))]
#[cfg_attr(not(feature = "test-utils"), allow(dead_code))]
pub async fn put_if_not_exists<P>(
&self,
key: &P::Key,
Expand Down
2 changes: 1 addition & 1 deletion crates/daphne-worker/src/storage/mod.rs
Original file line number Diff line number Diff line change
Expand Up @@ -115,7 +115,7 @@ impl Do<'_> {
}
}

#[expect(dead_code)]
#[allow(dead_code)]
pub fn request_with_id<B: DurableMethod + Copy>(
&self,
path: B,
Expand Down
16 changes: 12 additions & 4 deletions crates/daphne-worker/src/storage_proxy/middleware.rs
Original file line number Diff line number Diff line change
Expand Up @@ -46,8 +46,10 @@ pub async fn bearer_auth(
return (StatusCode::UNAUTHORIZED, "Incorrect authorization token").into_response();
}

let Ok(response) = next.call(request.map(axum::body::Body::new)).await;
response
match next.call(request.map(axum::body::Body::new)).await {
Ok(r) => r,
Err(infallible) => match infallible {},
}
}

#[worker::send]
Expand All @@ -58,7 +60,10 @@ pub async fn time_kv_requests(
mut next: Next,
) -> axum::response::Response {
let start = worker::Date::now();
let Ok(response) = next.call(request).await;
let response = match next.call(request).await {
Ok(r) => r,
Err(infallible) => match infallible {},
};
let elapsed = elapsed(&start);

let op = match method {
Expand Down Expand Up @@ -90,7 +95,10 @@ pub async fn time_do_requests(
mut next: Next,
) -> axum::response::Response {
let start = worker::Date::now();
let Ok(response) = next.call(request).await;
let response = match next.call(request).await {
Ok(r) => r,
Err(infallible) => match infallible {},
};
let elapsed = elapsed(&start);
ctx.metrics.durable_request_time_seconds_observe(
&uri,
Expand Down
2 changes: 1 addition & 1 deletion crates/daphne/benches/pine.rs
Original file line number Diff line number Diff line change
Expand Up @@ -14,7 +14,7 @@ use prio_draft09::{

fn pine(c: &mut Criterion) {
// NOTE We ignore this clippy warning because we may want to benchmark more parameters later.
#[expect(clippy::single_element_loop)]
#[allow(clippy::single_element_loop)]
for (dimension, chunk_len, chunk_len_sq_norm_equal) in [(200_000, 150 * 2, 447 * 18)] {
let pine =
Pine::new_64(1 << 15, dimension, 15, chunk_len, chunk_len_sq_norm_equal).unwrap();
Expand Down
4 changes: 2 additions & 2 deletions crates/daphne/benches/vdaf.rs
Original file line number Diff line number Diff line change
Expand Up @@ -14,8 +14,8 @@ use prio_draft09::{
fn count_vec(c: &mut Criterion) {
for dimension in [100, 1_000, 10_000, 100_000] {
let nonce = [0; 16];
#[expect(clippy::cast_possible_truncation)]
#[expect(clippy::cast_sign_loss)]
#[allow(clippy::cast_possible_truncation)]
#[allow(clippy::cast_sign_loss)]
let chunk_length = (dimension as f64).sqrt() as usize; // asymptotically optimal

// Prio2
Expand Down
2 changes: 1 addition & 1 deletion crates/daphne/src/messages/mod.rs
Original file line number Diff line number Diff line change
Expand Up @@ -841,7 +841,7 @@ impl TryFrom<ReportErrorLatest> for ReportError {
}
}

#[expect(clippy::match_wildcard_for_single_variants)]
#[allow(clippy::match_wildcard_for_single_variants)]
impl TryFrom<&ReportError> for ReportErrorLatest {
type Error = CodecError;

Expand Down
14 changes: 7 additions & 7 deletions crates/daphne/src/pine/mod.rs
Original file line number Diff line number Diff line change
Expand Up @@ -211,8 +211,8 @@ impl<F: FftFriendlyFieldElement, X, const SEED_SIZE: usize> Pine<F, X, SEED_SIZE
};

let (wr_test_bound, wr_test_bits) = {
#[expect(clippy::cast_possible_truncation)]
#[expect(clippy::cast_sign_loss)]
#[allow(clippy::cast_possible_truncation)]
#[allow(clippy::cast_sign_loss)]
let wr_test_bound_int =
(((param.norm_bound as f64) * ALPHA).ceil() as u64 + 1).next_power_of_two();
let wr_test_bits = bits(2 * wr_test_bound_int - 1);
Expand Down Expand Up @@ -308,8 +308,8 @@ fn f64_to_field<F: FftFriendlyFieldElement>(x: f64, two_to_frac_bits: f64) -> Re
let out = x * two_to_frac_bits;
let out = out.floor();
let out = if neg { -out } else { out };
#[expect(clippy::cast_possible_truncation)]
#[expect(clippy::cast_sign_loss)]
#[allow(clippy::cast_possible_truncation)]
#[allow(clippy::cast_sign_loss)]
let out = out as u64;
let out = usize::try_from(out).map_err(|e| {
VdafError::Uncategorized(format!(
Expand Down Expand Up @@ -375,8 +375,8 @@ fn norm_bound_f64_to_u64(norm_bound: f64, frac_bits: usize) -> u64 {
let two_to_frac_bits = f64::from(1 << frac_bits);
let norm_bound = norm_bound * two_to_frac_bits;
let norm_bound = norm_bound.floor();
#[expect(clippy::cast_sign_loss)]
#[expect(clippy::cast_possible_truncation)]
#[allow(clippy::cast_sign_loss)]
#[allow(clippy::cast_possible_truncation)]
let norm_bound = norm_bound as u64;
norm_bound
}
Expand Down Expand Up @@ -527,7 +527,7 @@ mod tests {
},
] {
// clippy: We expect the values to match precisely.
#[expect(clippy::float_cmp)]
#[allow(clippy::float_cmp)]
{
assert_eq!(
field_to_f64(
Expand Down
2 changes: 1 addition & 1 deletion crates/daphne/src/pine/test_vec/mod.rs
Original file line number Diff line number Diff line change
Expand Up @@ -89,7 +89,7 @@ impl TestVec {
// Check that the test vector parameters have the values we expect.
//
// clippy: These are test vectors, so we expect the value to match precisely.
#[expect(clippy::float_cmp)]
#[allow(clippy::float_cmp)]
{
assert_eq!(self.alpha, ALPHA);
}
Expand Down
6 changes: 3 additions & 3 deletions crates/daphne/src/protocol/aggregator.rs
Original file line number Diff line number Diff line change
Expand Up @@ -59,7 +59,7 @@ impl ReplayProtection {
impl DapTaskConfig {
/// Leader -> Helper: Initialize the aggregation flow for a sequence of reports. The outputs are the Leader's
/// state for the aggregation flow and the outbound `AggregationJobInitReq` message.
#[expect(clippy::too_many_arguments)]
#[allow(clippy::too_many_arguments)]
pub fn produce_agg_job_req<S>(
&self,
decrypter: impl HpkeDecrypter,
Expand All @@ -85,7 +85,7 @@ impl DapTaskConfig {
)
}

#[expect(clippy::too_many_arguments)]
#[allow(clippy::too_many_arguments)]
fn produce_agg_job_req_impl<S>(
&self,
decrypter: impl HpkeDecrypter,
Expand Down Expand Up @@ -195,7 +195,7 @@ impl DapTaskConfig {
))
}

#[expect(clippy::too_many_arguments)]
#[allow(clippy::too_many_arguments)]
#[cfg(any(test, feature = "test-utils"))]
pub fn test_produce_agg_job_req<S>(
&self,
Expand Down
2 changes: 1 addition & 1 deletion crates/daphne/src/protocol/client.rs
Original file line number Diff line number Diff line change
Expand Up @@ -57,7 +57,7 @@ impl VdafConfig {
}

/// Generate a report for the given public and input shares with the given extensions.
#[expect(clippy::too_many_arguments)]
#[allow(clippy::too_many_arguments)]
pub(crate) fn produce_report_with_extensions_for_shares(
public_share: Vec<u8>,
input_shares: [Vec<u8>; 2],
Expand Down
2 changes: 1 addition & 1 deletion crates/daphne/src/protocol/collector.rs
Original file line number Diff line number Diff line change
Expand Up @@ -25,7 +25,7 @@ impl VdafConfig {
/// Aggregators. The first encrypted aggregate shares must be the Leader's.
///
/// * `version` is the `DapVersion` to use.
#[expect(clippy::too_many_arguments)]
#[allow(clippy::too_many_arguments)]
pub fn consume_encrypted_agg_shares(
&self,
decrypter: &impl HpkeDecrypter,
Expand Down
2 changes: 1 addition & 1 deletion crates/daphne/src/protocol/report_init.rs
Original file line number Diff line number Diff line change
Expand Up @@ -22,7 +22,7 @@ use std::{
/// The `Peer` parameter can be:
/// - `()` if the report came from a client.
/// - [`WithPeerPrepShare`] if the report came from the leader.
#[expect(clippy::large_enum_variant)]
#[allow(clippy::large_enum_variant)]
#[derive(Clone)]
#[cfg_attr(any(test, feature = "test-utils"), derive(Debug, deepsize::DeepSizeOf))]
pub enum InitializedReport<Peer> {
Expand Down
2 changes: 1 addition & 1 deletion crates/daphne/src/testing/mod.rs
Original file line number Diff line number Diff line change
Expand Up @@ -63,7 +63,7 @@ pub struct AggregationJobTest {
pub(crate) valid_report_range: Range<Time>,

// operational parameters
#[cfg_attr(not(test), expect(dead_code))]
#[cfg_attr(not(test), allow(dead_code))]
pub(crate) leader_registry: prometheus::Registry,
pub(crate) leader_metrics: DaphnePromMetrics,
}
Expand Down
2 changes: 1 addition & 1 deletion crates/daphne/src/testing/report_generator.rs
Original file line number Diff line number Diff line change
Expand Up @@ -36,7 +36,7 @@ impl Iterator for ReportGenerator {
}

impl ReportGenerator {
#[expect(clippy::too_many_arguments)]
#[allow(clippy::too_many_arguments)]
pub fn new(
vdaf: &VdafConfig,
hpke_config_list: &[HpkeConfig; 2],
Expand Down
Loading
Loading