Skip to content

Commit 040b1ba

Browse files
committed
Move HashedAggregationJobReq to roles/helper
1 parent bdd6738 commit 040b1ba

File tree

11 files changed: +101 −91 lines changed

crates/daphne-server/src/roles/helper.rs

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -3,8 +3,8 @@
33

44
use axum::async_trait;
55
use daphne::{
6-
messages::{request::AggregationJobRequestHash, AggregationJobId, TaskId},
7-
roles::DapHelper,
6+
messages::{AggregationJobId, TaskId},
7+
roles::{helper::AggregationJobRequestHash, DapHelper},
88
DapError, DapVersion,
99
};
1010

crates/daphne-server/src/router/extractor.rs

Lines changed: 2 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -13,10 +13,11 @@ use daphne::{
1313
error::DapAbort,
1414
fatal_error,
1515
messages::{
16-
request::{CollectionPollReq, HashedAggregationJobReq, RequestBody},
16+
request::{CollectionPollReq, RequestBody},
1717
taskprov::TaskprovAdvertisement,
1818
AggregateShareReq, AggregationJobInitReq, CollectionReq, Report, TaskId,
1919
},
20+
roles::helper::HashedAggregationJobReq,
2021
DapError, DapRequest, DapRequestMeta, DapVersion,
2122
};
2223
use daphne_service_utils::{bearer_token::BearerToken, http_headers};

crates/daphne-server/src/router/helper.rs

Lines changed: 5 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -8,8 +8,11 @@ use axum::{
88
routing::{post, put},
99
};
1010
use daphne::{
11-
messages::{request::HashedAggregationJobReq, AggregateShareReq},
12-
roles::{helper, DapHelper},
11+
messages::AggregateShareReq,
12+
roles::{
13+
helper::{self, HashedAggregationJobReq},
14+
DapHelper,
15+
},
1316
};
1417
use http::StatusCode;
1518

crates/daphne-worker/src/aggregator/roles/helper.rs

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -5,8 +5,8 @@ use crate::aggregator::App;
55
use daphne::{
66
error::DapAbort,
77
fatal_error,
8-
messages::{request::AggregationJobRequestHash, AggregationJobId, TaskId},
9-
roles::DapHelper,
8+
messages::{AggregationJobId, TaskId},
9+
roles::{helper::AggregationJobRequestHash, DapHelper},
1010
DapError, DapVersion,
1111
};
1212
use daphne_service_utils::durable_requests::bindings::aggregation_job_store;

crates/daphne-worker/src/aggregator/router/extractor.rs

Lines changed: 2 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -11,10 +11,11 @@ use daphne::{
1111
error::DapAbort,
1212
fatal_error,
1313
messages::{
14-
request::{CollectionPollReq, HashedAggregationJobReq, RequestBody},
14+
request::{CollectionPollReq, RequestBody},
1515
taskprov::TaskprovAdvertisement,
1616
AggregateShareReq, AggregationJobInitReq, CollectionReq, Report, TaskId,
1717
},
18+
roles::helper::HashedAggregationJobReq,
1819
DapError, DapRequest, DapRequestMeta, DapVersion,
1920
};
2021
use daphne_service_utils::{bearer_token::BearerToken, http_headers};

crates/daphne-worker/src/aggregator/router/helper.rs

Lines changed: 5 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -13,8 +13,11 @@ use axum::{
1313
use daphne::{
1414
fatal_error,
1515
hpke::HpkeProvider,
16-
messages::{request::HashedAggregationJobReq, AggregateShareReq},
17-
roles::{helper, DapAggregator, DapHelper},
16+
messages::AggregateShareReq,
17+
roles::{
18+
helper::{self, HashedAggregationJobReq},
19+
DapAggregator, DapHelper,
20+
},
1821
DapError, DapResponse,
1922
};
2023
use daphne_service_utils::compute_offload;

crates/daphne/src/messages/request.rs

Lines changed: 4 additions & 65 deletions
Original file line numberDiff line numberDiff line change
@@ -7,8 +7,10 @@ use super::{
77
taskprov::TaskprovAdvertisement, AggregateShareReq, AggregationJobId, AggregationJobInitReq,
88
CollectionJobId, CollectionReq, Report,
99
};
10-
use crate::{constants::DapMediaType, error::DapAbort, messages::TaskId, DapVersion};
11-
use prio::codec::{ParameterizedDecode, ParameterizedEncode};
10+
use crate::{
11+
constants::DapMediaType, error::DapAbort, messages::TaskId,
12+
roles::helper::HashedAggregationJobReq, DapVersion,
13+
};
1214

1315
pub trait RequestBody {
1416
type ResourceId;
@@ -25,69 +27,6 @@ macro_rules! impl_req_body {
2527
};
2628
}
2729

28-
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
29-
#[cfg_attr(any(test, feature = "test-utils"), derive(deepsize::DeepSizeOf))]
30-
pub struct AggregationJobRequestHash(Vec<u8>);
31-
32-
impl AggregationJobRequestHash {
33-
pub fn get(&self) -> &[u8] {
34-
&self.0
35-
}
36-
37-
fn hash(bytes: &[u8]) -> Self {
38-
Self(
39-
ring::digest::digest(&ring::digest::SHA256, bytes)
40-
.as_ref()
41-
.to_vec(),
42-
)
43-
}
44-
}
45-
46-
pub struct HashedAggregationJobReq {
47-
pub request: AggregationJobInitReq,
48-
pub hash: AggregationJobRequestHash,
49-
}
50-
51-
impl HashedAggregationJobReq {
52-
#[cfg(any(test, feature = "test-utils"))]
53-
pub fn from_aggregation_req(version: DapVersion, request: AggregationJobInitReq) -> Self {
54-
let mut buf = Vec::new();
55-
request.encode_with_param(&version, &mut buf).unwrap();
56-
Self {
57-
request,
58-
hash: AggregationJobRequestHash::hash(&buf),
59-
}
60-
}
61-
}
62-
63-
impl ParameterizedEncode<DapVersion> for HashedAggregationJobReq {
64-
fn encode_with_param(
65-
&self,
66-
encoding_parameter: &DapVersion,
67-
bytes: &mut Vec<u8>,
68-
) -> Result<(), prio::codec::CodecError> {
69-
self.request.encode_with_param(encoding_parameter, bytes)
70-
}
71-
}
72-
73-
impl ParameterizedDecode<DapVersion> for HashedAggregationJobReq {
74-
fn decode_with_param(
75-
decoding_parameter: &DapVersion,
76-
bytes: &mut std::io::Cursor<&[u8]>,
77-
) -> Result<Self, prio::codec::CodecError> {
78-
let start = usize::try_from(bytes.position())
79-
.map_err(|e| prio::codec::CodecError::Other(Box::new(e)))?;
80-
let request = AggregationJobInitReq::decode_with_param(decoding_parameter, bytes)?;
81-
let end = usize::try_from(bytes.position())
82-
.map_err(|e| prio::codec::CodecError::Other(Box::new(e)))?;
83-
84-
Ok(Self {
85-
request,
86-
hash: AggregationJobRequestHash::hash(&bytes.get_ref()[start..end]),
87-
})
88-
}
89-
}
90-
9130
impl_req_body! {
9231
// body type | id type
9332
// --------------------| ----------------

crates/daphne/src/roles/helper/handle_agg_job.rs

Lines changed: 3 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -1,12 +1,12 @@
11
// Copyright (c) 2025 Cloudflare, Inc. All rights reserved.
22
// SPDX-License-Identifier: BSD-3-Clause
33

4-
use super::{check_part_batch, DapHelper};
4+
use super::{check_part_batch, DapHelper, HashedAggregationJobReq};
55
use crate::{
66
error::DapAbort,
77
messages::{
8-
request::HashedAggregationJobReq, AggregationJobInitReq, AggregationJobResp,
9-
PartialBatchSelector, ReportError, TaskId, TransitionVar,
8+
AggregationJobInitReq, AggregationJobResp, PartialBatchSelector, ReportError, TaskId,
9+
TransitionVar,
1010
},
1111
metrics::ReportStatus,
1212
protocol::aggregator::ReportProcessedStatus,

crates/daphne/src/roles/helper/mod.rs

Lines changed: 67 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -11,15 +11,79 @@ use crate::{
1111
constants::DapMediaType,
1212
error::DapAbort,
1313
messages::{
14-
constant_time_eq,
15-
request::{AggregationJobRequestHash, HashedAggregationJobReq},
16-
AggregateShare, AggregateShareReq, AggregationJobId, PartialBatchSelector, TaskId,
14+
constant_time_eq, AggregateShare, AggregateShareReq, AggregationJobId,
15+
AggregationJobInitReq, PartialBatchSelector, TaskId,
1716
},
1817
metrics::{DaphneRequestType, ReportStatus},
1918
protocol::aggregator::ReplayProtection,
2019
DapAggregationParam, DapError, DapRequest, DapResponse, DapTaskConfig, DapVersion,
2120
};
2221

22+
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
23+
#[cfg_attr(any(test, feature = "test-utils"), derive(deepsize::DeepSizeOf))]
24+
pub struct AggregationJobRequestHash(Vec<u8>);
25+
26+
impl AggregationJobRequestHash {
27+
pub fn get(&self) -> &[u8] {
28+
&self.0
29+
}
30+
31+
fn hash(bytes: &[u8]) -> Self {
32+
Self(
33+
ring::digest::digest(&ring::digest::SHA256, bytes)
34+
.as_ref()
35+
.to_vec(),
36+
)
37+
}
38+
}
39+
40+
/// An [`AggregationJobInitReq`] and its hash. Used by the helper to prevent the parameters of an
41+
/// aggregation job from changing.
42+
pub struct HashedAggregationJobReq {
43+
pub request: AggregationJobInitReq,
44+
pub hash: AggregationJobRequestHash,
45+
}
46+
47+
impl HashedAggregationJobReq {
48+
#[cfg(any(test, feature = "test-utils"))]
49+
pub fn from_aggregation_req(version: DapVersion, request: AggregationJobInitReq) -> Self {
50+
let mut buf = Vec::new();
51+
request.encode_with_param(&version, &mut buf).unwrap();
52+
Self {
53+
request,
54+
hash: AggregationJobRequestHash::hash(&buf),
55+
}
56+
}
57+
}
58+
59+
impl ParameterizedEncode<DapVersion> for HashedAggregationJobReq {
60+
fn encode_with_param(
61+
&self,
62+
encoding_parameter: &DapVersion,
63+
bytes: &mut Vec<u8>,
64+
) -> Result<(), prio::codec::CodecError> {
65+
self.request.encode_with_param(encoding_parameter, bytes)
66+
}
67+
}
68+
69+
impl ParameterizedDecode<DapVersion> for HashedAggregationJobReq {
70+
fn decode_with_param(
71+
decoding_parameter: &DapVersion,
72+
bytes: &mut std::io::Cursor<&[u8]>,
73+
) -> Result<Self, prio::codec::CodecError> {
74+
let start = usize::try_from(bytes.position())
75+
.map_err(|e| prio::codec::CodecError::Other(Box::new(e)))?;
76+
let request = AggregationJobInitReq::decode_with_param(decoding_parameter, bytes)?;
77+
let end = usize::try_from(bytes.position())
78+
.map_err(|e| prio::codec::CodecError::Other(Box::new(e)))?;
79+
80+
Ok(Self {
81+
request,
82+
hash: AggregationJobRequestHash::hash(&bytes.get_ref()[start..end]),
83+
})
84+
}
85+
}
86+
2387
/// DAP Helper functionality.
2488
#[async_trait]
2589
pub trait DapHelper: DapAggregator {

crates/daphne/src/roles/mod.rs

Lines changed: 5 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -136,13 +136,12 @@ mod test {
136136
constants::DapMediaType,
137137
hpke::{HpkeKemId, HpkeProvider, HpkeReceiverConfig},
138138
messages::{
139-
request::{HashedAggregationJobReq, RequestBody},
140-
AggregateShareReq, AggregationJobId, AggregationJobInitReq, AggregationJobResp,
141-
BatchId, BatchSelector, Collection, CollectionJobId, CollectionReq, Extension,
142-
HpkeCiphertext, Interval, PartialBatchSelector, Query, Report, ReportError, TaskId,
143-
Time, TransitionVar,
139+
request::RequestBody, AggregateShareReq, AggregationJobId, AggregationJobInitReq,
140+
AggregationJobResp, BatchId, BatchSelector, Collection, CollectionJobId, CollectionReq,
141+
Extension, HpkeCiphertext, Interval, PartialBatchSelector, Query, Report, ReportError,
142+
TaskId, Time, TransitionVar,
144143
},
145-
roles::{leader::WorkItem, DapAggregator},
144+
roles::{helper::HashedAggregationJobReq, leader::WorkItem, DapAggregator},
146145
testing::InMemoryAggregator,
147146
vdaf::{Prio3Config, VdafConfig},
148147
DapAbort, DapAggregationJobState, DapAggregationParam, DapBatchBucket, DapBatchMode,

Comments (0)