// Copyright (c) 2025 Cloudflare, Inc. All rights reserved.
// SPDX-License-Identifier: BSD-3-Clause

use crate::{
    aggregation_job_store_capnp::new_job_request,
    capnproto::{CapnprotoPayloadDecode, CapnprotoPayloadEncode},
    durable_requests::ObjectIdFrom,
};
use daphne::{
    messages::{AggregationJobId, AggregationJobInitReq, PartialBatchSelector, TaskId},
    DapVersion,
};
use serde::{Deserialize, Serialize};
use std::{ops::Deref, slice};

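// Durable Object binding holding the set of aggregation jobs for a given
// (DAP version, task ID) pair; each object is named after that pair below.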
super::define_do_binding! {
    const BINDING = "AGGREGATION_JOB_SET";

    enum Command {
        NewJob = "/new-job",
        ListJobIds = "/job-ids",
    }

    fn name((version, task_id): (DapVersion, &'n TaskId)) -> ObjectIdFrom {
        ObjectIdFrom::Name(format!("{version}/task/{task_id}"))
    }
}

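/// SHA-256 digest of an [`AggregationJobInitReq`], used to detect whether a
/// retried request carries the same parameters as the job it targets.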
#[derive(Debug)]
pub struct AggregationJobReqHash(Vec<u8>);

impl Deref for AggregationJobReqHash {
    type Target = [u8];
    fn deref(&self) -> &Self::Target {
        &self.0
    }
}
impl From<&AggregationJobInitReq> for AggregationJobReqHash {
    fn from(req: &AggregationJobInitReq) -> Self {
        // Destructure exhaustively (no `..`) so that adding a field to
        // `AggregationJobInitReq` forces this hash to be revisited.
        let AggregationJobInitReq {
            agg_param,
            part_batch_sel,
            prep_inits,
        } = req;

        // Feed every field of the request into the digest; any change to the
        // job's parameters therefore yields a different hash.
        let mut context = ring::digest::Context::new(&ring::digest::SHA256);
        context.update(agg_param);
        context.update(match part_batch_sel {
            PartialBatchSelector::TimeInterval => &[0],
            PartialBatchSelector::LeaderSelectedByBatchId { batch_id } => batch_id.as_ref(),
        });
        for p in prep_inits {
            let daphne::messages::PrepareInit {
                report_share:
                    daphne::messages::ReportShare {
                        report_metadata: daphne::messages::ReportMetadata { id, time },
                        public_share,
                        encrypted_input_share:
                            daphne::messages::HpkeCiphertext {
                                config_id,
                                enc,
                                payload: ciphertext_payload,
                            },
                    },
                payload,
            } = p;

            context.update(payload);
            context.update(public_share);
            context.update(id.as_ref());
            context.update(&time.to_be_bytes());
            context.update(ciphertext_payload);
            context.update(slice::from_ref(config_id));
            context.update(enc);
        }
        Self(context.finish().as_ref().to_vec())
    }
}

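/// Request to create a new aggregation job, pairing the job's ID with the
/// hash of the `AggregationJobInitReq` that initialized it.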
#[derive(Debug)]
pub struct NewJobRequest {
    pub id: AggregationJobId,
    pub agg_job_hash: AggregationJobReqHash,
}

impl CapnprotoPayloadEncode for NewJobRequest {
    type Builder<'a> = new_job_request::Builder<'a>;

    fn encode_to_builder(&self, mut builder: Self::Builder<'_>) {
        self.id.encode_to_builder(builder.reborrow().init_id());
        builder.set_agg_job_hash(&self.agg_job_hash.0);
    }
}

impl CapnprotoPayloadDecode for NewJobRequest {
    type Reader<'a> = new_job_request::Reader<'a>;

    fn decode_from_reader(reader: Self::Reader<'_>) -> capnp::Result<Self>
    where
        Self: Sized,
    {
        Ok(Self {
            id: <_>::decode_from_reader(reader.get_id()?)?,
            agg_job_hash: AggregationJobReqHash(reader.get_agg_job_hash()?.to_vec()),
        })
    }
}

| 109 | + |
| 110 | +#[derive(Debug, Serialize, Deserialize)] |
| 111 | +pub enum NewJobResponse { |
| 112 | + Ok, |
| 113 | + /// Request would change an existing aggregation job's parameters. |
| 114 | + IllegalJobParameters, |
| 115 | +} |
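
#[cfg(test)]
mod tests {
    // A minimal sketch of the hashing contract, assuming the fields of
    // `AggregationJobInitReq` (as destructured above) are publicly
    // constructible; the concrete values are illustrative only.
    use super::*;

    #[test]
    fn agg_job_req_hash_is_deterministic() {
        let req = AggregationJobInitReq {
            agg_param: b"agg-param".to_vec(),
            part_batch_sel: PartialBatchSelector::TimeInterval,
            prep_inits: Vec::new(),
        };

        // Hashing the same request twice must yield the same digest, so a
        // retransmitted request compares equal to the stored hash.
        let first = AggregationJobReqHash::from(&req);
        let second = AggregationJobReqHash::from(&req);
        assert_eq!(&*first, &*second);
        assert_eq!(first.len(), ring::digest::SHA256_OUTPUT_LEN);
    }
}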