diff --git a/Cargo.lock b/Cargo.lock
index 6dd9c8ae..17ec065f 100644
--- a/Cargo.lock
+++ b/Cargo.lock
@@ -1575,7 +1575,7 @@ dependencies = [
[[package]]
name = "cb-bench-pbs"
-version = "0.9.0"
+version = "0.9.2"
dependencies = [
"alloy",
"cb-common",
@@ -1592,7 +1592,7 @@ dependencies = [
[[package]]
name = "cb-cli"
-version = "0.9.0"
+version = "0.9.2"
dependencies = [
"cb-common",
"clap",
@@ -1604,7 +1604,7 @@ dependencies = [
[[package]]
name = "cb-common"
-version = "0.9.0"
+version = "0.9.2"
dependencies = [
"aes 0.8.4",
"alloy",
@@ -1652,7 +1652,7 @@ dependencies = [
[[package]]
name = "cb-metrics"
-version = "0.9.0"
+version = "0.9.2"
dependencies = [
"axum 0.8.4",
"cb-common",
@@ -1665,7 +1665,7 @@ dependencies = [
[[package]]
name = "cb-pbs"
-version = "0.9.0"
+version = "0.9.2"
dependencies = [
"alloy",
"async-trait",
@@ -1676,6 +1676,7 @@ dependencies = [
"ethereum_ssz",
"eyre",
"futures",
+ "headers",
"lazy_static",
"parking_lot",
"prometheus",
@@ -1692,7 +1693,7 @@ dependencies = [
[[package]]
name = "cb-signer"
-version = "0.9.0"
+version = "0.9.2"
dependencies = [
"alloy",
"axum 0.8.4",
@@ -1721,7 +1722,7 @@ dependencies = [
[[package]]
name = "cb-tests"
-version = "0.9.0"
+version = "0.9.2"
dependencies = [
"alloy",
"axum 0.8.4",
@@ -1881,7 +1882,7 @@ dependencies = [
[[package]]
name = "commit-boost"
-version = "0.9.0"
+version = "0.9.2"
dependencies = [
"cb-cli",
"cb-common",
@@ -2169,7 +2170,7 @@ dependencies = [
[[package]]
name = "da_commit"
-version = "0.9.0"
+version = "0.9.2"
dependencies = [
"alloy",
"color-eyre",
@@ -6097,7 +6098,7 @@ checksum = "a2eb9349b6444b326872e140eb1cf5e7c522154d69e7a0ffb0fb81c06b37543f"
[[package]]
name = "status_api"
-version = "0.9.0"
+version = "0.9.2"
dependencies = [
"async-trait",
"axum 0.8.4",
diff --git a/Cargo.toml b/Cargo.toml
index a4920cca..d94c8cdb 100644
--- a/Cargo.toml
+++ b/Cargo.toml
@@ -5,7 +5,7 @@ resolver = "2"
[workspace.package]
edition = "2024"
rust-version = "1.89"
-version = "0.9.0"
+version = "0.9.2"
[workspace.dependencies]
aes = "0.8"
diff --git a/crates/common/src/pbs/error.rs b/crates/common/src/pbs/error.rs
index 77d942cd..58066c4f 100644
--- a/crates/common/src/pbs/error.rs
+++ b/crates/common/src/pbs/error.rs
@@ -14,6 +14,9 @@ pub enum PbsError {
#[error("json decode error: {err:?}, raw: {raw}")]
JsonDecode { err: serde_json::Error, raw: String },
+ #[error("error with request: {0}")]
+ GeneralRequest(String),
+
#[error("{0}")]
ReadResponse(#[from] ResponseReadError),
diff --git a/crates/common/src/pbs/mod.rs b/crates/common/src/pbs/mod.rs
index af2c07b4..a1152b58 100644
--- a/crates/common/src/pbs/mod.rs
+++ b/crates/common/src/pbs/mod.rs
@@ -6,5 +6,6 @@ mod types;
pub use builder::*;
pub use constants::*;
+pub use lh_types::ForkVersionDecode;
pub use relay::*;
pub use types::*;
diff --git a/crates/common/src/types.rs b/crates/common/src/types.rs
index 077b4ccd..6d6d55f1 100644
--- a/crates/common/src/types.rs
+++ b/crates/common/src/types.rs
@@ -233,7 +233,8 @@ impl KnownChain {
pub fn fulu_fork_slot(&self) -> u64 {
match self {
- KnownChain::Mainnet | KnownChain::Helder => u64::MAX,
+ KnownChain::Mainnet => 13164544,
+ KnownChain::Helder => u64::MAX,
KnownChain::Holesky => 5283840,
KnownChain::Sepolia => 8724480,
KnownChain::Hoodi => 1622016,
diff --git a/crates/common/src/utils.rs b/crates/common/src/utils.rs
index 291932d8..ddc93e1b 100644
--- a/crates/common/src/utils.rs
+++ b/crates/common/src/utils.rs
@@ -1,6 +1,7 @@
#[cfg(feature = "testing-flags")]
use std::cell::Cell;
use std::{
+ collections::HashSet,
fmt::Display,
net::Ipv4Addr,
str::FromStr,
@@ -45,9 +46,9 @@ use crate::{
types::{BlsPublicKey, Chain, Jwt, JwtClaims, ModuleId},
};
-const APPLICATION_JSON: &str = "application/json";
-const APPLICATION_OCTET_STREAM: &str = "application/octet-stream";
-const WILDCARD: &str = "*/*";
+pub const APPLICATION_JSON: &str = "application/json";
+pub const APPLICATION_OCTET_STREAM: &str = "application/octet-stream";
+pub const WILDCARD: &str = "*/*";
const MILLIS_PER_SECOND: u64 = 1_000;
pub const CONSENSUS_VERSION_HEADER: &str = "Eth-Consensus-Version";
@@ -433,36 +434,34 @@ pub fn get_user_agent_with_version(req_headers: &HeaderMap) -> eyre::Result<HeaderValue>
-pub fn get_accept_type(req_headers: &HeaderMap) -> eyre::Result<EncodingType> {
- let accept = Accept::from_str(
- req_headers.get(ACCEPT).and_then(|value| value.to_str().ok()).unwrap_or(APPLICATION_JSON),
- )
- .map_err(|e| eyre::eyre!("invalid accept header: {e}"))?;
-
- if accept.media_types().count() == 0 {
- // No valid media types found, default to JSON
- return Ok(EncodingType::Json);
- }
-
- // Get the SSZ and JSON media types if present
- let mut ssz_type = false;
- let mut json_type = false;
+pub fn get_accept_types(req_headers: &HeaderMap) -> eyre::Result<HashSet<EncodingType>> {
+ let mut accepted_types = HashSet::new();
let mut unsupported_type = false;
- accept.media_types().for_each(|mt| match mt.essence().to_string().as_str() {
- APPLICATION_OCTET_STREAM => ssz_type = true,
- APPLICATION_JSON | WILDCARD => json_type = true,
- _ => unsupported_type = true,
- });
-
- // If SSZ is present, prioritize it
- if ssz_type {
- return Ok(EncodingType::Ssz);
+ for header in req_headers.get_all(ACCEPT).iter() {
+ let accept = Accept::from_str(header.to_str()?)
+ .map_err(|e| eyre::eyre!("invalid accept header: {e}"))?;
+ for mt in accept.media_types() {
+ match mt.essence().to_string().as_str() {
+ APPLICATION_OCTET_STREAM => {
+ accepted_types.insert(EncodingType::Ssz);
+ }
+ APPLICATION_JSON | WILDCARD => {
+ accepted_types.insert(EncodingType::Json);
+ }
+ _ => unsupported_type = true,
+ };
+ }
}
- // If there aren't any unsupported types, use JSON
- if !unsupported_type {
- return Ok(EncodingType::Json);
+
+ if accepted_types.is_empty() {
+ if unsupported_type {
+ return Err(eyre::eyre!("unsupported accept type"));
+ }
+
+ // No accept header so just return the same type as the content type
+ accepted_types.insert(get_content_type(req_headers));
}
- Err(eyre::eyre!("unsupported accept type"))
+ Ok(accepted_types)
}
/// Parse CONTENT TYPE header to get the encoding type of the body, defaulting
@@ -490,7 +489,7 @@ pub fn get_consensus_version_header(req_headers: &HeaderMap) -> Option<ForkName>
/// Enum for types that can be used to encode incoming request bodies or
/// outgoing response bodies
-#[derive(Debug, Clone, Copy, PartialEq)]
+#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub enum EncodingType {
/// Body is UTF-8 encoded as JSON
Json,
@@ -499,21 +498,28 @@ pub enum EncodingType {
Ssz,
}
-impl std::fmt::Display for EncodingType {
- fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+impl EncodingType {
+ /// Get the content type string for the encoding type
+ pub fn content_type(&self) -> &str {
match self {
- EncodingType::Json => write!(f, "application/json"),
- EncodingType::Ssz => write!(f, "application/octet-stream"),
+ EncodingType::Json => APPLICATION_JSON,
+ EncodingType::Ssz => APPLICATION_OCTET_STREAM,
}
}
}
+impl std::fmt::Display for EncodingType {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ write!(f, "{}", self.content_type())
+ }
+}
+
impl FromStr for EncodingType {
type Err = String;
fn from_str(value: &str) -> Result<Self, Self::Err> {
- match value {
- "application/json" | "" => Ok(EncodingType::Json),
- "application/octet-stream" => Ok(EncodingType::Ssz),
+ match value.to_ascii_lowercase().as_str() {
+ APPLICATION_JSON | "" => Ok(EncodingType::Json),
+ APPLICATION_OCTET_STREAM => Ok(EncodingType::Ssz),
_ => Err(format!("unsupported encoding type: {value}")),
}
}
@@ -636,8 +642,18 @@ pub fn bls_pubkey_from_hex_unchecked(hex: &str) -> BlsPublicKey {
#[cfg(test)]
mod test {
+ use axum::http::{HeaderMap, HeaderValue};
+ use reqwest::header::ACCEPT;
+
use super::{create_jwt, decode_jwt, validate_jwt};
- use crate::types::{Jwt, ModuleId};
+ use crate::{
+ types::{Jwt, ModuleId},
+ utils::{
+ APPLICATION_JSON, APPLICATION_OCTET_STREAM, EncodingType, WILDCARD, get_accept_types,
+ },
+ };
+
+ const APPLICATION_TEXT: &str = "application/text";
#[test]
fn test_jwt_validation() {
@@ -660,4 +676,100 @@ mod test {
assert!(response.is_err());
assert_eq!(response.unwrap_err().to_string(), "InvalidSignature");
}
+
+ /// Make sure a missing Accept header is interpreted as JSON
+ #[test]
+ fn test_missing_accept_header() {
+ let headers = HeaderMap::new();
+ let result = get_accept_types(&headers).unwrap();
+ assert_eq!(result.len(), 1);
+ assert!(result.contains(&EncodingType::Json));
+ }
+
+ /// Test accepting JSON
+ #[test]
+ fn test_accept_header_json() {
+ let mut headers = HeaderMap::new();
+ headers.append(ACCEPT, HeaderValue::from_str(APPLICATION_JSON).unwrap());
+ let result = get_accept_types(&headers).unwrap();
+ assert_eq!(result.len(), 1);
+ assert!(result.contains(&EncodingType::Json));
+ }
+
+ /// Test accepting SSZ
+ #[test]
+ fn test_accept_header_ssz() {
+ let mut headers = HeaderMap::new();
+ headers.append(ACCEPT, HeaderValue::from_str(APPLICATION_OCTET_STREAM).unwrap());
+ let result = get_accept_types(&headers).unwrap();
+ assert_eq!(result.len(), 1);
+ assert!(result.contains(&EncodingType::Ssz));
+ }
+
+ /// Test accepting wildcards
+ #[test]
+ fn test_accept_header_wildcard() {
+ let mut headers = HeaderMap::new();
+ headers.append(ACCEPT, HeaderValue::from_str(WILDCARD).unwrap());
+ let result = get_accept_types(&headers).unwrap();
+ assert_eq!(result.len(), 1);
+ assert!(result.contains(&EncodingType::Json));
+ }
+
+ /// Test accepting one header with multiple values
+ #[test]
+ fn test_accept_header_multiple_values() {
+ let header_string = format!("{APPLICATION_JSON}, {APPLICATION_OCTET_STREAM}");
+ let mut headers = HeaderMap::new();
+ headers.append(ACCEPT, HeaderValue::from_str(&header_string).unwrap());
+ let result = get_accept_types(&headers).unwrap();
+ assert_eq!(result.len(), 2);
+ assert!(result.contains(&EncodingType::Json));
+ assert!(result.contains(&EncodingType::Ssz));
+ }
+
+ /// Test accepting multiple headers
+ #[test]
+ fn test_multiple_accept_headers() {
+ let mut headers = HeaderMap::new();
+ headers.append(ACCEPT, HeaderValue::from_str(APPLICATION_JSON).unwrap());
+ headers.append(ACCEPT, HeaderValue::from_str(APPLICATION_OCTET_STREAM).unwrap());
+ let result = get_accept_types(&headers).unwrap();
+ assert_eq!(result.len(), 2);
+ assert!(result.contains(&EncodingType::Json));
+ assert!(result.contains(&EncodingType::Ssz));
+ }
+
+ /// Test accepting one header with multiple values, including a type that
+ /// can't be used
+ #[test]
+ fn test_accept_header_multiple_values_including_unknown() {
+ let header_string =
+ format!("{APPLICATION_JSON}, {APPLICATION_OCTET_STREAM}, {APPLICATION_TEXT}");
+ let mut headers = HeaderMap::new();
+ headers.append(ACCEPT, HeaderValue::from_str(&header_string).unwrap());
+ let result = get_accept_types(&headers).unwrap();
+ assert_eq!(result.len(), 2);
+ assert!(result.contains(&EncodingType::Json));
+ assert!(result.contains(&EncodingType::Ssz));
+ }
+
+ /// Test rejecting an unknown accept type
+ #[test]
+ fn test_invalid_accept_header_type() {
+ let mut headers = HeaderMap::new();
+ headers.append(ACCEPT, HeaderValue::from_str(APPLICATION_TEXT).unwrap());
+ let result = get_accept_types(&headers);
+ assert!(result.is_err());
+ }
+
+ /// Test accepting one header with multiple values
+ #[test]
+ fn test_accept_header_invalid_parse() {
+ let header_string = format!("{APPLICATION_JSON}, a?;ef)");
+ let mut headers = HeaderMap::new();
+ headers.append(ACCEPT, HeaderValue::from_str(&header_string).unwrap());
+ let result = get_accept_types(&headers);
+ assert!(result.is_err());
+ }
}
diff --git a/crates/pbs/Cargo.toml b/crates/pbs/Cargo.toml
index 1c5c2f1f..b0c1585e 100644
--- a/crates/pbs/Cargo.toml
+++ b/crates/pbs/Cargo.toml
@@ -15,6 +15,7 @@ cb-metrics.workspace = true
ethereum_ssz.workspace = true
eyre.workspace = true
futures.workspace = true
+headers.workspace = true
lazy_static.workspace = true
parking_lot.workspace = true
prometheus.workspace = true
diff --git a/crates/pbs/src/error.rs b/crates/pbs/src/error.rs
index 6c1c5c68..1214fd6a 100644
--- a/crates/pbs/src/error.rs
+++ b/crates/pbs/src/error.rs
@@ -1,4 +1,5 @@
use axum::{http::StatusCode, response::IntoResponse};
+use cb_common::utils::BodyDeserializeError;
#[derive(Debug)]
/// Errors that the PbsService returns to client
@@ -7,6 +8,7 @@ pub enum PbsClientError {
NoPayload,
Internal,
DecodeError(String),
+ RelayError(String),
}
impl PbsClientError {
@@ -16,10 +18,17 @@ impl PbsClientError {
PbsClientError::NoPayload => StatusCode::BAD_GATEWAY,
PbsClientError::Internal => StatusCode::INTERNAL_SERVER_ERROR,
PbsClientError::DecodeError(_) => StatusCode::BAD_REQUEST,
+ PbsClientError::RelayError(_) => StatusCode::FAILED_DEPENDENCY,
}
}
}
+impl From<BodyDeserializeError> for PbsClientError {
+ fn from(e: BodyDeserializeError) -> Self {
+ PbsClientError::DecodeError(format!("failed to deserialize body: {e}"))
+ }
+}
+
impl IntoResponse for PbsClientError {
fn into_response(self) -> axum::response::Response {
let msg = match &self {
@@ -27,6 +36,7 @@ impl IntoResponse for PbsClientError {
PbsClientError::NoPayload => "no payload from relays".to_string(),
PbsClientError::Internal => "internal server error".to_string(),
PbsClientError::DecodeError(e) => format!("error decoding request: {e}"),
+ PbsClientError::RelayError(e) => format!("error processing relay response: {e}"),
};
(self.status_code(), msg).into_response()
diff --git a/crates/pbs/src/mev_boost/get_header.rs b/crates/pbs/src/mev_boost/get_header.rs
index 86743703..e7992c31 100644
--- a/crates/pbs/src/mev_boost/get_header.rs
+++ b/crates/pbs/src/mev_boost/get_header.rs
@@ -12,20 +12,24 @@ use axum::http::{HeaderMap, HeaderValue};
use cb_common::{
constants::APPLICATION_BUILDER_DOMAIN,
pbs::{
- EMPTY_TX_ROOT_HASH, ExecutionPayloadHeaderRef, GetHeaderInfo, GetHeaderParams,
- GetHeaderResponse, HEADER_START_TIME_UNIX_MS, HEADER_TIMEOUT_MS, RelayClient,
+ EMPTY_TX_ROOT_HASH, ExecutionPayloadHeaderRef, ForkVersionDecode, GetHeaderInfo,
+ GetHeaderParams, GetHeaderResponse, HEADER_START_TIME_UNIX_MS, HEADER_TIMEOUT_MS,
+ RelayClient, SignedBuilderBid,
error::{PbsError, ValidationError},
},
signature::verify_signed_message,
types::{BlsPublicKey, BlsPublicKeyBytes, BlsSignature, Chain},
utils::{
- get_user_agent_with_version, ms_into_slot, read_chunked_body_with_max,
- timestamp_of_slot_start_sec, utcnow_ms,
+ EncodingType, get_accept_types, get_consensus_version_header, get_user_agent_with_version,
+ ms_into_slot, read_chunked_body_with_max, timestamp_of_slot_start_sec, utcnow_ms,
},
};
use futures::future::join_all;
use parking_lot::RwLock;
-use reqwest::{StatusCode, header::USER_AGENT};
+use reqwest::{
+ Response, StatusCode,
+ header::{ACCEPT, CONTENT_TYPE, USER_AGENT},
+};
use tokio::time::sleep;
use tracing::{Instrument, debug, error, warn};
use tree_hash::TreeHash;
@@ -97,6 +101,11 @@ pub async fn get_header(
let mut send_headers = HeaderMap::new();
send_headers.insert(USER_AGENT, get_user_agent_with_version(&req_headers)?);
+ // Get the accept types from the request and forward them
+ for value in req_headers.get_all(ACCEPT).iter() {
+ send_headers.append(ACCEPT, value.clone());
+ }
+
let mut handles = Vec::with_capacity(relays.len());
for relay in relays.iter() {
handles.push(
@@ -305,43 +314,73 @@ async fn send_one_get_header(
params: GetHeaderParams,
relay: RelayClient,
chain: Chain,
- mut req_config: RequestContext,
+ req_config: RequestContext,
validation: ValidationContext,
) -> Result<(u64, Option<GetHeaderResponse>), PbsError> {
- // the timestamp in the header is the consensus block time which is fixed,
- // use the beginning of the request as proxy to make sure we use only the
- // last one received
- let start_request_time = utcnow_ms();
- req_config.headers.insert(HEADER_START_TIME_UNIX_MS, HeaderValue::from(start_request_time));
+ let mut original_headers = req_config.headers.clone();
+
+ // Check which types this request is for
+ let accept_types = get_accept_types(&req_config.headers).map_err(|e| {
+ PbsError::GeneralRequest(format!("error reading accept types: {e}").to_string())
+ })?;
+ let accepts_ssz = accept_types.contains(&EncodingType::Ssz);
+ let accepts_json = accept_types.contains(&EncodingType::Json);
+
+ // Send the header request
+ let mut start_request = Instant::now();
+ let config = RequestContext {
+ url: req_config.url.clone(),
+ timeout_ms: req_config.timeout_ms,
+ headers: req_config.headers,
+ };
+ let (mut res, mut start_request_time, mut content_type) =
+ send_get_header_impl(&relay, config).await?;
+ let mut code = res.status();
- // The timeout header indicating how long a relay has to respond, so they can
- // minimize timing games without losing the bid
- req_config.headers.insert(HEADER_TIMEOUT_MS, HeaderValue::from(req_config.timeout_ms));
+ // If the request only supports SSZ, but the relay only supports JSON, resubmit
+ // to the relay with JSON - we'll convert it ourselves
+ if code.is_client_error() && accepts_ssz && !accepts_json {
+ debug!(
+ relay_id = relay.id.as_ref(),
+ "relay does not support SSZ, resubmitting request with JSON accept header"
+ );
- let start_request = Instant::now();
- let res = match relay
- .client
- .get(req_config.url)
- .timeout(Duration::from_millis(req_config.timeout_ms))
- .headers(req_config.headers)
- .send()
- .await
- {
- Ok(res) => res,
- Err(err) => {
+ // Make sure there's enough time left to resubmit
+ let elapsed = start_request.elapsed().as_millis() as u64;
+ if elapsed >= req_config.timeout_ms {
RELAY_STATUS_CODE
.with_label_values(&[TIMEOUT_ERROR_CODE_STR, GET_HEADER_ENDPOINT_TAG, &relay.id])
.inc();
- return Err(err.into());
+ return Err(PbsError::RelayResponse {
+ error_msg: "not enough time left to resubmit request with JSON accept header"
+ .to_string(),
+ code: TIMEOUT_ERROR_CODE,
+ });
}
- };
+
+ // Resubmit the request with JSON accept header
+ // Also resets the start request timer
+ original_headers
+ .insert(ACCEPT, HeaderValue::from_str(EncodingType::Json.content_type()).unwrap());
+ let config = RequestContext {
+ url: req_config.url.clone(),
+ timeout_ms: req_config.timeout_ms - elapsed,
+ headers: original_headers,
+ };
+ start_request = Instant::now();
+ (res, start_request_time, content_type) = send_get_header_impl(&relay, config).await?;
+ code = res.status();
+ }
+
+ // Get the consensus fork version if provided (to avoid cloning later)
+ let fork = get_consensus_version_header(res.headers());
+ let content_type_header = res.headers().get(CONTENT_TYPE).cloned();
let request_latency = start_request.elapsed();
RELAY_LATENCY
.with_label_values(&[GET_HEADER_ENDPOINT_TAG, &relay.id])
.observe(request_latency.as_secs_f64());
- let code = res.status();
RELAY_STATUS_CODE.with_label_values(&[code.as_str(), GET_HEADER_ENDPOINT_TAG, &relay.id]).inc();
let response_bytes = read_chunked_body_with_max(res, MAX_SIZE_GET_HEADER_RESPONSE).await?;
@@ -363,15 +402,42 @@ async fn send_one_get_header(
return Ok((start_request_time, None));
}
- let get_header_response = match serde_json::from_slice::<GetHeaderResponse>(&response_bytes) {
- Ok(parsed) => parsed,
- Err(err) => {
- return Err(PbsError::JsonDecode {
- err,
- raw: String::from_utf8_lossy(&response_bytes).into_owned(),
- });
- }
- };
+ // Regenerate the header from the response
+ let get_header_response =
+ match content_type {
+ Some(EncodingType::Ssz) => {
+ // Get the consensus fork version - this is required according to the spec
+ let fork = fork.ok_or(PbsError::RelayResponse {
+ error_msg: "relay did not provide consensus version header for ssz payload"
+ .to_string(),
+ code: code.as_u16(),
+ })?;
+ let data = SignedBuilderBid::from_ssz_bytes_by_fork(&response_bytes, fork)
+ .map_err(|e| PbsError::RelayResponse {
+ error_msg: (format!("error decoding relay payload: {e:?}")).to_string(),
+ code: (code.as_u16()),
+ })?;
+ GetHeaderResponse { version: fork, data, metadata: Default::default() }
+ }
+ Some(EncodingType::Json) => {
+ match serde_json::from_slice::<GetHeaderResponse>(&response_bytes) {
+ Ok(parsed) => parsed,
+ Err(err) => {
+ return Err(PbsError::JsonDecode {
+ err,
+ raw: String::from_utf8_lossy(&response_bytes).into_owned(),
+ });
+ }
+ }
+ }
+ None => {
+ let error_msg = match content_type_header {
+ None => "relay response missing content type header".to_string(),
+ Some(ct) => format!("relay response has unsupported content type {ct:?}"),
+ };
+ return Err(PbsError::RelayResponse { error_msg, code: code.as_u16() });
+ }
+ };
debug!(
relay_id = relay.id.as_ref(),
@@ -380,6 +446,7 @@ async fn send_one_get_header(
version =? get_header_response.version,
value_eth = format_ether(*get_header_response.value()),
block_hash = %get_header_response.block_hash(),
+ content_type = ?content_type,
"received new header"
);
@@ -461,6 +528,56 @@ async fn send_one_get_header(
Ok((start_request_time, Some(get_header_response)))
}
+async fn send_get_header_impl(
+ relay: &RelayClient,
+ mut req_config: RequestContext,
+) -> Result<(Response, u64, Option<EncodingType>), PbsError> {
+ // the timestamp in the header is the consensus block time which is fixed,
+ // use the beginning of the request as proxy to make sure we use only the
+ // last one received
+ let start_request_time = utcnow_ms();
+ req_config.headers.insert(HEADER_START_TIME_UNIX_MS, HeaderValue::from(start_request_time));
+
+ // The timeout header indicating how long a relay has to respond, so they can
+ // minimize timing games without losing the bid
+ req_config.headers.insert(HEADER_TIMEOUT_MS, HeaderValue::from(req_config.timeout_ms));
+
+ let res = match relay
+ .client
+ .get(req_config.url)
+ .timeout(Duration::from_millis(req_config.timeout_ms))
+ .headers(req_config.headers)
+ .send()
+ .await
+ {
+ Ok(res) => res,
+ Err(err) => {
+ RELAY_STATUS_CODE
+ .with_label_values(&[TIMEOUT_ERROR_CODE_STR, GET_HEADER_ENDPOINT_TAG, &relay.id])
+ .inc();
+ return Err(err.into());
+ }
+ };
+
+ // Get the content type; this is only really useful for OK responses, and
+ // doesn't handle encoding types besides SSZ and JSON
+ let mut content_type: Option<EncodingType> = None;
+ if res.status() == StatusCode::OK &&
+ let Some(header) = res.headers().get(CONTENT_TYPE)
+ {
+ let header_str = header.to_str().map_err(|e| PbsError::RelayResponse {
+ error_msg: format!("cannot decode content-type header: {e}").to_string(),
+ code: (res.status().as_u16()),
+ })?;
+ if header_str.eq_ignore_ascii_case(&EncodingType::Ssz.to_string()) {
+ content_type = Some(EncodingType::Ssz)
+ } else if header_str.eq_ignore_ascii_case(&EncodingType::Json.to_string()) {
+ content_type = Some(EncodingType::Json)
+ }
+ }
+ Ok((res, start_request_time, content_type))
+}
+
struct HeaderData {
block_hash: B256,
parent_hash: B256,
diff --git a/crates/pbs/src/mev_boost/submit_block.rs b/crates/pbs/src/mev_boost/submit_block.rs
index 2b10dcaa..a4666949 100644
--- a/crates/pbs/src/mev_boost/submit_block.rs
+++ b/crates/pbs/src/mev_boost/submit_block.rs
@@ -8,22 +8,28 @@ use alloy::{eips::eip7594::CELLS_PER_EXT_BLOB, primitives::B256};
use axum::http::{HeaderMap, HeaderValue};
use cb_common::{
pbs::{
- BlindedBeaconBlock, BlobsBundle, BuilderApiVersion, ForkName, HEADER_CONSENSUS_VERSION,
- HEADER_START_TIME_UNIX_MS, KzgCommitments, RelayClient, SignedBlindedBeaconBlock,
- SubmitBlindedBlockResponse,
+ BlindedBeaconBlock, BlobsBundle, BuilderApiVersion, ForkName, ForkVersionDecode,
+ HEADER_CONSENSUS_VERSION, HEADER_START_TIME_UNIX_MS, KzgCommitments, PayloadAndBlobs,
+ RelayClient, SignedBlindedBeaconBlock, SubmitBlindedBlockResponse,
error::{PbsError, ValidationError},
},
- utils::{get_user_agent_with_version, read_chunked_body_with_max, utcnow_ms},
+ utils::{
+ EncodingType, get_accept_types, get_content_type, get_user_agent_with_version,
+ read_chunked_body_with_max, utcnow_ms,
+ },
};
use futures::{FutureExt, future::select_ok};
-use reqwest::header::USER_AGENT;
+use reqwest::{
+ Response, StatusCode,
+ header::{ACCEPT, CONTENT_TYPE, USER_AGENT},
+};
+use ssz::Encode;
use tracing::{debug, warn};
use url::Url;
use crate::{
- constants::{
- MAX_SIZE_SUBMIT_BLOCK_RESPONSE, SUBMIT_BLINDED_BLOCK_ENDPOINT_TAG, TIMEOUT_ERROR_CODE_STR,
- },
+ TIMEOUT_ERROR_CODE_STR,
+ constants::{MAX_SIZE_SUBMIT_BLOCK_RESPONSE, SUBMIT_BLINDED_BLOCK_ENDPOINT_TAG},
metrics::{RELAY_LATENCY, RELAY_STATUS_CODE},
state::{BuilderApiState, PbsState},
};
@@ -59,6 +65,17 @@ pub async fn submit_block(
send_headers.insert(USER_AGENT, get_user_agent_with_version(&req_headers)?);
send_headers.insert(HEADER_CONSENSUS_VERSION, consensus_version);
+ // Get the accept types from the request and forward them
+ for value in req_headers.get_all(ACCEPT).iter() {
+ send_headers.append(ACCEPT, value.clone());
+ }
+
+ // Copy the content type header
+ send_headers.insert(
+ CONTENT_TYPE,
+ HeaderValue::from_str(get_content_type(&req_headers).content_type()).unwrap(),
+ );
+
let mut handles = Vec::with_capacity(state.all_relays().len());
for relay in state.all_relays().iter().cloned() {
handles.push(
@@ -155,34 +172,55 @@ async fn send_submit_block(
api_version: &BuilderApiVersion,
fork_name: ForkName,
) -> Result