Skip to content

Commit 05166bd

Browse files
Merge branch 'unstable' into memory_aware_caching
2 parents 257938d + be7a772 commit 05166bd

File tree

117 files changed

+2754
-1296
lines changed

Some content is hidden

Large commits have some content hidden by default. Use the search box below to find content that may be hidden.

117 files changed

+2754
-1296
lines changed

.github/workflows/local-testnet.yml

Lines changed: 2 additions & 2 deletions
Original file line number | Diff line number | Diff line change
@@ -218,7 +218,6 @@ jobs:
218218
name: genesis-sync-test-${{ matrix.fork }}-${{ matrix.offline_secs }}s
219219
runs-on: ubuntu-latest
220220
needs: dockerfile-ubuntu
221-
if: contains(github.event.pull_request.labels.*.name, 'syncing')
222221
strategy:
223222
matrix:
224223
fork: [electra, fulu]
@@ -266,10 +265,11 @@ jobs:
266265
'run-local-testnet',
267266
'doppelganger-protection-success-test',
268267
'doppelganger-protection-failure-test',
268+
'genesis-sync-test'
269269
]
270270
steps:
271271
- uses: actions/checkout@v5
272272
- name: Check that success job is dependent on all others
273273
run: |
274-
exclude_jobs='checkpoint-sync-test|genesis-sync-test'
274+
exclude_jobs='checkpoint-sync-test'
275275
./scripts/ci/check-success-job.sh ./.github/workflows/local-testnet.yml local-testnet-success "$exclude_jobs"

Cargo.lock

Lines changed: 3 additions & 0 deletions
Some generated files are not rendered by default. Learn more about customizing how changed files appear on GitHub.

Makefile

Lines changed: 1 addition & 1 deletion
Original file line number | Diff line number | Diff line change
@@ -32,7 +32,7 @@ PROFILE ?= release
3232

3333
# List of all hard forks. This list is used to set env variables for several tests so that
3434
# they run for different forks.
35-
FORKS=phase0 altair bellatrix capella deneb electra fulu
35+
FORKS=phase0 altair bellatrix capella deneb electra fulu gloas
3636

3737
# List of all recent hard forks. This list is used to set env variables for http_api tests
3838
RECENT_FORKS=electra fulu

beacon_node/beacon_chain/src/beacon_block_streamer.rs

Lines changed: 5 additions & 2 deletions
Original file line number | Diff line number | Diff line change
@@ -16,7 +16,7 @@ use types::{
1616
};
1717
use types::{
1818
ExecutionPayload, ExecutionPayloadBellatrix, ExecutionPayloadCapella, ExecutionPayloadElectra,
19-
ExecutionPayloadFulu, ExecutionPayloadHeader,
19+
ExecutionPayloadFulu, ExecutionPayloadGloas, ExecutionPayloadHeader,
2020
};
2121

2222
#[derive(PartialEq)]
@@ -101,6 +101,7 @@ fn reconstruct_default_header_block<E: EthSpec>(
101101
ForkName::Deneb => ExecutionPayloadDeneb::default().into(),
102102
ForkName::Electra => ExecutionPayloadElectra::default().into(),
103103
ForkName::Fulu => ExecutionPayloadFulu::default().into(),
104+
ForkName::Gloas => ExecutionPayloadGloas::default().into(),
104105
ForkName::Base | ForkName::Altair => {
105106
return Err(Error::PayloadReconstruction(format!(
106107
"Block with fork variant {} has execution payload",
@@ -715,14 +716,15 @@ mod tests {
715716
}
716717

717718
#[tokio::test]
718-
async fn check_all_blocks_from_altair_to_fulu() {
719+
async fn check_all_blocks_from_altair_to_gloas() {
719720
let slots_per_epoch = MinimalEthSpec::slots_per_epoch() as usize;
720721
let num_epochs = 12;
721722
let bellatrix_fork_epoch = 2usize;
722723
let capella_fork_epoch = 4usize;
723724
let deneb_fork_epoch = 6usize;
724725
let electra_fork_epoch = 8usize;
725726
let fulu_fork_epoch = 10usize;
727+
let gloas_fork_epoch = 12usize;
726728
let num_blocks_produced = num_epochs * slots_per_epoch;
727729

728730
let mut spec = test_spec::<MinimalEthSpec>();
@@ -732,6 +734,7 @@ mod tests {
732734
spec.deneb_fork_epoch = Some(Epoch::new(deneb_fork_epoch as u64));
733735
spec.electra_fork_epoch = Some(Epoch::new(electra_fork_epoch as u64));
734736
spec.fulu_fork_epoch = Some(Epoch::new(fulu_fork_epoch as u64));
737+
spec.gloas_fork_epoch = Some(Epoch::new(gloas_fork_epoch as u64));
735738
let spec = Arc::new(spec);
736739

737740
let harness = get_harness(VALIDATOR_COUNT, spec.clone());

beacon_node/beacon_chain/src/beacon_chain.rs

Lines changed: 43 additions & 0 deletions
Original file line number | Diff line number | Diff line change
@@ -3059,6 +3059,7 @@ impl<T: BeaconChainTypes> BeaconChain<T> {
30593059

30603060
/// Cache the data columns in the processing cache, process it, then evict it from the cache if it was
30613061
/// imported or errors.
3062+
#[instrument(skip_all, level = "debug")]
30623063
pub async fn process_gossip_data_columns(
30633064
self: &Arc<Self>,
30643065
data_columns: Vec<GossipVerifiedDataColumn<T>>,
@@ -5726,6 +5727,48 @@ impl<T: BeaconChainTypes> BeaconChain<T> {
57265727
execution_payload_value,
57275728
)
57285729
}
5730+
BeaconState::Gloas(_) => {
5731+
let (
5732+
payload,
5733+
kzg_commitments,
5734+
maybe_blobs_and_proofs,
5735+
maybe_requests,
5736+
execution_payload_value,
5737+
) = block_contents
5738+
.ok_or(BlockProductionError::MissingExecutionPayload)?
5739+
.deconstruct();
5740+
5741+
(
5742+
BeaconBlock::Gloas(BeaconBlockGloas {
5743+
slot,
5744+
proposer_index,
5745+
parent_root,
5746+
state_root: Hash256::zero(),
5747+
body: BeaconBlockBodyGloas {
5748+
randao_reveal,
5749+
eth1_data,
5750+
graffiti,
5751+
proposer_slashings: proposer_slashings.into(),
5752+
attester_slashings: attester_slashings_electra.into(),
5753+
attestations: attestations_electra.into(),
5754+
deposits: deposits.into(),
5755+
voluntary_exits: voluntary_exits.into(),
5756+
sync_aggregate: sync_aggregate
5757+
.ok_or(BlockProductionError::MissingSyncAggregate)?,
5758+
execution_payload: payload
5759+
.try_into()
5760+
.map_err(|_| BlockProductionError::InvalidPayloadFork)?,
5761+
bls_to_execution_changes: bls_to_execution_changes.into(),
5762+
blob_kzg_commitments: kzg_commitments
5763+
.ok_or(BlockProductionError::InvalidPayloadFork)?,
5764+
execution_requests: maybe_requests
5765+
.ok_or(BlockProductionError::MissingExecutionRequests)?,
5766+
},
5767+
}),
5768+
maybe_blobs_and_proofs,
5769+
execution_payload_value,
5770+
)
5771+
}
57295772
};
57305773

57315774
let block = SignedBeaconBlock::from_block(

beacon_node/beacon_chain/src/beacon_fork_choice_store.rs

Lines changed: 1 addition & 1 deletion
Original file line number | Diff line number | Diff line change
@@ -377,7 +377,7 @@ where
377377
.store
378378
.get_hot_state(&self.justified_state_root, update_cache)
379379
.map_err(Error::FailedToReadState)?
380-
.ok_or_else(|| Error::MissingState(self.justified_state_root))?;
380+
.ok_or(Error::MissingState(self.justified_state_root))?;
381381

382382
self.justified_balances = JustifiedBalances::from_justified_state(&state)?;
383383
}

beacon_node/beacon_chain/src/capella_readiness.rs

Lines changed: 0 additions & 121 deletions
This file was deleted.

beacon_node/beacon_chain/src/data_availability_checker.rs

Lines changed: 54 additions & 6 deletions
Original file line number | Diff line number | Diff line change
@@ -20,7 +20,7 @@ use tracing::{debug, error, instrument};
2020
use types::blob_sidecar::{BlobIdentifier, BlobSidecar, FixedBlobSidecarList};
2121
use types::{
2222
BlobSidecarList, ChainSpec, DataColumnSidecar, DataColumnSidecarList, Epoch, EthSpec, Hash256,
23-
RuntimeVariableList, SignedBeaconBlock, Slot,
23+
SignedBeaconBlock, Slot,
2424
};
2525

2626
mod error;
@@ -29,7 +29,7 @@ mod state_lru_cache;
2929

3030
use crate::data_column_verification::{
3131
CustodyDataColumn, GossipVerifiedDataColumn, KzgVerifiedCustodyDataColumn,
32-
KzgVerifiedDataColumn, verify_kzg_for_data_column_list_with_scoring,
32+
KzgVerifiedDataColumn, verify_kzg_for_data_column_list,
3333
};
3434
use crate::metrics::{
3535
KZG_DATA_COLUMN_RECONSTRUCTION_ATTEMPTS, KZG_DATA_COLUMN_RECONSTRUCTION_FAILURES,
@@ -378,7 +378,7 @@ impl<T: BeaconChainTypes> DataAvailabilityChecker<T> {
378378
}
379379
if self.data_columns_required_for_block(&block) {
380380
return if let Some(data_column_list) = data_columns.as_ref() {
381-
verify_kzg_for_data_column_list_with_scoring(
381+
verify_kzg_for_data_column_list(
382382
data_column_list
383383
.iter()
384384
.map(|custody_column| custody_column.as_data_column()),
@@ -445,13 +445,11 @@ impl<T: BeaconChainTypes> DataAvailabilityChecker<T> {
445445
.flatten()
446446
.map(CustodyDataColumn::into_inner)
447447
.collect::<Vec<_>>();
448-
let all_data_columns =
449-
RuntimeVariableList::from_vec(all_data_columns, T::EthSpec::number_of_columns());
450448

451449
// verify kzg for all data columns at once
452450
if !all_data_columns.is_empty() {
453451
// Attributes fault to the specific peer that sent an invalid column
454-
verify_kzg_for_data_column_list_with_scoring(all_data_columns.iter(), &self.kzg)
452+
verify_kzg_for_data_column_list(all_data_columns.iter(), &self.kzg)
455453
.map_err(AvailabilityCheckError::InvalidColumn)?;
456454
}
457455

@@ -849,6 +847,7 @@ mod test {
849847
use std::sync::Arc;
850848
use std::time::Duration;
851849
use store::HotColdDB;
850+
use types::data_column_sidecar::DataColumn;
852851
use types::{ChainSpec, ColumnIndex, EthSpec, ForkName, MainnetEthSpec, Slot};
853852

854853
type E = MainnetEthSpec;
@@ -1011,6 +1010,55 @@ mod test {
10111010
)
10121011
}
10131012

1013+
/// Regression test for KZG verification truncation bug (https://github.com/sigp/lighthouse/pull/7927)
1014+
#[test]
1015+
fn verify_kzg_for_rpc_blocks_should_not_truncate_data_columns() {
1016+
let spec = Arc::new(ForkName::Fulu.make_genesis_spec(E::default_spec()));
1017+
let mut rng = StdRng::seed_from_u64(0xDEADBEEF0BAD5EEDu64);
1018+
let da_checker = new_da_checker(spec.clone());
1019+
1020+
// GIVEN multiple RPC blocks with data columns totalling more than 128
1021+
let blocks_with_columns = (0..2)
1022+
.map(|index| {
1023+
let (block, data_columns) = generate_rand_block_and_data_columns::<E>(
1024+
ForkName::Fulu,
1025+
NumBlobs::Number(1),
1026+
&mut rng,
1027+
&spec,
1028+
);
1029+
1030+
let custody_columns = if index == 0 {
1031+
// 128 valid data columns in the first block
1032+
data_columns
1033+
.into_iter()
1034+
.map(CustodyDataColumn::from_asserted_custody)
1035+
.collect::<Vec<_>>()
1036+
} else {
1037+
// invalid data columns in the second block
1038+
data_columns
1039+
.into_iter()
1040+
.map(|d| {
1041+
let invalid_sidecar = DataColumnSidecar {
1042+
column: DataColumn::<E>::empty(),
1043+
..d.as_ref().clone()
1044+
};
1045+
CustodyDataColumn::from_asserted_custody(Arc::new(invalid_sidecar))
1046+
})
1047+
.collect::<Vec<_>>()
1048+
};
1049+
1050+
RpcBlock::new_with_custody_columns(None, Arc::new(block), custody_columns)
1051+
.expect("should create RPC block with custody columns")
1052+
})
1053+
.collect::<Vec<_>>();
1054+
1055+
// WHEN verifying all blocks together (totalling 256 data columns)
1056+
let verification_result = da_checker.verify_kzg_for_rpc_blocks(blocks_with_columns);
1057+
1058+
// THEN batch block verification should fail due to 128 invalid columns in the second block
1059+
verification_result.expect_err("should have failed to verify blocks");
1060+
}
1061+
10141062
fn init_custody_context_with_ordered_columns(
10151063
custody_context: &Arc<CustodyContext<E>>,
10161064
mut rng: &mut StdRng,

beacon_node/beacon_chain/src/data_availability_checker/error.rs

Lines changed: 1 addition & 1 deletion
Original file line number | Diff line number | Diff line change
@@ -4,7 +4,7 @@ use types::{BeaconStateError, ColumnIndex, Hash256};
44
#[derive(Debug)]
55
pub enum Error {
66
InvalidBlobs(KzgError),
7-
InvalidColumn(Vec<(ColumnIndex, KzgError)>),
7+
InvalidColumn((Option<ColumnIndex>, KzgError)),
88
ReconstructColumnsError(KzgError),
99
KzgCommitmentMismatch {
1010
blob_commitment: KzgCommitment,

0 commit comments

Comments (0)