Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
48 changes: 36 additions & 12 deletions crates/chain/src/local_chain.rs
Original file line number Diff line number Diff line change
Expand Up @@ -26,11 +26,14 @@ where
let mut base: Option<CheckPoint<D>> = None;

for cp in init_cp.iter() {
if cp.height() >= start_height {
extension.insert(cp.height(), cp.data());
} else {
base = Some(cp);
break;
// Base tip should always have data.
if let Some(data) = cp.data() {
if cp.height() >= start_height {
extension.insert(cp.height(), data);
} else {
base = Some(cp);
break;
}
}
}

Expand All @@ -51,6 +54,7 @@ where
.expect("extension is strictly greater than base"),
None => LocalChain::from_blocks(extension)?.tip(),
};

init_cp = new_tip;
}

Expand Down Expand Up @@ -322,11 +326,7 @@ where
/// recover the current chain.
pub fn initial_changeset(&self) -> ChangeSet<D> {
ChangeSet {
blocks: self
.tip
.iter()
.map(|cp| (cp.height(), Some(cp.data())))
.collect(),
blocks: self.tip.iter().map(|cp| (cp.height(), cp.data())).collect(),
}
}

Expand All @@ -349,6 +349,20 @@ where
update_hash: Some(data.to_blockhash()),
});
}

// If this `CheckPoint` is an empty placeholder, append the `data` to it.
if original_cp.data_ref().is_none() {
let mut changeset = ChangeSet::<D>::default();
changeset.blocks.insert(height, Some(data));
self.apply_changeset(&changeset)
.map_err(|_| AlterCheckPointError {
height: 0,
original_hash: self.genesis_hash(),
update_hash: None,
})?;
return Ok(changeset);
}

return Ok(ChangeSet::default());
}

Expand Down Expand Up @@ -634,7 +648,12 @@ where
match (curr_orig.as_ref(), curr_update.as_ref()) {
// Update block that doesn't exist in the original chain
(o, Some(u)) if Some(u.height()) > o.map(|o| o.height()) => {
changeset.blocks.insert(u.height(), Some(u.data()));
// Only append to `ChangeSet` when the update has complete data. Entries where
// `data` does not exist that are created via `prev_blockhash` should not alter the
// `ChangeSet`.
if let Some(data) = u.data() {
changeset.blocks.insert(u.height(), Some(data));
}
prev_update = curr_update.take();
}
// Original block that isn't in the update
Expand Down Expand Up @@ -682,10 +701,15 @@ where
return Ok((new_tip, changeset));
}
}
// Even if the hashes are the same, the update may contain data which the
// original does not have.
if let (None, Some(u_data)) = (o.data_ref(), u.data()) {
changeset.blocks.insert(u.height(), Some(u_data));
}
} else {
// We have an invalidation height so we set the height to the updated hash and
// also purge all the original chain block hashes above this block.
changeset.blocks.insert(u.height(), Some(u.data()));
changeset.blocks.insert(u.height(), u.data());
for invalidated_height in potentially_invalidated_heights.drain(..) {
changeset.blocks.insert(invalidated_height, None);
}
Expand Down
156 changes: 155 additions & 1 deletion crates/chain/tests/test_local_chain.rs
Original file line number Diff line number Diff line change
Expand Up @@ -11,7 +11,7 @@ use bdk_chain::{
BlockId,
};
use bdk_testenv::{chain_update, hash, local_chain};
use bitcoin::{block::Header, hashes::Hash, BlockHash};
use bitcoin::{block::Header, hashes::Hash, BlockHash, CompactTarget, TxMerkleNode};
use proptest::prelude::*;

#[derive(Debug)]
Expand Down Expand Up @@ -474,6 +474,160 @@ fn local_chain_insert_header() {
}
}

/// Validates `merge_chains` behavior on chains that contain placeholder checkpoints (`data: None`).
///
/// Placeholders are created when a `CheckPoint`’s `prev_blockhash` references a block at a height
/// with no stored checkpoint. This test ensures `merge_chains` handles them correctly and that the
/// resulting chain never exposes a placeholder checkpoint.
#[test]
fn merge_chains_handles_placeholders() {
    // Build a minimal `Header` whose `prev_blockhash` links it to its parent. The
    // `nonce` is the only other varying field, so two headers with the same parent
    // but different nonces produce different block hashes (used for conflicts).
    fn header(prev_blockhash: bitcoin::BlockHash, nonce: u32) -> Header {
        Header {
            version: bitcoin::block::Version::default(),
            prev_blockhash,
            merkle_root: TxMerkleNode::all_zeros(),
            time: 0,
            bits: CompactTarget::default(),
            nonce,
        }
    }

    // Construct a `LocalChain` from `(height, header)` pairs; height 0 must be present.
    fn local_chain(blocks: Vec<(u32, Header)>) -> LocalChain<Header> {
        LocalChain::from_blocks(blocks.into_iter().collect::<BTreeMap<_, _>>())
            .expect("chain must have genesis block")
    }

    // Construct an update tip (`CheckPoint` chain) from `(height, header)` pairs.
    fn update_chain(blocks: &[(u32, Header)]) -> CheckPoint<Header> {
        CheckPoint::from_blocks(blocks.iter().copied()).expect("checkpoint must be valid")
    }

    // A linear chain of headers: A (genesis) <- B <- C <- D <- E.
    let a = header(hash!("genesis"), 0);
    let b = header(a.block_hash(), 0);
    let c = header(b.block_hash(), 0);
    let d = header(c.block_hash(), 0);
    let e = header(d.block_hash(), 0);

    // Set a different `nonce` for conflicting `Header`s to ensure different `BlockHash`.
    let c_conflict = header(b.block_hash(), 1);
    let d_conflict = header(c_conflict.block_hash(), 1);

    struct TestCase {
        // Label used in assertion messages; must be unique per case.
        name: &'static str,
        // Updates applied in order via `apply_update`.
        updates: Vec<CheckPoint<Header>>,
        // Heights to disconnect (via a `None` changeset entry) after the updates.
        invalidate_heights: Vec<u32>,
        // Heights expected to hold placeholder (`data: None`) checkpoints at the end.
        expected_placeholder_heights: Vec<u32>,
        // Expected final chain state.
        expected_chain: LocalChain<Header>,
    }

    let test_cases = [
        // Test case 1: Create a placeholder for B via C and a placeholder for D via E.
        TestCase {
            name: "insert_placeholder",
            updates: vec![update_chain(&[(0, a), (2, c), (4, e)])],
            invalidate_heights: vec![],
            expected_placeholder_heights: vec![1, 3],
            expected_chain: local_chain(vec![(0, a), (2, c), (4, e)]),
        },
        // Test case 2: Create a placeholder for B via C, then update provides conflicting C'.
        TestCase {
            name: "conflict_at_tip_keeps_placeholder",
            updates: vec![
                update_chain(&[(0, a), (2, c)]),
                update_chain(&[(2, c_conflict)]),
            ],
            invalidate_heights: vec![],
            expected_placeholder_heights: vec![1],
            expected_chain: local_chain(vec![(0, a), (1, b), (2, c_conflict)]),
        },
        // Test case 3: Create placeholder for C via D.
        TestCase {
            name: "conflict_at_filled_height",
            updates: vec![update_chain(&[(0, a), (3, d)])],
            invalidate_heights: vec![],
            expected_placeholder_heights: vec![2],
            expected_chain: local_chain(vec![(0, a), (3, d)]),
        },
        // Test case 4: Create placeholder for C via D, then insert conflicting C' which should
        // drop D and replace C.
        TestCase {
            name: "conflict_replaces_filled_height",
            updates: vec![
                update_chain(&[(0, a), (3, d)]),
                update_chain(&[(0, a), (2, c_conflict)]),
            ],
            invalidate_heights: vec![],
            expected_placeholder_heights: vec![1],
            expected_chain: local_chain(vec![(0, a), (2, c_conflict)]),
        },
        // Test case 5: Create placeholder for B via C, then invalidate C.
        TestCase {
            name: "invalidate_tip_falls_back",
            updates: vec![update_chain(&[(0, a), (2, c)])],
            invalidate_heights: vec![2],
            expected_placeholder_heights: vec![],
            expected_chain: local_chain(vec![(0, a)]),
        },
        // Test case 6: Create placeholder for C via D, then insert D' which has `prev_blockhash`
        // that does not point to C. TODO: Handle error?
        TestCase {
            name: "expected_error",
            updates: vec![
                update_chain(&[(0, a), (3, d)]),
                update_chain(&[(3, d_conflict)]),
            ],
            invalidate_heights: vec![],
            expected_placeholder_heights: vec![2],
            expected_chain: local_chain(vec![(0, a), (3, d)]),
        },
    ];

    for (i, t) in test_cases.into_iter().enumerate() {
        let mut chain = local_chain(vec![(0, a)]);
        for upd in t.updates {
            // If `apply_update` errors, it is because the new chain cannot be merged. So it should
            // follow that this validates behavior if the final `expected_chain` state is correct.
            if chain.apply_update(upd).is_ok() {
                if !t.invalidate_heights.is_empty() {
                    let cs: ChangeSet<Header> = t
                        .invalidate_heights
                        .iter()
                        .copied()
                        .map(|h| (h, None))
                        .collect();
                    chain.apply_changeset(&cs).expect("changeset should apply");
                }

                // Ensure we never end up with a placeholder tip.
                assert!(
                    chain.tip().data_ref().is_some(),
                    "[{}] {}: tip must always be materialized",
                    i,
                    t.name
                );
            }
        }

        // Collect the heights of any remaining placeholder checkpoints, sorted
        // ascending for comparison against the expected list.
        let mut placeholder_heights = chain
            .tip()
            .iter()
            .filter(|cp| cp.data_ref().is_none())
            .map(|cp| cp.height())
            .collect::<Vec<_>>();
        // `sort_unstable` suffices: elements are plain `u32`s, so stability is irrelevant.
        placeholder_heights.sort_unstable();
        assert_eq!(
            placeholder_heights, t.expected_placeholder_heights,
            "[{}] {}: placeholder height mismatch",
            i, t.name
        );

        assert_eq!(
            chain, t.expected_chain,
            "[{}] {}: unexpected final chain",
            i, t.name
        );
    }
}

#[test]
fn local_chain_disconnect_from() {
struct TestCase {
Expand Down
Loading
Loading