162 changes: 123 additions & 39 deletions crates/chain/src/local_chain.rs
@@ -6,8 +6,8 @@ use core::ops::RangeBounds;

use crate::collections::BTreeMap;
use crate::{BlockId, ChainOracle, Merge};
use bdk_core::ToBlockHash;
pub use bdk_core::{CheckPoint, CheckPointIter};
use bdk_core::{CheckPointEntry, ToBlockHash};
use bitcoin::block::Header;
use bitcoin::BlockHash;

@@ -69,7 +69,10 @@ impl<D> PartialEq for LocalChain<D> {
}
}

impl<D> ChainOracle for LocalChain<D> {
impl<D> ChainOracle for LocalChain<D>
where
D: ToBlockHash + Copy,
{
type Error = Infallible;

fn is_block_in_chain(
@@ -83,10 +86,18 @@ impl<D> ChainOracle for LocalChain<D> {
Some(cp) if cp.hash() == chain_tip.hash => cp,
_ => return Ok(None),
};
match chain_tip_cp.get(block.height) {
Some(cp) => Ok(Some(cp.hash() == block.hash)),
None => Ok(None),

if let Some(cp) = chain_tip_cp.get(block.height) {
return Ok(Some(cp.hash() == block.hash));
}

if let Some(next_cp) = chain_tip_cp.get(block.height.saturating_add(1)) {
if let Some(prev_hash) = next_cp.prev_blockhash() {
return Ok(Some(prev_hash == block.hash));
}
}

Ok(None)
}

fn get_chain_tip(&self) -> Result<BlockId, Self::Error> {
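The fallback above lets the oracle answer for a height that has no checkpoint of its own, provided the checkpoint one block higher carries a `prev_blockhash` (e.g. `Header` data). A minimal standalone sketch of that rule, using plain `bitcoin` types rather than the bdk checkpoint structures (the `headers` map is an illustrative stand-in for the checkpoint list):

```rust
// Standalone sketch of the lookup rule, assuming header-backed data: a block
// is in chain if an entry exists at its height with the same hash, or if the
// entry one height above commits to it via `prev_blockhash`.
use std::collections::BTreeMap;

use bitcoin::{block::Header, BlockHash};

fn is_block_in_chain_sketch(
    headers: &BTreeMap<u32, Header>, // height -> header
    height: u32,
    hash: BlockHash,
) -> Option<bool> {
    // Direct hit: a checkpoint exists at the queried height.
    if let Some(header) = headers.get(&height) {
        return Some(header.block_hash() == hash);
    }
    // Fallback: the next checkpoint's header names its parent block.
    if let Some(next) = headers.get(&height.saturating_add(1)) {
        return Some(next.prev_blockhash == hash);
    }
    // Not enough information either way.
    None
}
```

Note that the real method first verifies that the `chain_tip` it was given matches the tip of the local chain, a check the sketch omits.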
@@ -653,59 +664,132 @@ where
}
}
(Some(o), Some(u)) => {
if o.hash() == u.hash() {
// We have found our point of agreement 🎉 -- we require that the previous (i.e.
// higher because we are iterating backwards) block in the original chain was
// invalidated (if it exists). This ensures that there is an unambiguous point
// of connection to the original chain from the update chain
// (i.e. we know the precisely which original blocks are
// invalid).
if !prev_orig_was_invalidated && !point_of_agreement_found {
if let (Some(prev_orig), Some(_prev_update)) = (&prev_orig, &prev_update) {
if o.height() == u.height() {
if o.hash() == u.hash() {
// We have found our point of agreement 🎉 -- we require that the previous
// (i.e. higher because we are iterating backwards) block in the original
// chain was invalidated (if it exists). This ensures that there is an
// unambiguous point of connection to the original chain from the update
// chain (i.e. we know precisely which original blocks are invalid).
if !prev_orig_was_invalidated && !point_of_agreement_found {
if let (Some(prev_orig), Some(_prev_update)) =
(&prev_orig, &prev_update)
{
return Err(CannotConnectError {
try_include_height: prev_orig.height(),
});
}
}
point_of_agreement_found = true;
prev_orig_was_invalidated = false;

// OPTIMIZATION 2 -- if we have the same underlying pointer at this point,
// we can guarantee that no older blocks are introduced.
if o.eq_ptr(u) {
if is_update_height_superset_of_original {
return Ok((update_tip, changeset));
} else {
let new_tip =
apply_changeset_to_checkpoint(original_tip, &changeset)
.map_err(|_| CannotConnectError {
try_include_height: 0,
})?;
return Ok((new_tip, changeset));
}
}
} else {
// We have an invalidation height so we set the height to the updated hash
// and also purge all the original chain block hashes above this block.
changeset.blocks.insert(u.height(), Some(u.data()));
for invalidated_height in potentially_invalidated_heights.drain(..) {
changeset.blocks.insert(invalidated_height, None);
}
prev_orig_was_invalidated = true;
}
prev_orig = curr_orig.take();
prev_update = curr_update.take();
}
// Compare original and update entries when heights differ by exactly 1.
else if o.height() == u.height() + 1 {
let o_entry = CheckPointEntry::CheckPoint(o.clone());
if let Some(o_prev) = o_entry.as_prev() {
if o_prev.height() == u.height() && o_prev.hash() == u.hash() {
// Ambiguous: update did not provide a real checkpoint at o.height().
return Err(CannotConnectError {
try_include_height: prev_orig.height(),
try_include_height: o.height(),
});
} else {
// No match: treat as o > u case.
potentially_invalidated_heights.push(o.height());
prev_orig_was_invalidated = false;
prev_orig = curr_orig.take();
is_update_height_superset_of_original = false;
}
} else {
// No prev available: treat as o > u case.
potentially_invalidated_heights.push(o.height());
prev_orig_was_invalidated = false;
prev_orig = curr_orig.take();
is_update_height_superset_of_original = false;
}
point_of_agreement_found = true;
prev_orig_was_invalidated = false;
// OPTIMIZATION 2 -- if we have the same underlying pointer at this point, we
// can guarantee that no older blocks are introduced.
if o.eq_ptr(u) {
if is_update_height_superset_of_original {
return Ok((update_tip, changeset));
} else if u.height() == o.height() + 1 {
let u_entry = CheckPointEntry::CheckPoint(u.clone());
if let Some(u_as_prev) = u_entry.as_prev() {
if u_as_prev.height() == o.height() && u_as_prev.hash() == o.hash() {
// Agreement via `prev_blockhash`.
if !prev_orig_was_invalidated && !point_of_agreement_found {
if let (Some(prev_orig), Some(_prev_update)) =
(&prev_orig, &prev_update)
{
return Err(CannotConnectError {
try_include_height: prev_orig.height(),
});
}
}
point_of_agreement_found = true;
prev_orig_was_invalidated = false;

// Update is missing a real checkpoint at o.height().
is_update_height_superset_of_original = false;

// Record the update checkpoint one-above the agreed parent.
changeset.blocks.insert(u.height(), Some(u.data()));

// Advance both sides after agreement.
prev_orig = curr_orig.take();
prev_update = curr_update.take();
} else {
let new_tip = apply_changeset_to_checkpoint(original_tip, &changeset)
.map_err(|_| CannotConnectError {
try_include_height: 0,
})?;
return Ok((new_tip, changeset));
// No match: add update block.
changeset.blocks.insert(u.height(), Some(u.data()));
prev_update = curr_update.take();
}
} else {
// No prev available: just add update block.
changeset.blocks.insert(u.height(), Some(u.data()));
prev_update = curr_update.take();
}
} else if o.height() > u.height() {
// Original > Update: mark original as potentially invalidated.
potentially_invalidated_heights.push(o.height());
prev_orig_was_invalidated = false;
prev_orig = curr_orig.take();
is_update_height_superset_of_original = false;
Comment on lines +771 to +776 (Contributor):
question: isn't this already being covered by the lines L652-656 ?

} else {
// We have an invalidation height so we set the height to the updated hash and
// also purge all the original chain block hashes above this block.
// Update > Original: add update block.
changeset.blocks.insert(u.height(), Some(u.data()));
for invalidated_height in potentially_invalidated_heights.drain(..) {
changeset.blocks.insert(invalidated_height, None);
}
prev_orig_was_invalidated = true;
prev_update = curr_update.take();
}
prev_update = curr_update.take();
prev_orig = curr_orig.take();
}
(None, None) => {
break;
}
_ => {
unreachable!("compiler cannot tell that everything has been covered")
unreachable!("should have been handled above")
}
}
}

// When we don't have a point of agreement you can imagine it is implicitly the
// genesis block so we need to do the final connectivity check which in this case
// just means making sure the entire original chain was invalidated.
// Final connectivity check
if !prev_orig_was_invalidated && !point_of_agreement_found {
if let Some(prev_orig) = prev_orig {
return Err(CannotConnectError {
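For reference, a compact sketch of the comparison the merge loop now performs when both iterators yield an entry: equal heights compare hashes directly, while heights that differ by exactly one consult the higher entry's `prev_blockhash`. The `Entry` type and `classify` function below are illustrative stand-ins, not the crate's API, and they gloss over the asymmetry in the real code (original one above a linked update is rejected as ambiguous, whereas update one above a linked original counts as agreement):

```rust
// Illustrative classification of the cases handled in the merge loop,
// assuming header-backed data; not the bdk types themselves.
use bitcoin::block::Header;

struct Entry {
    height: u32,
    header: Header,
}

enum Relation {
    SameBlock,        // equal height and equal hash
    Conflict,         // equal height, different hash
    LinkedByPrevHash, // heights differ by one; the higher header commits to the lower block
    Unrelated,        // anything else
}

fn classify(o: &Entry, u: &Entry) -> Relation {
    if o.height == u.height {
        if o.header.block_hash() == u.header.block_hash() {
            Relation::SameBlock
        } else {
            Relation::Conflict
        }
    } else if o.height == u.height + 1 && o.header.prev_blockhash == u.header.block_hash() {
        // Original sits one above the update block it commits to.
        Relation::LinkedByPrevHash
    } else if u.height == o.height + 1 && u.header.prev_blockhash == o.header.block_hash() {
        // Update sits one above the original block it commits to.
        Relation::LinkedByPrevHash
    } else {
        Relation::Unrelated
    }
}
```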
102 changes: 102 additions & 0 deletions crates/core/src/checkpoint.rs
@@ -6,6 +6,78 @@ use bitcoin::{block::Header, BlockHash};

use crate::BlockId;

/// Internal type to represent entries in `CheckPoint` that can handle both actual checkpoint data
/// and placeholder entries for types that have `prev_blockhash`.
#[derive(Debug, Clone, PartialEq)]
pub enum CheckPointEntry<D> {
/// An actual `CheckPoint` entry.
CheckPoint(CheckPoint<D>),
/// A `CheckPoint` representing a `prev_blockhash` reference.
PrevBlockHash(CheckPoint<D>),
}

impl<D> CheckPointEntry<D> {
/// Returns true if this entry is a `prev_blockhash` reference.
pub fn is_prev_blockhash(&self) -> bool {
matches!(self, CheckPointEntry::PrevBlockHash(_))
}

/// Returns true if this entry contains actual `CheckPoint` data.
pub fn is_checkpoint(&self) -> bool {
matches!(self, CheckPointEntry::CheckPoint(_))
}

/// Get the height of this entry.
pub fn height(&self) -> u32 {
match self {
CheckPointEntry::CheckPoint(cp) => cp.height(),
CheckPointEntry::PrevBlockHash(cp) => cp.height().saturating_sub(1),
}
}

/// Get the `BlockHash` of this entry.
pub fn hash(&self) -> BlockHash
where
D: ToBlockHash,
{
match self {
CheckPointEntry::CheckPoint(cp) => cp.hash(),
CheckPointEntry::PrevBlockHash(cp) => cp
.prev_blockhash()
.expect("PrevBlockHash variant must have prev_blockhash"),
}
}

/// Create a synthetic prev entry at height `h - 1`.
pub fn as_prev(&self) -> Option<CheckPointEntry<D>>
where
D: ToBlockHash,
{
match self {
CheckPointEntry::CheckPoint(cp) => {
if cp.prev_blockhash().is_some() && cp.height() > 0 {
Some(CheckPointEntry::PrevBlockHash(cp.clone()))
} else {
None
}
}
CheckPointEntry::PrevBlockHash(_) => None, // Can't create prev of prev
}
}

/// Move to the next lower height `CheckPoint` entry.
pub fn next(&self) -> Option<CheckPointEntry<D>> {
match self {
CheckPointEntry::CheckPoint(cp) => cp
.prev()
.map(|prev_cp| CheckPointEntry::CheckPoint(prev_cp)),
CheckPointEntry::PrevBlockHash(cp) => cp
.prev()
.map(|prev_cp| CheckPointEntry::CheckPoint(prev_cp)),
}
}
}
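In other words, a header-backed entry at height `h` can stand in for its parent at `h - 1` by reporting the header's `prev_blockhash`. A small sketch of that derivation, using plain `bitcoin` types rather than `CheckPointEntry` itself:

```rust
// Sketch of what `as_prev` derives for header-backed data: the synthetic
// parent entry reports height h - 1 and the hash the header commits to.
use bitcoin::{block::Header, BlockHash};

fn synthetic_parent(height: u32, header: &Header) -> Option<(u32, BlockHash)> {
    if height == 0 {
        // Genesis has no parent entry.
        return None;
    }
    Some((height - 1, header.prev_blockhash))
}
```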

/// A checkpoint is a node of a reference-counted linked list of [`BlockId`]s.
///
/// Checkpoints are cheaply cloneable and are useful to find the agreement point between two sparse
@@ -68,6 +140,11 @@ impl<D> Drop for CPInner<D> {
pub trait ToBlockHash {
/// Returns the [`BlockHash`] for the associated [`CheckPoint`] `data` type.
fn to_blockhash(&self) -> BlockHash;

/// Returns `None` if the type has no knowledge of the previous [`BlockHash`].
fn prev_blockhash(&self) -> Option<BlockHash> {
None
}
}
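Data types that only know their own hash can rely on the new default. A hypothetical implementor sketch (`HashOnly` is not part of the crate, and the `bdk_core::ToBlockHash` path is assumed from the `local_chain.rs` imports above):

```rust
use bdk_core::ToBlockHash;
use bitcoin::BlockHash;

/// Hypothetical checkpoint data that carries only its own hash.
struct HashOnly(BlockHash);

impl ToBlockHash for HashOnly {
    fn to_blockhash(&self) -> BlockHash {
        self.0
    }
    // `prev_blockhash` is inherited from the trait default and returns `None`,
    // so no `prev_blockhash`-based fallbacks apply to this data type.
}
```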

impl ToBlockHash for BlockHash {
@@ -80,6 +157,23 @@ impl ToBlockHash for Header {
fn to_blockhash(&self) -> BlockHash {
self.block_hash()
}

fn prev_blockhash(&self) -> Option<BlockHash> {
Some(self.prev_blockhash)
}
}

impl<D: ToBlockHash> ToBlockHash for CheckPointEntry<D> {
fn to_blockhash(&self) -> BlockHash {
self.hash()
}

fn prev_blockhash(&self) -> Option<BlockHash> {
match self {
CheckPointEntry::CheckPoint(cp) => cp.prev_blockhash(),
CheckPointEntry::PrevBlockHash(_) => None,
}
}
}

impl<D> PartialEq for CheckPoint<D> {
@@ -188,6 +282,14 @@ impl<D> CheckPoint<D> {
pub fn eq_ptr(&self, other: &Self) -> bool {
Arc::as_ptr(&self.0) == Arc::as_ptr(&other.0)
}

/// Return the `prev_blockhash` from the `CheckPoint`, if available.
pub fn prev_blockhash(&self) -> Option<BlockHash>
where
D: ToBlockHash,
{
self.0.data.prev_blockhash()
}
}

// Methods where `D: ToBlockHash`