From fbc7d631dc84903e39fb555f9d5a4911367d7257 Mon Sep 17 00:00:00 2001 From: codingp110 Date: Tue, 19 Aug 2025 10:36:50 +0530 Subject: [PATCH 1/3] feat: add utilities for testing persistence Added the following functions to test if persistence of `bdk_chain` is happening correctly. - `persist_txgraph_changeset` - `persist_indexer_changeset` - `persist_local_chain_changeset` - `persist_last_seen`, `persist_last_evicted`, `persist_first_seen` - `persist_txouts` - `persist_txs` - `persist_anchors` - `persist_last_revealed` - `persist_spk_cache` Even though the first three tests cover every part of the `ChangeSet`s , the other tests are retained so as to help in unit testing. --- crates/testenv/Cargo.toml | 5 +- crates/testenv/src/lib.rs | 1 + crates/testenv/src/persist_test_utils.rs | 721 +++++++++++++++++++++++ 3 files changed, 725 insertions(+), 2 deletions(-) create mode 100644 crates/testenv/src/persist_test_utils.rs diff --git a/crates/testenv/Cargo.toml b/crates/testenv/Cargo.toml index eff58a41c..e2b42b10f 100644 --- a/crates/testenv/Cargo.toml +++ b/crates/testenv/Cargo.toml @@ -16,8 +16,10 @@ readme = "README.md" workspace = true [dependencies] -bdk_chain = { path = "../chain", version = "0.23.1", default-features = false } +bdk_chain = { path = "../chain", version = "0.23.1", default-features = false, features = ["miniscript"]} electrsd = { version = "0.28.0", features = [ "legacy" ], default-features = false } +anyhow = "1.0.98" +tempfile = "3.20.0" [dev-dependencies] bdk_testenv = { path = "." } @@ -27,6 +29,5 @@ default = ["std", "download"] download = ["electrsd/bitcoind_25_0", "electrsd/esplora_a33e97e1"] std = ["bdk_chain/std"] serde = ["bdk_chain/serde"] - [package.metadata.docs.rs] no-default-features = true diff --git a/crates/testenv/src/lib.rs b/crates/testenv/src/lib.rs index 9faf43bf2..ac3cc6326 100644 --- a/crates/testenv/src/lib.rs +++ b/crates/testenv/src/lib.rs @@ -1,5 +1,6 @@ #![cfg_attr(coverage_nightly, feature(coverage_attribute))] +pub mod persist_test_utils; pub mod utils; use bdk_chain::{ diff --git a/crates/testenv/src/persist_test_utils.rs b/crates/testenv/src/persist_test_utils.rs new file mode 100644 index 000000000..82a42dd5a --- /dev/null +++ b/crates/testenv/src/persist_test_utils.rs @@ -0,0 +1,721 @@ +use crate::block_id; +use crate::hash; +use bdk_chain::bitcoin; +use bdk_chain::miniscript::{Descriptor, DescriptorPublicKey}; +use bdk_chain::{ + bitcoin::{ + absolute, key::Secp256k1, transaction, Address, Amount, OutPoint, ScriptBuf, Transaction, + TxIn, TxOut, Txid, + }, + indexer::keychain_txout, + local_chain, tx_graph, ConfirmationBlockTime, DescriptorExt, DescriptorId, Merge, SpkIterator, +}; +use std::collections::{BTreeMap, BTreeSet}; +use std::path::Path; +use std::str::FromStr; +use std::sync::Arc; + +fn create_one_inp_one_out_tx(txid: Txid, amount: u64) -> Transaction { + Transaction { + version: transaction::Version::ONE, + lock_time: absolute::LockTime::ZERO, + input: vec![TxIn { + previous_output: OutPoint::new(txid, 0), + ..TxIn::default() + }], + output: vec![TxOut { + value: Amount::from_sat(amount), + script_pubkey: Address::from_str("bcrt1q3qtze4ys45tgdvguj66zrk4fu6hq3a3v9pfly5") + .unwrap() + .assume_checked() + .script_pubkey(), + }], + } +} + +fn spk_at_index(descriptor: &Descriptor, index: u32) -> ScriptBuf { + descriptor + .derived_descriptor(&Secp256k1::verification_only(), index) + .expect("must derive") + .script_pubkey() +} + +pub fn persist_txgraph_changeset( + file_name: &str, + create_store: CreateStore, + initialize: 
Initialize, + persist: Persist, +) where + CreateStore: Fn(&Path) -> anyhow::Result, + Initialize: Fn(&mut Store) -> anyhow::Result>, + Persist: Fn(&mut Store, &tx_graph::ChangeSet) -> anyhow::Result<()>, +{ + use tx_graph::ChangeSet; + let temp_dir = tempfile::tempdir().expect("must create tempdir"); + let file_path = temp_dir.path().join(file_name); + let mut store = create_store(&file_path).expect("store should get created"); + + let changeset = initialize(&mut store).expect("should load empty changeset"); + assert_eq!(changeset, ChangeSet::::default()); + + let tx1 = Arc::new(create_one_inp_one_out_tx(hash!("BTC"), 30_000)); + + let conf_anchor: ConfirmationBlockTime = ConfirmationBlockTime { + block_id: block_id!(910425, "Rust"), + confirmation_time: 1755416660, + }; + + let mut tx_graph_changeset1 = ChangeSet:: { + txs: [tx1.clone()].into(), + txouts: [ + ( + OutPoint::new(hash!("BDK"), 0), + TxOut { + value: Amount::from_sat(1300), + script_pubkey: Address::from_str( + "bcrt1q8an5jfmpq8w2hr648nn34ecf9zdtxk0qyqtrfl", + ) + .unwrap() + .assume_checked() + .script_pubkey(), + }, + ), + ( + OutPoint::new(hash!("Bitcoin_fixes_things"), 0), + TxOut { + value: Amount::from_sat(1400), + script_pubkey: Address::from_str( + "bcrt1q8an5jfmpq8w2hr648nn34ecf9zdtxk0qyqtrfl", + ) + .unwrap() + .assume_checked() + .script_pubkey(), + }, + ), + ] + .into(), + anchors: [(conf_anchor, tx1.compute_txid())].into(), + last_seen: [(tx1.compute_txid(), 1755416650)].into(), + first_seen: [(tx1.compute_txid(), 1755416655)].into(), + last_evicted: [(tx1.compute_txid(), 1755416660)].into(), + }; + + persist(&mut store, &tx_graph_changeset1).expect("should persist changeset"); + + let changeset = initialize(&mut store).expect("should load persisted changeset"); + assert_eq!(changeset, tx_graph_changeset1); + + let tx2 = Arc::new(create_one_inp_one_out_tx(tx1.compute_txid(), 20_000)); + + let conf_anchor: ConfirmationBlockTime = ConfirmationBlockTime { + block_id: block_id!(910426, "BOSS"), + confirmation_time: 1755416700, + }; + + let tx_graph_changeset2 = ChangeSet:: { + txs: [tx2.clone()].into(), + txouts: [( + OutPoint::new(hash!("REDB"), 0), + TxOut { + value: Amount::from_sat(10000), + script_pubkey: Address::from_str("bcrt1q8an5jfmpq8w2hr648nn34ecf9zdtxk0qyqtrfl") + .unwrap() + .assume_checked() + .script_pubkey(), + }, + )] + .into(), + anchors: [(conf_anchor, tx2.compute_txid())].into(), + last_seen: [(tx2.compute_txid(), 1755416700)].into(), + first_seen: [(tx2.compute_txid(), 1755416670)].into(), + last_evicted: [(tx2.compute_txid(), 1755416760)].into(), + }; + + persist(&mut store, &tx_graph_changeset2).expect("should persist changeset"); + + let changeset = initialize(&mut store).expect("should load persisted changeset"); + + tx_graph_changeset1.merge(tx_graph_changeset2); + + assert_eq!(tx_graph_changeset1, changeset); +} + +fn parse_descriptor(descriptor: &str) -> Descriptor { + let secp = bdk_chain::bitcoin::secp256k1::Secp256k1::signing_only(); + Descriptor::::parse_descriptor(&secp, descriptor) + .unwrap() + .0 +} + +pub fn persist_indexer_changeset( + file_name: &str, + create_store: CreateStore, + initialize: Initialize, + persist: Persist, +) where + CreateStore: Fn(&Path) -> anyhow::Result, + Initialize: Fn(&mut Store) -> anyhow::Result, + Persist: Fn(&mut Store, &keychain_txout::ChangeSet) -> anyhow::Result<()>, +{ + use crate::utils::DESCRIPTORS; + use keychain_txout::ChangeSet; + + let temp_dir = tempfile::tempdir().expect("must create tempdir"); + let file_path = 
temp_dir.path().join(file_name); + let mut store = create_store(&file_path).expect("store should get created"); + + let changeset = initialize(&mut store).expect("should load empty changeset"); + assert_eq!(changeset, ChangeSet::default()); + + let descriptor_ids = DESCRIPTORS.map(|d| parse_descriptor(d).descriptor_id()); + let descs = DESCRIPTORS.map(parse_descriptor); + + let mut changeset = ChangeSet { + last_revealed: [(descriptor_ids[0], 1), (descriptor_ids[1], 100)].into(), + spk_cache: [ + ( + descriptor_ids[0], + SpkIterator::new_with_range(&descs[0], 0..=26).collect(), + ), + ( + descriptor_ids[1], + SpkIterator::new_with_range(&descs[1], 0..=125).collect(), + ), + ] + .into(), + }; + + persist(&mut store, &changeset).expect("should persist keychain_txout"); + + let changeset_read = initialize(&mut store).expect("should load persisted changeset"); + + assert_eq!(changeset_read, changeset); + + let changeset_new = ChangeSet { + last_revealed: [(descriptor_ids[0], 2)].into(), + spk_cache: [( + descriptor_ids[0], + [(27, spk_at_index(&descs[0], 27))].into(), + )] + .into(), + }; + + persist(&mut store, &changeset_new).expect("should persist second changeset"); + + let changeset_read_new = initialize(&mut store).expect("should load merged changesets"); + changeset.merge(changeset_new); + + assert_eq!(changeset_read_new, changeset); +} + +pub fn persist_local_chain_changeset( + file_name: &str, + create_store: CreateStore, + initialize: Initialize, + persist: Persist, +) where + CreateStore: Fn(&Path) -> anyhow::Result, + Initialize: Fn(&mut Store) -> anyhow::Result, + Persist: Fn(&mut Store, &local_chain::ChangeSet) -> anyhow::Result<()>, +{ + use local_chain::ChangeSet; + let temp_dir = tempfile::tempdir().expect("must create tempdir"); + let file_path = temp_dir.path().join(file_name); + let mut store = create_store(&file_path).expect("store should get created"); + + let changeset = initialize(&mut store).expect("should load empty changeset"); + assert_eq!(changeset, ChangeSet::default()); + + let changeset = ChangeSet { + blocks: [(910425, Some(hash!("B"))), (910426, Some(hash!("D")))].into(), + }; + + persist(&mut store, &changeset).expect("should persist changeset"); + + let changeset_read = initialize(&mut store).expect("should load persisted changeset"); + assert_eq!(changeset_read, changeset); + + // create another local_chain_changeset, persist that and read it + let changeset_new = ChangeSet { + blocks: [(910427, Some(hash!("K")))].into(), + }; + + persist(&mut store, &changeset_new).expect("should persist changeset"); + + let changeset_read_new = initialize(&mut store).expect("should load persisted changeset"); + + let changeset = ChangeSet { + blocks: [ + (910425, Some(hash!("B"))), + (910426, Some(hash!("D"))), + (910427, Some(hash!("K"))), + ] + .into(), + }; + + assert_eq!(changeset, changeset_read_new); +} + +pub fn persist_last_seen( + file_name: &str, + create_store: CreateStore, + initialize: Initialize, + persist: Persist, +) where + CreateStore: Fn(&Path) -> anyhow::Result, + Initialize: Fn(&mut Store) -> anyhow::Result>, + Persist: Fn(&mut Store, &tx_graph::ChangeSet) -> anyhow::Result<()>, +{ + use tx_graph::ChangeSet; + let temp_dir = tempfile::tempdir().expect("must create tempdir"); + let file_path = temp_dir.path().join(file_name); + let mut store = create_store(&file_path).expect("store should get created"); + + let changeset = + initialize(&mut store).expect("store should initialize and we should get empty changeset"); + assert_eq!(changeset, 
ChangeSet::::default()); + + let tx1 = Arc::new(create_one_inp_one_out_tx(hash!("BTC"), 30_000)); + let tx2 = Arc::new(create_one_inp_one_out_tx(tx1.compute_txid(), 20_000)); + let tx3 = Arc::new(create_one_inp_one_out_tx(tx2.compute_txid(), 19_000)); + + // try persisting and reading last_seen + let txs: BTreeSet> = [tx1.clone(), tx2.clone()].into(); + let mut last_seen: BTreeMap = [ + (tx1.compute_txid(), 1755416700), + (tx2.compute_txid(), 1755416800), + ] + .into(); + + let changeset = ChangeSet:: { + txs, + last_seen: last_seen.clone(), + ..ChangeSet::::default() + }; + persist(&mut store, &changeset).expect("should persist changeset"); + let changeset_read = initialize(&mut store).expect("should load persisted changeset"); + assert_eq!(changeset_read.last_seen, last_seen); + + // persist another last_seen and see if what is read is same as merged one + let txs_new: BTreeSet> = [tx3.clone()].into(); + let last_seen_new: BTreeMap = [(tx3.compute_txid(), 1755417800)].into(); + + let changeset = ChangeSet:: { + txs: txs_new, + last_seen: last_seen_new.clone(), + ..ChangeSet::::default() + }; + persist(&mut store, &changeset).expect("should persist changeset"); + + let changeset_read_new = initialize(&mut store).expect("should load persisted changeset"); + last_seen.merge(last_seen_new); + assert_eq!(changeset_read_new.last_seen, last_seen); +} + +pub fn persist_last_evicted( + file_name: &str, + create_store: CreateStore, + initialize: Initialize, + persist: Persist, +) where + CreateStore: Fn(&Path) -> anyhow::Result, + Initialize: Fn(&mut Store) -> anyhow::Result>, + Persist: Fn(&mut Store, &tx_graph::ChangeSet) -> anyhow::Result<()>, +{ + use tx_graph::ChangeSet; + let temp_dir = tempfile::tempdir().expect("must create tempdir"); + let file_path = temp_dir.path().join(file_name); + let mut store = create_store(&file_path).expect("store should get created"); + + let changeset = + initialize(&mut store).expect("store should initialize and we should get empty changeset"); + assert_eq!(changeset, ChangeSet::::default()); + + let tx1 = Arc::new(create_one_inp_one_out_tx(hash!("BDK"), 30_000)); + let tx2 = Arc::new(create_one_inp_one_out_tx(tx1.compute_txid(), 20_000)); + let tx3 = Arc::new(create_one_inp_one_out_tx(tx2.compute_txid(), 19_000)); + + // try persisting and reading last_evicted + let mut last_evicted: BTreeMap = [ + (tx1.compute_txid(), 1755416600), + (tx2.compute_txid(), 1755416060), + ] + .into(); + + let changeset = ChangeSet:: { + last_evicted: last_evicted.clone(), + ..ChangeSet::::default() + }; + persist(&mut store, &changeset).expect("should persist changeset"); + let changeset_read = initialize(&mut store).expect("should load persisted changeset"); + assert_eq!(changeset_read.last_evicted, last_evicted); + + // persist another last_evicted and see if what is read is same as merged one + let last_evicted_new: BTreeMap = [(tx3.compute_txid(), 1755416700)].into(); + + let changeset = ChangeSet:: { + last_evicted: last_evicted_new.clone(), + ..ChangeSet::::default() + }; + persist(&mut store, &changeset).expect("should persist changeset"); + + let changeset_read_new = initialize(&mut store).expect("should load persisted changeset"); + last_evicted.merge(last_evicted_new); + assert_eq!(changeset_read_new.last_evicted, last_evicted); +} + +pub fn persist_first_seen( + file_name: &str, + create_store: CreateStore, + initialize: Initialize, + persist: Persist, +) where + CreateStore: Fn(&Path) -> anyhow::Result, + Initialize: Fn(&mut Store) -> anyhow::Result>, + Persist: 
Fn(&mut Store, &tx_graph::ChangeSet) -> anyhow::Result<()>, +{ + use tx_graph::ChangeSet; + let temp_dir = tempfile::tempdir().expect("must create tempdir"); + let file_path = temp_dir.path().join(file_name); + let mut store = create_store(&file_path).expect("store should get created"); + + let changeset = + initialize(&mut store).expect("store should initialize and we should get empty changeset"); + assert_eq!(changeset, ChangeSet::::default()); + + let tx1 = Arc::new(create_one_inp_one_out_tx(hash!("BTC"), 30_000)); + let tx2 = Arc::new(create_one_inp_one_out_tx(tx1.compute_txid(), 20_000)); + let tx3 = Arc::new(create_one_inp_one_out_tx(tx2.compute_txid(), 19_000)); + + // try persisting and reading first_seen + let txs: BTreeSet> = [tx1.clone(), tx2.clone()].into(); + let mut first_seen: BTreeMap = [ + (tx1.compute_txid(), 1755416600), + (tx2.compute_txid(), 1755416600), + ] + .into(); + + let changeset = ChangeSet:: { + txs, + first_seen: first_seen.clone(), + ..ChangeSet::::default() + }; + persist(&mut store, &changeset).expect("should persist changeset"); + let changeset_read = initialize(&mut store).expect("should load persisted changeset"); + assert_eq!(changeset_read.first_seen, first_seen); + + // persist another first_seen and see if what is read is same as merged one + let txs_new: BTreeSet> = [tx3.clone()].into(); + let first_seen_new: BTreeMap = [(tx3.compute_txid(), 1755416700)].into(); + + let changeset = ChangeSet:: { + txs: txs_new, + first_seen: first_seen_new.clone(), + ..ChangeSet::::default() + }; + persist(&mut store, &changeset).expect("should persist changeset"); + + let changeset_read_new = initialize(&mut store).expect("should load persisted changeset"); + first_seen.merge(first_seen_new); + assert_eq!(changeset_read_new.first_seen, first_seen); +} + +pub fn persist_txouts( + file_name: &str, + create_store: CreateStore, + initialize: Initialize, + persist: Persist, +) where + CreateStore: Fn(&Path) -> anyhow::Result, + Initialize: Fn(&mut Store) -> anyhow::Result>, + Persist: Fn(&mut Store, &tx_graph::ChangeSet) -> anyhow::Result<()>, +{ + use tx_graph::ChangeSet; + let temp_dir = tempfile::tempdir().expect("must create tempdir"); + let file_path = temp_dir.path().join(file_name); + let mut store = create_store(&file_path).expect("store should get created"); + + let changeset = initialize(&mut store).expect("should initialize and load empty changeset"); + assert_eq!(changeset, ChangeSet::default()); + + let mut txouts: BTreeMap = [ + ( + OutPoint::new(hash!("B"), 0), + TxOut { + value: Amount::from_sat(1300), + script_pubkey: Address::from_str("bcrt1q8an5jfmpq8w2hr648nn34ecf9zdtxk0qyqtrfl") + .unwrap() + .assume_checked() + .script_pubkey(), + }, + ), + ( + OutPoint::new(hash!("D"), 0), + TxOut { + value: Amount::from_sat(1400), + script_pubkey: Address::from_str("bcrt1q8an5jfmpq8w2hr648nn34ecf9zdtxk0qyqtrfl") + .unwrap() + .assume_checked() + .script_pubkey(), + }, + ), + ] + .into(); + + let changeset = ChangeSet:: { + txouts: txouts.clone(), + ..ChangeSet::::default() + }; + + persist(&mut store, &changeset).expect("should persist changeset"); + + let changeset_read = initialize(&mut store).expect("should load changeset"); + assert_eq!(changeset_read.txouts, txouts); + + let txouts_new: BTreeMap = [( + OutPoint::new(hash!("K"), 0), + TxOut { + value: Amount::from_sat(10000), + script_pubkey: Address::from_str("bcrt1q8an5jfmpq8w2hr648nn34ecf9zdtxk0qyqtrfl") + .unwrap() + .assume_checked() + .script_pubkey(), + }, + )] + .into(); + + let changeset = 
ChangeSet:: { + txouts: txouts_new.clone(), + ..ChangeSet::::default() + }; + + persist(&mut store, &changeset).expect("should persist changeset"); + + let changeset_read_new = initialize(&mut store).expect("should load changeset"); + txouts.merge(txouts_new); + assert_eq!(changeset_read_new.txouts, txouts); +} + +pub fn persist_txs( + file_name: &str, + create_store: CreateStore, + initialize: Initialize, + persist: Persist, +) where + CreateStore: Fn(&Path) -> anyhow::Result, + Initialize: Fn(&mut Store) -> anyhow::Result>, + Persist: Fn(&mut Store, &tx_graph::ChangeSet) -> anyhow::Result<()>, +{ + use tx_graph::ChangeSet; + let temp_dir = tempfile::tempdir().expect("must create tempdir"); + let file_path = temp_dir.path().join(file_name); + let mut store = create_store(&file_path).expect("store should get created"); + + let changeset = initialize(&mut store).expect("should initialize and load empty changeset"); + assert_eq!(changeset, ChangeSet::::default()); + + let tx1 = Arc::new(create_one_inp_one_out_tx(hash!("BTC"), 30_000)); + let tx2 = Arc::new(create_one_inp_one_out_tx(tx1.compute_txid(), 20_000)); + let tx3 = Arc::new(create_one_inp_one_out_tx(tx2.compute_txid(), 19_000)); + + let mut txs: BTreeSet> = [tx1, tx2.clone()].into(); + + let changeset = ChangeSet:: { + txs: txs.clone(), + ..ChangeSet::::default() + }; + + persist(&mut store, &changeset).expect("should persist changeset"); + let changeset_read = initialize(&mut store).expect("should load persisted changeset"); + assert_eq!(changeset_read.txs, txs); + + let txs_new: BTreeSet> = [tx3].into(); + + let changeset = ChangeSet:: { + txs: txs_new.clone(), + ..ChangeSet::::default() + }; + + persist(&mut store, &changeset).expect("should persist changeset"); + let changeset_read_new = initialize(&mut store).expect("should load persisted changeset"); + txs.merge(txs_new); + assert_eq!(changeset_read_new.txs, txs); +} + +pub fn persist_anchors( + file_name: &str, + create_store: CreateStore, + initialize: Initialize, + persist: Persist, +) where + CreateStore: Fn(&Path) -> anyhow::Result, + Initialize: Fn(&mut Store) -> anyhow::Result>, + Persist: Fn(&mut Store, &tx_graph::ChangeSet) -> anyhow::Result<()>, +{ + use tx_graph::ChangeSet; + let temp_dir = tempfile::tempdir().expect("must create tempdir"); + let file_path = temp_dir.path().join(file_name); + let mut store = create_store(&file_path).expect("store should get created"); + + let changeset = initialize(&mut store).expect("should initialize and load empty changeset"); + assert_eq!(changeset, ChangeSet::::default()); + + let tx1 = Arc::new(create_one_inp_one_out_tx(hash!(""), 30_000)); + let tx2 = Arc::new(create_one_inp_one_out_tx(tx1.compute_txid(), 20_000)); + let tx3 = Arc::new(create_one_inp_one_out_tx(tx2.compute_txid(), 19_000)); + + let anchor1 = ConfirmationBlockTime { + block_id: block_id!(23, "BTC"), + confirmation_time: 1756838400, + }; + + let anchor2 = ConfirmationBlockTime { + block_id: block_id!(25, "BDK"), + confirmation_time: 1756839600, + }; + + let txs: BTreeSet> = [tx1.clone(), tx2.clone()].into(); + let mut anchors: BTreeSet<(ConfirmationBlockTime, Txid)> = + [(anchor1, tx1.compute_txid()), (anchor2, tx2.compute_txid())].into(); + + let changeset = ChangeSet:: { + txs, + anchors: anchors.clone(), + ..ChangeSet::::default() + }; + + persist(&mut store, &changeset).expect("should persist changeset"); + let changeset_read = initialize(&mut store).expect("should load persisted changeset"); + assert_eq!(changeset_read.anchors, anchors); + + let txs_new: 
BTreeSet> = [tx3.clone()].into(); + let anchors_new: BTreeSet<(ConfirmationBlockTime, Txid)> = + [(anchor2, tx3.compute_txid())].into(); + + let changeset = ChangeSet:: { + txs: txs_new, + anchors: anchors_new.clone(), + ..ChangeSet::::default() + }; + + persist(&mut store, &changeset).expect("should persist changeset"); + let changeset_read = initialize(&mut store).expect("should load persisted changeset"); + + anchors.merge(anchors_new); + assert_eq!(changeset_read.anchors, anchors); +} + +// check the merge by changing asserts +pub fn persist_last_revealed( + file_name: &str, + create_store: CreateStore, + initialize: Initialize, + persist: Persist, +) where + CreateStore: Fn(&Path) -> anyhow::Result, + Initialize: Fn(&mut Store) -> anyhow::Result, + Persist: Fn(&mut Store, &keychain_txout::ChangeSet) -> anyhow::Result<()>, +{ + use keychain_txout::ChangeSet; + let temp_dir = tempfile::tempdir().expect("must create tempdir"); + let file_path = temp_dir.path().join(file_name); + let mut store = create_store(&file_path).expect("store should get created"); + + let changeset = initialize(&mut store).expect("should initialize and load empty changeset"); + assert_eq!(changeset, ChangeSet::default()); + + let descriptor_ids = crate::utils::DESCRIPTORS.map(|d| parse_descriptor(d).descriptor_id()); + + let mut last_revealed: BTreeMap = + [(descriptor_ids[0], 1), (descriptor_ids[1], 100)].into(); + + let changeset = ChangeSet { + last_revealed: last_revealed.clone(), + ..ChangeSet::default() + }; + + persist(&mut store, &changeset).expect("should persist changeset"); + let changeset_read = initialize(&mut store).expect("should load persisted changeset"); + assert_eq!(changeset_read.last_revealed, last_revealed); + + let last_revealed_new: BTreeMap = [(descriptor_ids[0], 2)].into(); + + let changeset = ChangeSet { + last_revealed: last_revealed_new.clone(), + ..ChangeSet::default() + }; + + persist(&mut store, &changeset).expect("should persist changeset"); + let changeset_read_new = initialize(&mut store).expect("should load persisted changeset"); + last_revealed.merge(last_revealed_new); + assert_eq!(changeset_read_new.last_revealed, last_revealed); +} + +pub fn persist_spk_cache( + file_name: &str, + create_store: CreateStore, + initialize: Initialize, + persist: Persist, +) where + CreateStore: Fn(&Path) -> anyhow::Result, + Initialize: Fn(&mut Store) -> anyhow::Result, + Persist: Fn(&mut Store, &keychain_txout::ChangeSet) -> anyhow::Result<()>, +{ + use keychain_txout::ChangeSet; + let temp_dir = tempfile::tempdir().expect("must create tempdir"); + let file_path = temp_dir.path().join(file_name); + let mut store = create_store(&file_path).expect("store should get created"); + + let changeset = initialize(&mut store).expect("should initialize and load empty changeset"); + assert_eq!(changeset, ChangeSet::default()); + + let descriptor_ids = crate::utils::DESCRIPTORS.map(|d| parse_descriptor(d).descriptor_id()); + let descs = crate::utils::DESCRIPTORS.map(parse_descriptor); + + let spk_cache: BTreeMap> = [ + ( + descriptor_ids[0], + SpkIterator::new_with_range(&descs[0], 0..=125).collect(), + ), + ( + descriptor_ids[1], + SpkIterator::new_with_range(&descs[0], 0..=25).collect(), + ), + ] + .into(); + + let changeset = ChangeSet { + spk_cache: spk_cache.clone(), + ..ChangeSet::default() + }; + + persist(&mut store, &changeset).expect("should persist changeset"); + let changeset_read = initialize(&mut store).expect("should load persisted changeset"); + assert_eq!(changeset_read.spk_cache, 
spk_cache); + + let spk_cache_new: BTreeMap> = [( + descriptor_ids[0], + SpkIterator::new_with_range(&descs[0], 126..=150).collect(), + )] + .into(); + + let changeset = ChangeSet { + spk_cache: spk_cache_new, + ..ChangeSet::default() + }; + + persist(&mut store, &changeset).expect("should persist changeset"); + let changeset_read_new = initialize(&mut store).expect("should load persisted changeset"); + let spk_cache: BTreeMap> = [ + ( + descriptor_ids[0], + SpkIterator::new_with_range(&descs[0], 0..=150).collect(), + ), + ( + descriptor_ids[1], + SpkIterator::new_with_range(&descs[0], 0..=25).collect(), + ), + ] + .into(); + assert_eq!(changeset_read_new.spk_cache, spk_cache); +} From 993dde902113a9e9d648b2f8e9754b02fa7f1bef Mon Sep 17 00:00:00 2001 From: codingp110 Date: Thu, 14 Aug 2025 23:32:08 +0530 Subject: [PATCH 2/3] test: use test utils to test file_store and sqlite --- crates/chain/tests/test_rusqlite_impl.rs | 227 +++++++++++++++++++++++ crates/file_store/Cargo.toml | 2 + crates/file_store/src/store.rs | 117 ++++++++++++ 3 files changed, 346 insertions(+) create mode 100644 crates/chain/tests/test_rusqlite_impl.rs diff --git a/crates/chain/tests/test_rusqlite_impl.rs b/crates/chain/tests/test_rusqlite_impl.rs new file mode 100644 index 000000000..6067a9864 --- /dev/null +++ b/crates/chain/tests/test_rusqlite_impl.rs @@ -0,0 +1,227 @@ +#![cfg(feature = "rusqlite")] +use bdk_chain::{keychain_txout, local_chain, tx_graph, ConfirmationBlockTime}; +use bdk_testenv::persist_test_utils::{ + persist_anchors, persist_first_seen, persist_indexer_changeset, persist_last_evicted, + persist_last_revealed, persist_last_seen, persist_local_chain_changeset, persist_spk_cache, + persist_txgraph_changeset, persist_txouts, persist_txs, +}; + +#[test] +fn txgraph_is_persisted() { + persist_txgraph_changeset::( + "wallet.sqlite", + |path| Ok(bdk_chain::rusqlite::Connection::open(path)?), + |db| { + let db_tx = db.transaction()?; + tx_graph::ChangeSet::::init_sqlite_tables(&db_tx)?; + let changeset = tx_graph::ChangeSet::::from_sqlite(&db_tx)?; + db_tx.commit()?; + Ok(changeset) + }, + |db, changeset| { + let db_tx = db.transaction()?; + changeset.persist_to_sqlite(&db_tx)?; + Ok(db_tx.commit()?) + }, + ); +} + +#[test] +fn indexer_is_persisted() { + persist_indexer_changeset::( + "wallet.sqlite", + |path| Ok(rusqlite::Connection::open(path)?), + |db| { + let db_tx = db.transaction()?; + keychain_txout::ChangeSet::init_sqlite_tables(&db_tx)?; + let changeset = keychain_txout::ChangeSet::from_sqlite(&db_tx)?; + db_tx.commit()?; + Ok(changeset) + }, + |db, changeset| { + let db_tx = db.transaction()?; + changeset.persist_to_sqlite(&db_tx)?; + Ok(db_tx.commit()?) + }, + ); +} + +#[test] +fn local_chain_is_persisted() { + persist_local_chain_changeset::( + "wallet.sqlite", + |path| Ok(rusqlite::Connection::open(path)?), + |db| { + let db_tx = db.transaction()?; + local_chain::ChangeSet::init_sqlite_tables(&db_tx)?; + let changeset = local_chain::ChangeSet::from_sqlite(&db_tx)?; + db_tx.commit()?; + Ok(changeset) + }, + |db, changeset| { + let db_tx = db.transaction()?; + changeset.persist_to_sqlite(&db_tx)?; + Ok(db_tx.commit()?) 
+ }, + ); +} + +#[test] +fn txouts_are_persisted() { + persist_txouts::( + "wallet.sqlite", + |path| Ok(bdk_chain::rusqlite::Connection::open(path)?), + |db| { + let db_tx = db.transaction()?; + tx_graph::ChangeSet::::init_sqlite_tables(&db_tx)?; + let changeset = tx_graph::ChangeSet::::from_sqlite(&db_tx)?; + db_tx.commit()?; + Ok(changeset) + }, + |db, changeset| { + let db_tx = db.transaction()?; + changeset.persist_to_sqlite(&db_tx)?; + Ok(db_tx.commit()?) + }, + ); +} + +#[test] +fn txs_are_persisted() { + persist_txs::( + "wallet.sqlite", + |path| Ok(bdk_chain::rusqlite::Connection::open(path)?), + |db| { + let db_tx = db.transaction()?; + tx_graph::ChangeSet::::init_sqlite_tables(&db_tx)?; + let changeset = tx_graph::ChangeSet::::from_sqlite(&db_tx)?; + db_tx.commit()?; + Ok(changeset) + }, + |db, changeset| { + let db_tx = db.transaction()?; + changeset.persist_to_sqlite(&db_tx)?; + Ok(db_tx.commit()?) + }, + ); +} + +#[test] +fn anchors_are_persisted() { + persist_anchors::( + "wallet.sqlite", + |path| Ok(bdk_chain::rusqlite::Connection::open(path)?), + |db| { + let db_tx = db.transaction()?; + tx_graph::ChangeSet::::init_sqlite_tables(&db_tx)?; + let changeset = tx_graph::ChangeSet::::from_sqlite(&db_tx)?; + db_tx.commit()?; + Ok(changeset) + }, + |db, changeset| { + let db_tx = db.transaction()?; + changeset.persist_to_sqlite(&db_tx)?; + Ok(db_tx.commit()?) + }, + ); +} + +#[test] +fn last_seen_is_persisted() { + persist_last_seen::( + "wallet.sqlite", + |path| Ok(bdk_chain::rusqlite::Connection::open(path)?), + |db| { + let db_tx = db.transaction()?; + tx_graph::ChangeSet::::init_sqlite_tables(&db_tx)?; + let changeset = tx_graph::ChangeSet::::from_sqlite(&db_tx)?; + db_tx.commit()?; + Ok(changeset) + }, + |db, changeset| { + let db_tx = db.transaction()?; + changeset.persist_to_sqlite(&db_tx)?; + Ok(db_tx.commit()?) + }, + ); +} + +#[test] +fn last_evicted_is_persisted() { + persist_last_evicted::( + "wallet.sqlite", + |path| Ok(bdk_chain::rusqlite::Connection::open(path)?), + |db| { + let db_tx = db.transaction()?; + tx_graph::ChangeSet::::init_sqlite_tables(&db_tx)?; + let changeset = tx_graph::ChangeSet::::from_sqlite(&db_tx)?; + db_tx.commit()?; + Ok(changeset) + }, + |db, changeset| { + let db_tx = db.transaction()?; + changeset.persist_to_sqlite(&db_tx)?; + Ok(db_tx.commit()?) + }, + ); +} + +#[test] +fn first_seen_is_persisted() { + persist_first_seen::( + "wallet.sqlite", + |path| Ok(bdk_chain::rusqlite::Connection::open(path)?), + |db| { + let db_tx = db.transaction()?; + tx_graph::ChangeSet::::init_sqlite_tables(&db_tx)?; + let changeset = tx_graph::ChangeSet::::from_sqlite(&db_tx)?; + db_tx.commit()?; + Ok(changeset) + }, + |db, changeset| { + let db_tx = db.transaction()?; + changeset.persist_to_sqlite(&db_tx)?; + Ok(db_tx.commit()?) + }, + ); +} + +#[test] +fn last_revealed_is_persisted() { + persist_last_revealed::( + "wallet.sqlite", + |path| Ok(rusqlite::Connection::open(path)?), + |db| { + let db_tx = db.transaction()?; + keychain_txout::ChangeSet::init_sqlite_tables(&db_tx)?; + let changeset = keychain_txout::ChangeSet::from_sqlite(&db_tx)?; + db_tx.commit()?; + Ok(changeset) + }, + |db, changeset| { + let db_tx = db.transaction()?; + changeset.persist_to_sqlite(&db_tx)?; + Ok(db_tx.commit()?) 
+ }, + ); +} + +#[test] +fn spk_cache_is_persisted() { + persist_spk_cache::( + "wallet.sqlite", + |path| Ok(rusqlite::Connection::open(path)?), + |db| { + let db_tx = db.transaction()?; + keychain_txout::ChangeSet::init_sqlite_tables(&db_tx)?; + let changeset = keychain_txout::ChangeSet::from_sqlite(&db_tx)?; + db_tx.commit()?; + Ok(changeset) + }, + |db, changeset| { + let db_tx = db.transaction()?; + changeset.persist_to_sqlite(&db_tx)?; + Ok(db_tx.commit()?) + }, + ); +} diff --git a/crates/file_store/Cargo.toml b/crates/file_store/Cargo.toml index 4b0ace935..4cbd24b1b 100644 --- a/crates/file_store/Cargo.toml +++ b/crates/file_store/Cargo.toml @@ -20,3 +20,5 @@ serde = { version = "1", features = ["derive"] } [dev-dependencies] tempfile = "3" +bdk_testenv = {path = "../testenv"} +bdk_chain = { path = "../chain", version = "0.23.1", default-features = false, features = ["serde"]} \ No newline at end of file diff --git a/crates/file_store/src/store.rs b/crates/file_store/src/store.rs index 7e1867926..f36205af5 100644 --- a/crates/file_store/src/store.rs +++ b/crates/file_store/src/store.rs @@ -295,6 +295,13 @@ mod test { const TEST_MAGIC_BYTES: [u8; TEST_MAGIC_BYTES_LEN] = [98, 100, 107, 102, 115, 49, 49, 49, 49, 49, 49, 49]; + use bdk_chain::{keychain_txout, local_chain, tx_graph, ConfirmationBlockTime}; + use bdk_testenv::persist_test_utils::{ + persist_anchors, persist_first_seen, persist_indexer_changeset, persist_last_evicted, + persist_last_revealed, persist_last_seen, persist_local_chain_changeset, persist_spk_cache, + persist_txgraph_changeset, persist_txouts, persist_txs, + }; + type TestChangeSet = BTreeSet; /// Check behavior of [`Store::create`] and [`Store::load`]. @@ -599,4 +606,114 @@ mod test { // current position matches EOF assert_eq!(current_pointer, expected_pointer); } + + #[test] + fn txgraph_is_persisted() { + persist_txgraph_changeset::>, _, _, _>( + "wallet.db", + |path| Ok(Store::create(&TEST_MAGIC_BYTES, path)?), + |db| Ok(db.dump().map(Option::unwrap_or_default)?), + |db, changeset| Ok(db.append(changeset)?), + ); + } + + #[test] + fn indexer_is_persisted() { + persist_indexer_changeset::, _, _, _>( + "wallet.db", + |path| Ok(Store::create(&TEST_MAGIC_BYTES, path)?), + |db| Ok(db.dump().map(Option::unwrap_or_default)?), + |db, changeset| Ok(db.append(changeset)?), + ); + } + + #[test] + fn local_chain_is_persisted() { + persist_local_chain_changeset::, _, _, _>( + "wallet.db", + |path| Ok(Store::create(&TEST_MAGIC_BYTES, path)?), + |db| Ok(db.dump().map(Option::unwrap_or_default)?), + |db, changeset| Ok(db.append(changeset)?), + ); + } + + #[test] + fn txouts_are_persisted() { + persist_txouts::>, _, _, _>( + "wallet.db", + |path| Ok(Store::create(&TEST_MAGIC_BYTES, path)?), + |db| Ok(db.dump().map(Option::unwrap_or_default)?), + |db, changeset| Ok(db.append(changeset)?), + ); + } + + #[test] + fn txs_are_persisted() { + persist_txs::>, _, _, _>( + "wallet.db", + |path| Ok(Store::create(&TEST_MAGIC_BYTES, path)?), + |db| Ok(db.dump().map(Option::unwrap_or_default)?), + |db, changeset| Ok(db.append(changeset)?), + ); + } + + #[test] + fn anchors_are_persisted() { + persist_anchors::>, _, _, _>( + "wallet.db", + |path| Ok(Store::create(&TEST_MAGIC_BYTES, path)?), + |db| Ok(db.dump().map(Option::unwrap_or_default)?), + |db, changeset| Ok(db.append(changeset)?), + ); + } + + #[test] + fn last_seen_is_persisted() { + persist_last_seen::>, _, _, _>( + "wallet.db", + |path| Ok(Store::create(&TEST_MAGIC_BYTES, path)?), + |db| 
Ok(db.dump().map(Option::unwrap_or_default)?), + |db, changeset| Ok(db.append(changeset)?), + ); + } + + #[test] + fn last_evicted_is_persisted() { + persist_last_evicted::>, _, _, _>( + "wallet.db", + |path| Ok(Store::create(&TEST_MAGIC_BYTES, path)?), + |db| Ok(db.dump().map(Option::unwrap_or_default)?), + |db, changeset| Ok(db.append(changeset)?), + ); + } + + #[test] + fn first_seen_is_persisted() { + persist_first_seen::>, _, _, _>( + "wallet.db", + |path| Ok(Store::create(&TEST_MAGIC_BYTES, path)?), + |db| Ok(db.dump().map(Option::unwrap_or_default)?), + |db, changeset| Ok(db.append(changeset)?), + ); + } + + #[test] + fn last_revealed_is_persisted() { + persist_last_revealed::, _, _, _>( + "wallet.db", + |path| Ok(Store::create(&TEST_MAGIC_BYTES, path)?), + |db| Ok(db.dump().map(Option::unwrap_or_default)?), + |db, changeset| Ok(db.append(changeset)?), + ); + } + + #[test] + fn spk_cache_is_persisted() { + persist_spk_cache::, _, _, _>( + "wallet.db", + |path| Ok(Store::create(&TEST_MAGIC_BYTES, path)?), + |db| Ok(db.dump().map(Option::unwrap_or_default)?), + |db, changeset| Ok(db.append(changeset)?), + ); + } } From 83b54a60ddb97465911efc1379e01b4296b25317 Mon Sep 17 00:00:00 2001 From: codingp110 Date: Fri, 15 Aug 2025 13:19:53 +0530 Subject: [PATCH 3/3] docs: document persist_test_utils module --- crates/testenv/src/persist_test_utils.rs | 142 +++++++++++++++++++++-- 1 file changed, 135 insertions(+), 7 deletions(-) diff --git a/crates/testenv/src/persist_test_utils.rs b/crates/testenv/src/persist_test_utils.rs index 82a42dd5a..3765cea9f 100644 --- a/crates/testenv/src/persist_test_utils.rs +++ b/crates/testenv/src/persist_test_utils.rs @@ -1,3 +1,4 @@ +//! This module provides utility functions for testing custom persistence backends. use crate::block_id; use crate::hash; use bdk_chain::bitcoin; @@ -40,6 +41,14 @@ fn spk_at_index(descriptor: &Descriptor, index: u32) -> Scr .script_pubkey() } +/// tests if [`TxGraph`] is being persisted correctly +/// +/// [`TxGraph`]: +/// [`tx_graph::ChangeSet`]: +/// +/// We create a dummy [`tx_graph::ChangeSet`], persist it and check if loaded `ChangeSet` matches +/// the persisted one. We then create another such dummy `ChangeSet`, persist it and load it to +/// check if merged `ChangeSet` is returned. 
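+///
+/// # Example
+///
+/// A minimal sketch of wiring this helper to a SQLite-backed store, mirroring the
+/// `rusqlite` integration tests added alongside these utilities (the closure bodies are
+/// illustrative; any store that can load and append a `ChangeSet` will do):
+///
+/// ```ignore
+/// use bdk_chain::{rusqlite, tx_graph, ConfirmationBlockTime};
+///
+/// persist_txgraph_changeset::<rusqlite::Connection, _, _, _>(
+///     "wallet.sqlite",
+///     // create the store
+///     |path| Ok(rusqlite::Connection::open(path)?),
+///     // load the aggregate changeset back out of the store
+///     |db| {
+///         let db_tx = db.transaction()?;
+///         tx_graph::ChangeSet::<ConfirmationBlockTime>::init_sqlite_tables(&db_tx)?;
+///         let changeset = tx_graph::ChangeSet::<ConfirmationBlockTime>::from_sqlite(&db_tx)?;
+///         db_tx.commit()?;
+///         Ok(changeset)
+///     },
+///     // persist one changeset
+///     |db, changeset| {
+///         let db_tx = db.transaction()?;
+///         changeset.persist_to_sqlite(&db_tx)?;
+///         Ok(db_tx.commit()?)
+///     },
+/// );
+/// ```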
pub fn persist_txgraph_changeset( file_name: &str, create_store: CreateStore, @@ -51,13 +60,16 @@ pub fn persist_txgraph_changeset( Persist: Fn(&mut Store, &tx_graph::ChangeSet) -> anyhow::Result<()>, { use tx_graph::ChangeSet; + // create the store let temp_dir = tempfile::tempdir().expect("must create tempdir"); let file_path = temp_dir.path().join(file_name); let mut store = create_store(&file_path).expect("store should get created"); + // initialize store let changeset = initialize(&mut store).expect("should load empty changeset"); assert_eq!(changeset, ChangeSet::::default()); + // create changeset let tx1 = Arc::new(create_one_inp_one_out_tx(hash!("BTC"), 30_000)); let conf_anchor: ConfirmationBlockTime = ConfirmationBlockTime { @@ -100,11 +112,13 @@ pub fn persist_txgraph_changeset( last_evicted: [(tx1.compute_txid(), 1755416660)].into(), }; + // persist and load persist(&mut store, &tx_graph_changeset1).expect("should persist changeset"); let changeset = initialize(&mut store).expect("should load persisted changeset"); assert_eq!(changeset, tx_graph_changeset1); + // create another changeset let tx2 = Arc::new(create_one_inp_one_out_tx(tx1.compute_txid(), 20_000)); let conf_anchor: ConfirmationBlockTime = ConfirmationBlockTime { @@ -131,6 +145,7 @@ pub fn persist_txgraph_changeset( last_evicted: [(tx2.compute_txid(), 1755416760)].into(), }; + // persist, load and check if same as merged persist(&mut store, &tx_graph_changeset2).expect("should persist changeset"); let changeset = initialize(&mut store).expect("should load persisted changeset"); @@ -147,6 +162,15 @@ fn parse_descriptor(descriptor: &str) -> Descriptor { .0 } +/// tests if [`KeychainTxOutIndex`] is being persisted correctly +/// +/// [`KeychainTxOutIndex`]: +/// +/// [`keychain_txout::ChangeSet`]: +/// +/// We create a dummy [`keychain_txout::ChangeSet`], persist it and check if loaded `ChangeSet` +/// matches the persisted one. We then create another such dummy `ChangeSet`, persist it and load it +/// to check if merged `ChangeSet` is returned. 
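+///
+/// # Example
+///
+/// A minimal sketch using the append-only `bdk_file_store::Store` backend instead, mirroring
+/// the `file_store` tests added alongside these utilities (`MAGIC_BYTES` stands in for
+/// whatever magic-bytes constant the caller's store is created with):
+///
+/// ```ignore
+/// use bdk_chain::indexer::keychain_txout;
+/// use bdk_file_store::Store;
+///
+/// persist_indexer_changeset::<Store<keychain_txout::ChangeSet>, _, _, _>(
+///     "wallet.db",
+///     |path| Ok(Store::create(&MAGIC_BYTES, path)?),
+///     // loading simply aggregates every changeset appended so far
+///     |db| Ok(db.dump().map(Option::unwrap_or_default)?),
+///     // persisting appends the changeset to the end of the file
+///     |db, changeset| Ok(db.append(changeset)?),
+/// );
+/// ```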
pub fn persist_indexer_changeset( file_name: &str, create_store: CreateStore, @@ -160,13 +184,16 @@ pub fn persist_indexer_changeset( use crate::utils::DESCRIPTORS; use keychain_txout::ChangeSet; + // create the store let temp_dir = tempfile::tempdir().expect("must create tempdir"); let file_path = temp_dir.path().join(file_name); let mut store = create_store(&file_path).expect("store should get created"); + // initialize store let changeset = initialize(&mut store).expect("should load empty changeset"); assert_eq!(changeset, ChangeSet::default()); + // create changeset let descriptor_ids = DESCRIPTORS.map(|d| parse_descriptor(d).descriptor_id()); let descs = DESCRIPTORS.map(parse_descriptor); @@ -185,12 +212,14 @@ pub fn persist_indexer_changeset( .into(), }; + // persist and load persist(&mut store, &changeset).expect("should persist keychain_txout"); let changeset_read = initialize(&mut store).expect("should load persisted changeset"); assert_eq!(changeset_read, changeset); + // create another changeset let changeset_new = ChangeSet { last_revealed: [(descriptor_ids[0], 2)].into(), spk_cache: [( @@ -200,6 +229,7 @@ pub fn persist_indexer_changeset( .into(), }; + // persist, load and check if same as merged persist(&mut store, &changeset_new).expect("should persist second changeset"); let changeset_read_new = initialize(&mut store).expect("should load merged changesets"); @@ -208,6 +238,14 @@ pub fn persist_indexer_changeset( assert_eq!(changeset_read_new, changeset); } +/// tests if [`LocalChain`] is being persisted correctly +/// +/// [`LocalChain`]: +/// [`local_chain::ChangeSet`]: +/// +/// We create a dummy [`local_chain::ChangeSet`], persist it and check if loaded `ChangeSet` matches +/// the persisted one. We then create another such dummy `ChangeSet`, persist it and load it to +/// check if merged `ChangeSet` is returned. 
pub fn persist_local_chain_changeset( file_name: &str, create_store: CreateStore, @@ -219,27 +257,32 @@ pub fn persist_local_chain_changeset( Persist: Fn(&mut Store, &local_chain::ChangeSet) -> anyhow::Result<()>, { use local_chain::ChangeSet; + // create the store let temp_dir = tempfile::tempdir().expect("must create tempdir"); let file_path = temp_dir.path().join(file_name); let mut store = create_store(&file_path).expect("store should get created"); + // initialize store let changeset = initialize(&mut store).expect("should load empty changeset"); assert_eq!(changeset, ChangeSet::default()); + // create changeset let changeset = ChangeSet { blocks: [(910425, Some(hash!("B"))), (910426, Some(hash!("D")))].into(), }; + // persist and load persist(&mut store, &changeset).expect("should persist changeset"); let changeset_read = initialize(&mut store).expect("should load persisted changeset"); assert_eq!(changeset_read, changeset); - // create another local_chain_changeset, persist that and read it + // create another changeset let changeset_new = ChangeSet { blocks: [(910427, Some(hash!("K")))].into(), }; + // persist, load and check if same as merged persist(&mut store, &changeset_new).expect("should persist changeset"); let changeset_read_new = initialize(&mut store).expect("should load persisted changeset"); @@ -256,6 +299,12 @@ pub fn persist_local_chain_changeset( assert_eq!(changeset, changeset_read_new); } +/// tests if `last_seen` field of [`tx_graph::ChangeSet`] is being persisted correctly +/// +/// We create a dummy [`tx_graph::ChangeSet`] with only `last_seen` and `txs` fields populated, +/// persist it and check if loaded `ChangeSet` matches the persisted one. We then create +/// another such dummy `ChangeSet`, persist it and load it to check if merged `ChangeSet` is +/// returned. 
pub fn persist_last_seen( file_name: &str, create_store: CreateStore, @@ -267,19 +316,21 @@ pub fn persist_last_seen( Persist: Fn(&mut Store, &tx_graph::ChangeSet) -> anyhow::Result<()>, { use tx_graph::ChangeSet; + // create store let temp_dir = tempfile::tempdir().expect("must create tempdir"); let file_path = temp_dir.path().join(file_name); let mut store = create_store(&file_path).expect("store should get created"); + // initialize store let changeset = initialize(&mut store).expect("store should initialize and we should get empty changeset"); assert_eq!(changeset, ChangeSet::::default()); + // create changeset let tx1 = Arc::new(create_one_inp_one_out_tx(hash!("BTC"), 30_000)); let tx2 = Arc::new(create_one_inp_one_out_tx(tx1.compute_txid(), 20_000)); let tx3 = Arc::new(create_one_inp_one_out_tx(tx2.compute_txid(), 19_000)); - // try persisting and reading last_seen let txs: BTreeSet> = [tx1.clone(), tx2.clone()].into(); let mut last_seen: BTreeMap = [ (tx1.compute_txid(), 1755416700), @@ -292,11 +343,12 @@ pub fn persist_last_seen( last_seen: last_seen.clone(), ..ChangeSet::::default() }; + // persist and load persist(&mut store, &changeset).expect("should persist changeset"); let changeset_read = initialize(&mut store).expect("should load persisted changeset"); assert_eq!(changeset_read.last_seen, last_seen); - // persist another last_seen and see if what is read is same as merged one + // create another changeset let txs_new: BTreeSet> = [tx3.clone()].into(); let last_seen_new: BTreeMap = [(tx3.compute_txid(), 1755417800)].into(); @@ -305,6 +357,7 @@ pub fn persist_last_seen( last_seen: last_seen_new.clone(), ..ChangeSet::::default() }; + // persist, load and check if same as merged persist(&mut store, &changeset).expect("should persist changeset"); let changeset_read_new = initialize(&mut store).expect("should load persisted changeset"); @@ -312,6 +365,12 @@ pub fn persist_last_seen( assert_eq!(changeset_read_new.last_seen, last_seen); } +/// tests if `last_evicted` field of [`tx_graph::ChangeSet`] is being persisted correctly +/// +/// We create a dummy [`tx_graph::ChangeSet`] with only `last_evicted` and `txs` fields populated, +/// persist it and check if loaded `ChangeSet` matches the persisted one. We then create +/// another such dummy `ChangeSet`, persist it and load it to check if merged `ChangeSet` is +/// returned. 
pub fn persist_last_evicted( file_name: &str, create_store: CreateStore, @@ -323,14 +382,17 @@ pub fn persist_last_evicted( Persist: Fn(&mut Store, &tx_graph::ChangeSet) -> anyhow::Result<()>, { use tx_graph::ChangeSet; + // create store let temp_dir = tempfile::tempdir().expect("must create tempdir"); let file_path = temp_dir.path().join(file_name); let mut store = create_store(&file_path).expect("store should get created"); + // initialize store let changeset = initialize(&mut store).expect("store should initialize and we should get empty changeset"); assert_eq!(changeset, ChangeSet::::default()); + // create changeset let tx1 = Arc::new(create_one_inp_one_out_tx(hash!("BDK"), 30_000)); let tx2 = Arc::new(create_one_inp_one_out_tx(tx1.compute_txid(), 20_000)); let tx3 = Arc::new(create_one_inp_one_out_tx(tx2.compute_txid(), 19_000)); @@ -346,17 +408,20 @@ pub fn persist_last_evicted( last_evicted: last_evicted.clone(), ..ChangeSet::::default() }; + // persist and load persist(&mut store, &changeset).expect("should persist changeset"); let changeset_read = initialize(&mut store).expect("should load persisted changeset"); assert_eq!(changeset_read.last_evicted, last_evicted); - // persist another last_evicted and see if what is read is same as merged one + // create another changeset let last_evicted_new: BTreeMap = [(tx3.compute_txid(), 1755416700)].into(); let changeset = ChangeSet:: { last_evicted: last_evicted_new.clone(), ..ChangeSet::::default() }; + + // persist, load and check if same as merged persist(&mut store, &changeset).expect("should persist changeset"); let changeset_read_new = initialize(&mut store).expect("should load persisted changeset"); @@ -364,6 +429,12 @@ pub fn persist_last_evicted( assert_eq!(changeset_read_new.last_evicted, last_evicted); } +/// tests if `first_seen` field of [`tx_graph::ChangeSet`] is being persisted correctly +/// +/// We create a dummy [`tx_graph::ChangeSet`] with only `first_seen` and `txs` fields populated, +/// persist it and check if loaded `ChangeSet` matches the persisted one. We then create +/// another such dummy `ChangeSet`, persist it and load it to check if merged `ChangeSet` is +/// returned. 
pub fn persist_first_seen( file_name: &str, create_store: CreateStore, @@ -375,19 +446,21 @@ pub fn persist_first_seen( Persist: Fn(&mut Store, &tx_graph::ChangeSet) -> anyhow::Result<()>, { use tx_graph::ChangeSet; + // create store let temp_dir = tempfile::tempdir().expect("must create tempdir"); let file_path = temp_dir.path().join(file_name); let mut store = create_store(&file_path).expect("store should get created"); + // initialize store let changeset = initialize(&mut store).expect("store should initialize and we should get empty changeset"); assert_eq!(changeset, ChangeSet::::default()); + // create changeset let tx1 = Arc::new(create_one_inp_one_out_tx(hash!("BTC"), 30_000)); let tx2 = Arc::new(create_one_inp_one_out_tx(tx1.compute_txid(), 20_000)); let tx3 = Arc::new(create_one_inp_one_out_tx(tx2.compute_txid(), 19_000)); - // try persisting and reading first_seen let txs: BTreeSet> = [tx1.clone(), tx2.clone()].into(); let mut first_seen: BTreeMap = [ (tx1.compute_txid(), 1755416600), @@ -400,11 +473,12 @@ pub fn persist_first_seen( first_seen: first_seen.clone(), ..ChangeSet::::default() }; + // persist and load persist(&mut store, &changeset).expect("should persist changeset"); let changeset_read = initialize(&mut store).expect("should load persisted changeset"); assert_eq!(changeset_read.first_seen, first_seen); - // persist another first_seen and see if what is read is same as merged one + // create another changeset let txs_new: BTreeSet> = [tx3.clone()].into(); let first_seen_new: BTreeMap = [(tx3.compute_txid(), 1755416700)].into(); @@ -413,6 +487,7 @@ pub fn persist_first_seen( first_seen: first_seen_new.clone(), ..ChangeSet::::default() }; + // persist, load and check if same as merged persist(&mut store, &changeset).expect("should persist changeset"); let changeset_read_new = initialize(&mut store).expect("should load persisted changeset"); @@ -420,6 +495,11 @@ pub fn persist_first_seen( assert_eq!(changeset_read_new.first_seen, first_seen); } +/// tests if `txouts` field of [`tx_graph::ChangeSet`] is being persisted correctly +/// +/// We create a dummy [`tx_graph::ChangeSet`] with only `txouts` field populated, persist it and +/// check if loaded `ChangeSet` matches the persisted one. We then create another such dummy +/// `ChangeSet`, persist it and load it to check if merged `ChangeSet` is returned. 
pub fn persist_txouts( file_name: &str, create_store: CreateStore, @@ -431,10 +511,12 @@ pub fn persist_txouts( Persist: Fn(&mut Store, &tx_graph::ChangeSet) -> anyhow::Result<()>, { use tx_graph::ChangeSet; + // initialize store let temp_dir = tempfile::tempdir().expect("must create tempdir"); let file_path = temp_dir.path().join(file_name); let mut store = create_store(&file_path).expect("store should get created"); + // create changeset let changeset = initialize(&mut store).expect("should initialize and load empty changeset"); assert_eq!(changeset, ChangeSet::default()); @@ -467,11 +549,13 @@ pub fn persist_txouts( ..ChangeSet::::default() }; + // persist and load persist(&mut store, &changeset).expect("should persist changeset"); let changeset_read = initialize(&mut store).expect("should load changeset"); assert_eq!(changeset_read.txouts, txouts); + // create another changeset let txouts_new: BTreeMap = [( OutPoint::new(hash!("K"), 0), TxOut { @@ -489,6 +573,7 @@ pub fn persist_txouts( ..ChangeSet::::default() }; + // persist, load and check if same as merged persist(&mut store, &changeset).expect("should persist changeset"); let changeset_read_new = initialize(&mut store).expect("should load changeset"); @@ -496,6 +581,11 @@ pub fn persist_txouts( assert_eq!(changeset_read_new.txouts, txouts); } +/// tests if `txs` field of [`tx_graph::ChangeSet`] is being persisted correctly +/// +/// We create a dummy [`tx_graph::ChangeSet`] with only `txs` field populated, persist it and check +/// if loaded `ChangeSet` matches the persisted one. We then create another such dummy `ChangeSet`, +/// persist it and load it to check if merged `ChangeSet` is returned. pub fn persist_txs( file_name: &str, create_store: CreateStore, @@ -507,13 +597,16 @@ pub fn persist_txs( Persist: Fn(&mut Store, &tx_graph::ChangeSet) -> anyhow::Result<()>, { use tx_graph::ChangeSet; + // create store let temp_dir = tempfile::tempdir().expect("must create tempdir"); let file_path = temp_dir.path().join(file_name); let mut store = create_store(&file_path).expect("store should get created"); + // initialize store let changeset = initialize(&mut store).expect("should initialize and load empty changeset"); assert_eq!(changeset, ChangeSet::::default()); + // create changeset let tx1 = Arc::new(create_one_inp_one_out_tx(hash!("BTC"), 30_000)); let tx2 = Arc::new(create_one_inp_one_out_tx(tx1.compute_txid(), 20_000)); let tx3 = Arc::new(create_one_inp_one_out_tx(tx2.compute_txid(), 19_000)); @@ -525,23 +618,31 @@ pub fn persist_txs( ..ChangeSet::::default() }; + // persist and load persist(&mut store, &changeset).expect("should persist changeset"); let changeset_read = initialize(&mut store).expect("should load persisted changeset"); assert_eq!(changeset_read.txs, txs); let txs_new: BTreeSet> = [tx3].into(); + // create another changeset let changeset = ChangeSet:: { txs: txs_new.clone(), ..ChangeSet::::default() }; + // persist, load and check if same as merged persist(&mut store, &changeset).expect("should persist changeset"); let changeset_read_new = initialize(&mut store).expect("should load persisted changeset"); txs.merge(txs_new); assert_eq!(changeset_read_new.txs, txs); } +/// tests if `anchors` field of [`tx_graph::ChangeSet`] is being persisted correctly +/// +/// We create a dummy [`tx_graph::ChangeSet`] with only `anchors` and `txs` fields populated, +/// persist it and check if loaded `ChangeSet` matches the persisted one. 
We then create another +/// such dummy `ChangeSet`, persist it and load it to check if merged `ChangeSet` is returned. pub fn persist_anchors( file_name: &str, create_store: CreateStore, @@ -553,13 +654,16 @@ pub fn persist_anchors( Persist: Fn(&mut Store, &tx_graph::ChangeSet) -> anyhow::Result<()>, { use tx_graph::ChangeSet; + // create store let temp_dir = tempfile::tempdir().expect("must create tempdir"); let file_path = temp_dir.path().join(file_name); let mut store = create_store(&file_path).expect("store should get created"); + // initialize store let changeset = initialize(&mut store).expect("should initialize and load empty changeset"); assert_eq!(changeset, ChangeSet::::default()); + // create changeset let tx1 = Arc::new(create_one_inp_one_out_tx(hash!(""), 30_000)); let tx2 = Arc::new(create_one_inp_one_out_tx(tx1.compute_txid(), 20_000)); let tx3 = Arc::new(create_one_inp_one_out_tx(tx2.compute_txid(), 19_000)); @@ -584,10 +688,12 @@ pub fn persist_anchors( ..ChangeSet::::default() }; + // persist and load persist(&mut store, &changeset).expect("should persist changeset"); let changeset_read = initialize(&mut store).expect("should load persisted changeset"); assert_eq!(changeset_read.anchors, anchors); + // create another changeset let txs_new: BTreeSet> = [tx3.clone()].into(); let anchors_new: BTreeSet<(ConfirmationBlockTime, Txid)> = [(anchor2, tx3.compute_txid())].into(); @@ -598,6 +704,7 @@ pub fn persist_anchors( ..ChangeSet::::default() }; + // persist, load and check if same as merged persist(&mut store, &changeset).expect("should persist changeset"); let changeset_read = initialize(&mut store).expect("should load persisted changeset"); @@ -605,7 +712,11 @@ pub fn persist_anchors( assert_eq!(changeset_read.anchors, anchors); } -// check the merge by changing asserts +/// tests if `last_revealed` field of [`keychain_txout::ChangeSet`] is being persisted correctly +/// +/// We create a dummy [`keychain_txout::ChangeSet`] with only `last_revealed` field populated, +/// persist it and check if loaded `ChangeSet` matches the persisted one. We then create another +/// such dummy `ChangeSet`, persist it and load it to check if merged `ChangeSet` is returned. 
pub fn persist_last_revealed( file_name: &str, create_store: CreateStore, @@ -617,13 +728,16 @@ pub fn persist_last_revealed( Persist: Fn(&mut Store, &keychain_txout::ChangeSet) -> anyhow::Result<()>, { use keychain_txout::ChangeSet; + // create store let temp_dir = tempfile::tempdir().expect("must create tempdir"); let file_path = temp_dir.path().join(file_name); let mut store = create_store(&file_path).expect("store should get created"); + // initialize store let changeset = initialize(&mut store).expect("should initialize and load empty changeset"); assert_eq!(changeset, ChangeSet::default()); + // create changeset let descriptor_ids = crate::utils::DESCRIPTORS.map(|d| parse_descriptor(d).descriptor_id()); let mut last_revealed: BTreeMap = @@ -634,10 +748,12 @@ pub fn persist_last_revealed( ..ChangeSet::default() }; + // persist and load persist(&mut store, &changeset).expect("should persist changeset"); let changeset_read = initialize(&mut store).expect("should load persisted changeset"); assert_eq!(changeset_read.last_revealed, last_revealed); + // create another changeset let last_revealed_new: BTreeMap = [(descriptor_ids[0], 2)].into(); let changeset = ChangeSet { @@ -645,12 +761,18 @@ pub fn persist_last_revealed( ..ChangeSet::default() }; + // persist, load and check if same as merged persist(&mut store, &changeset).expect("should persist changeset"); let changeset_read_new = initialize(&mut store).expect("should load persisted changeset"); last_revealed.merge(last_revealed_new); assert_eq!(changeset_read_new.last_revealed, last_revealed); } +/// tests if `spk_cache` field of [`keychain_txout::ChangeSet`] is being persisted correctly +/// +/// We create a dummy [`keychain_txout::ChangeSet`] with only `spk_cache` field populated, persist +/// it and check if loaded `ChangeSet` matches the persisted one. We then create another such dummy +/// `ChangeSet`, persist it and load it to check if merged `ChangeSet` is returned. pub fn persist_spk_cache( file_name: &str, create_store: CreateStore, @@ -662,13 +784,16 @@ pub fn persist_spk_cache( Persist: Fn(&mut Store, &keychain_txout::ChangeSet) -> anyhow::Result<()>, { use keychain_txout::ChangeSet; + // create store let temp_dir = tempfile::tempdir().expect("must create tempdir"); let file_path = temp_dir.path().join(file_name); let mut store = create_store(&file_path).expect("store should get created"); + // initialize store let changeset = initialize(&mut store).expect("should initialize and load empty changeset"); assert_eq!(changeset, ChangeSet::default()); + // create changeset let descriptor_ids = crate::utils::DESCRIPTORS.map(|d| parse_descriptor(d).descriptor_id()); let descs = crate::utils::DESCRIPTORS.map(parse_descriptor); @@ -689,10 +814,12 @@ pub fn persist_spk_cache( ..ChangeSet::default() }; + // persist and load persist(&mut store, &changeset).expect("should persist changeset"); let changeset_read = initialize(&mut store).expect("should load persisted changeset"); assert_eq!(changeset_read.spk_cache, spk_cache); + // create another changeset let spk_cache_new: BTreeMap> = [( descriptor_ids[0], SpkIterator::new_with_range(&descs[0], 126..=150).collect(), @@ -704,6 +831,7 @@ pub fn persist_spk_cache( ..ChangeSet::default() }; + // persist, load and check if same as merged persist(&mut store, &changeset).expect("should persist changeset"); let changeset_read_new = initialize(&mut store).expect("should load persisted changeset"); let spk_cache: BTreeMap> = [