diff --git a/stackslib/src/chainstate/nakamoto/tests/node.rs b/stackslib/src/chainstate/nakamoto/tests/node.rs
index e7a6135d18..794def1106 100644
--- a/stackslib/src/chainstate/nakamoto/tests/node.rs
+++ b/stackslib/src/chainstate/nakamoto/tests/node.rs
@@ -1060,6 +1060,105 @@ impl TestStacksNode {
         let cost = builder.tenure_finish(tenure_tx).unwrap();
         Ok((block, size, cost))
     }
+
+    /// Insert a staging Nakamoto block as a pushed block and
+    /// then process it as the next ready block.
+    /// NOTE: Will panic if called with unprocessed staging
+    /// blocks already in the queue.
+    pub fn process_pushed_next_ready_block<'a>(
+        stacks_node: &mut TestStacksNode,
+        sortdb: &mut SortitionDB,
+        miner: &mut TestMiner,
+        tenure_id_consensus_hash: &ConsensusHash,
+        coord: &mut ChainsCoordinator<
+            'a,
+            TestEventObserver,
+            (),
+            OnChainRewardSetProvider<'a, TestEventObserver>,
+            (),
+            (),
+            BitcoinIndexer,
+        >,
+        nakamoto_block: NakamotoBlock,
+    ) -> Result<Option<StacksEpochReceipt>, ChainstateError> {
+        // Before proceeding, make sure the caller did not accidentally construct a test with unprocessed blocks already in the queue
+        let nakamoto_blocks_db = stacks_node.chainstate.nakamoto_blocks_db();
+        assert!(nakamoto_blocks_db
+            .next_ready_nakamoto_block(stacks_node.chainstate.db())
+            .unwrap().is_none(), "process_pushed_next_ready_block can only be called if the staging blocks queue is empty");
+
+        let tenure_sn =
+            SortitionDB::get_block_snapshot_consensus(sortdb.conn(), tenure_id_consensus_hash)?
+                .ok_or_else(|| ChainstateError::NoSuchBlockError)?;
+
+        let cycle = sortdb
+            .pox_constants
+            .block_height_to_reward_cycle(sortdb.first_block_height, tenure_sn.block_height)
+            .unwrap();
+
+        // Get the reward set
+        let sort_tip_sn = SortitionDB::get_canonical_burn_chain_tip(sortdb.conn())?;
+        let reward_set = load_nakamoto_reward_set(
+            miner
+                .burnchain
+                .block_height_to_reward_cycle(sort_tip_sn.block_height)
+                .expect("FATAL: no reward cycle for sortition"),
+            &sort_tip_sn.sortition_id,
+            &miner.burnchain,
+            &mut stacks_node.chainstate,
+            &nakamoto_block.header.parent_block_id,
+            sortdb,
+            &OnChainRewardSetProvider::new(),
+        )
+        .expect("Failed to load reward set")
+        .expect("Expected a reward set")
+        .0
+        .known_selected_anchor_block_owned()
+        .expect("Unknown reward set");
+
+        let block_id = nakamoto_block.block_id();
+
+        debug!(
+            "Process Nakamoto block {block_id} ({:?})",
+            &nakamoto_block.header
+        );
+
+        let sort_tip = SortitionDB::get_canonical_sortition_tip(sortdb.conn())?;
+        let mut sort_handle = sortdb.index_handle(&sort_tip);
+
+        // Force the block to be added to the nakamoto_staging_blocks table
+        let config = stacks_node.chainstate.config();
+        let (headers_conn, staging_db_tx) =
+            stacks_node.chainstate.headers_conn_and_staging_tx_begin()?;
+        let accepted = NakamotoChainState::accept_block(
+            &config,
+            &nakamoto_block,
+            &mut sort_handle,
+            &staging_db_tx,
+            headers_conn,
+            &reward_set,
+            NakamotoBlockObtainMethod::Pushed,
+        )?;
+        staging_db_tx.commit()?;
+        debug!("Accepted Nakamoto block {}", &nakamoto_block.block_id());
+        // Actually attempt to process the accepted block added to nakamoto_staging_blocks
+        // Will attempt to execute the transactions via a call to append_block
+        let res = NakamotoChainState::process_next_nakamoto_block(
+            &mut coord.chain_state_db,
+            &mut coord.sortition_db,
+            &coord.canonical_sortition_tip.clone().expect(
+                "FAIL: processing a new Stacks block, but don't have a canonical sortition tip",
+            ),
+            coord.dispatcher,
+            coord.config.txindex,
+        )?;
+        if res.is_some() {
+            // If we successfully processed the block, make sure we append the block to our current tenure
+            // so subsequent blocks do not attempt to reorg it.
+            stacks_node.add_nakamoto_extended_blocks(vec![nakamoto_block]);
+        }
+        Ok(res)
+    }
 }
 
 /// Get the Nakamoto parent linkage data for building atop the last-produced tenure or
diff --git a/stackslib/src/chainstate/tests/consensus.rs b/stackslib/src/chainstate/tests/consensus.rs
index f224f72646..2526ceaf1a 100644
--- a/stackslib/src/chainstate/tests/consensus.rs
+++ b/stackslib/src/chainstate/tests/consensus.rs
@@ -37,8 +37,8 @@ use stacks_common::bitvec::BitVec;
 use crate::burnchains::PoxConstants;
 use crate::chainstate::burn::db::sortdb::SortitionDB;
 use crate::chainstate::nakamoto::{NakamotoBlock, NakamotoBlockHeader, NakamotoChainState};
-use crate::chainstate::stacks::boot::RewardSet;
 use crate::chainstate::stacks::db::{ClarityTx, StacksChainState, StacksEpochReceipt};
+use crate::chainstate::stacks::tests::TestStacksNode;
 use crate::chainstate::stacks::{
     Error as ChainstateError, StacksTransaction, TenureChangeCause, MINER_BLOCK_CONSENSUS_HASH,
     MINER_BLOCK_HEADER_HASH,
@@ -54,6 +54,13 @@ pub const SK_1: &str = "a1289f6438855da7decf9b61b852c882c398cff1446b2a0f823538aa
 pub const SK_2: &str = "4ce9a8f7539ea93753a36405b16e8b57e15a552430410709c2b6d65dca5c02e201";
 pub const SK_3: &str = "cb95ddd0fe18ec57f4f3533b95ae564b3f1ae063dbf75b46334bd86245aef78501";
 
+const EPOCHS_TO_TEST: [StacksEpochId; 4] = [
+    StacksEpochId::Epoch30,
+    StacksEpochId::Epoch31,
+    StacksEpochId::Epoch32,
+    StacksEpochId::Epoch33,
+];
+
 /// The private key for the faucet account.
 pub const FAUCET_PRIV_KEY: LazyCell<StacksPrivateKey> = LazyCell::new(|| {
     StacksPrivateKey::from_hex("510f96a8efd0b11e211733c1ac5e3fa6f3d3fcdd62869e376c47decb3e14fea101")
@@ -298,9 +305,15 @@ impl ConsensusTest<'_> {
 
             let tenure_change_tx = self.chain.miner.make_nakamoto_tenure_change(tenure_change);
             let coinbase_tx = self.chain.miner.make_nakamoto_coinbase(None, vrf_proof);
-            let _blocks_and_sizes =
-                self.chain
-                    .make_nakamoto_tenure(tenure_change_tx, coinbase_tx, Some(0));
+            let blocks_and_sizes = self
+                .chain
+                .make_nakamoto_tenure(tenure_change_tx, coinbase_tx, Some(0))
+                .unwrap();
+            assert_eq!(
+                blocks_and_sizes.len(),
+                1,
+                "Mined more than one Nakamoto block"
+            );
             let burn_block_height = self.chain.get_burn_block_height();
             current_epoch =
                 SortitionDB::get_stacks_epoch(self.chain.sortdb().conn(), burn_block_height)
@@ -331,59 +344,38 @@ impl ConsensusTest<'_> {
             for (i, block) in epoch_blocks.iter().enumerate() {
                 debug!("--------- Running block {i} for epoch {epoch:?} ---------");
                 let (nakamoto_block, block_size) = self.construct_nakamoto_block(&block);
-                let sortdb = self.chain.sortdb.take().unwrap();
+                let mut sortdb = self.chain.sortdb.take().unwrap();
+                let mut stacks_node = self.chain.stacks_node.take().unwrap();
                 let chain_tip = NakamotoChainState::get_canonical_block_header(
-                    self.chain.stacks_node().chainstate.db(),
+                    stacks_node.chainstate.db(),
                     &sortdb,
                 )
                 .unwrap()
                 .unwrap();
                 let pox_constants = PoxConstants::test_default();
+                let sig_hash = nakamoto_block.header.signer_signature_hash();
                 debug!(
-                    "--------- Appending block {} ---------",
-                    nakamoto_block.header.signer_signature_hash();
+                    "--------- Processing block {sig_hash} ---------";
                     "block" => ?nakamoto_block
                 );
-                {
-                    let (mut chainstate_tx, clarity_instance) = self
-                        .chain
-                        .stacks_node()
-                        .chainstate
-                        .chainstate_tx_begin()
-                        .unwrap();
-
-                    let mut burndb_conn = sortdb.index_handle_at_tip();
-
-                    let result = NakamotoChainState::append_block(
-                        &mut chainstate_tx,
-                        clarity_instance,
-                        &mut burndb_conn,
-                        &chain_tip.consensus_hash,
-                        &pox_constants,
-                        &chain_tip,
-                        &chain_tip.burn_header_hash,
-                        chain_tip.burn_header_height,
-                        chain_tip.burn_header_timestamp,
-                        &nakamoto_block,
-                        block_size.try_into().unwrap(),
-                        nakamoto_block.header.burn_spent,
-                        1500,
-                        &RewardSet::empty(),
-                        false,
-                    );
-
-                    debug!("--------- Appended block: {} ---------", result.is_ok());
-                    let remapped_result = result.map(|(receipt, clarity_commit, _, _)| {
-                        clarity_commit.commit();
-                        receipt
-                    });
-                    let expected_marf = nakamoto_block.header.state_index_root;
-                    results.push(ExpectedResult::create_from(remapped_result, expected_marf));
-                    chainstate_tx.commit().unwrap();
-                }
-
+                let expected_marf = nakamoto_block.header.state_index_root;
+                let res = TestStacksNode::process_pushed_next_ready_block(
+                    &mut stacks_node,
+                    &mut sortdb,
+                    &mut self.chain.miner,
+                    &chain_tip.consensus_hash,
+                    &mut self.chain.coord,
+                    nakamoto_block.clone(),
+                );
+                debug!(
+                    "--------- Processed block: {sig_hash} ---------";
+                    "block" => ?nakamoto_block
+                );
+                let remapped_result = res.map(|receipt| receipt.unwrap()).into();
+                results.push(ExpectedResult::create_from(remapped_result, expected_marf));
                 // Restore chainstate for the next block
                 self.chain.sortdb = Some(sortdb);
+                self.chain.stacks_node = Some(stacks_node);
             }
         }
         results
@@ -523,31 +515,13 @@ impl ConsensusTest<'_> {
 #[test]
 fn test_append_empty_blocks() {
+    let empty_test_blocks = vec![TestBlock {
+        transactions: vec![],
+    }];
     let mut epoch_blocks = HashMap::new();
-    epoch_blocks.insert(
-        StacksEpochId::Epoch30,
-        vec![TestBlock {
-            transactions: vec![],
-        }],
-    );
-    epoch_blocks.insert(
-        StacksEpochId::Epoch31,
-        vec![TestBlock {
-            transactions: vec![],
-        }],
-    );
-    epoch_blocks.insert(
-        StacksEpochId::Epoch32,
-        vec![TestBlock {
-            transactions: vec![],
-        }],
-    );
-    epoch_blocks.insert(
-        StacksEpochId::Epoch33,
-        vec![TestBlock {
-            transactions: vec![],
-        }],
-    );
+    for epoch in EPOCHS_TO_TEST {
+        epoch_blocks.insert(epoch, empty_test_blocks.clone());
+    }
 
     let test_vector = ConsensusTestVector {
         initial_balances: vec![],
         epoch_blocks,
@@ -564,53 +538,42 @@ fn test_append_stx_transfers_success() {
         StacksPrivateKey::from_hex(SK_2).unwrap(),
         StacksPrivateKey::from_hex(SK_3).unwrap(),
     ];
+    let total_epochs = EPOCHS_TO_TEST.len() as u64;
     let send_amount = 1_000;
     let tx_fee = 180;
+    // initialize balances
     let mut initial_balances = Vec::new();
-    let transactions: Vec<_> = sender_privks
-        .iter()
-        .map(|sender_privk| {
-            initial_balances.push((
-                StacksAddress::p2pkh(false, &StacksPublicKey::from_private(sender_privk)).into(),
-                send_amount + tx_fee,
-            ));
-            // Interestingly, it doesn't seem to care about nonce...
-            make_stacks_transfer_tx(
-                sender_privk,
-                0,
-                tx_fee,
-                CHAIN_ID_TESTNET,
-                &boot_code_addr(false).into(),
-                send_amount,
-            )
-        })
-        .collect();
+    for sender_privk in &sender_privks {
+        let sender_addr =
+            StacksAddress::p2pkh(false, &StacksPublicKey::from_private(sender_privk)).into();
+        // give them enough to cover all transfers across all epochs
+        initial_balances.push((sender_addr, (send_amount + tx_fee) * total_epochs));
+    }
 
+    // build transactions per epoch, incrementing nonce per sender
     let mut epoch_blocks = HashMap::new();
-    epoch_blocks.insert(
-        StacksEpochId::Epoch30,
-        vec![TestBlock {
-            transactions: transactions.clone(),
-        }],
-    );
-    epoch_blocks.insert(
-        StacksEpochId::Epoch31,
-        vec![TestBlock {
-            transactions: transactions.clone(),
-        }],
-    );
-    epoch_blocks.insert(
-        StacksEpochId::Epoch32,
-        vec![TestBlock {
-            transactions: transactions.clone(),
-        }],
-    );
-    epoch_blocks.insert(
-        StacksEpochId::Epoch33,
-        vec![TestBlock {
-            transactions: transactions.clone(),
-        }],
-    );
+    let mut nonces = vec![0u64; sender_privks.len()]; // track nonce per sender
+
+    for epoch in EPOCHS_TO_TEST {
+        let transactions: Vec<_> = sender_privks
+            .iter()
+            .enumerate()
+            .map(|(i, sender_privk)| {
+                let tx = make_stacks_transfer_tx(
+                    sender_privk,
+                    nonces[i], // use current nonce
+                    tx_fee,
+                    CHAIN_ID_TESTNET,
+                    &boot_code_addr(false).into(),
+                    send_amount,
+                );
+                nonces[i] += 1; // increment for next epoch
+                tx
+            })
+            .collect();
+
+        epoch_blocks.insert(epoch, vec![TestBlock { transactions }]);
+    }
 
     let test_vector = ConsensusTestVector {
         initial_balances,
@@ -639,32 +602,13 @@ fn test_append_chainstate_error_expression_stack_depth_too_deep() {
     );
     let tx = StacksTransaction::consensus_deserialize(&mut &tx_bytes[..]).unwrap();
-
+    let test_blocks = vec![TestBlock {
+        transactions: vec![tx.clone()],
+    }];
     let mut epoch_blocks = HashMap::new();
-    epoch_blocks.insert(
-        StacksEpochId::Epoch30,
-        vec![TestBlock {
-            transactions: vec![tx.clone()],
-        }],
-    );
-    epoch_blocks.insert(
-        StacksEpochId::Epoch31,
-        vec![TestBlock {
-            transactions: vec![tx.clone()],
-        }],
-    );
-    epoch_blocks.insert(
-        StacksEpochId::Epoch32,
-        vec![TestBlock {
-            transactions: vec![tx.clone()],
-        }],
-    );
-    epoch_blocks.insert(
-        StacksEpochId::Epoch33,
-        vec![TestBlock {
-            transactions: vec![tx.clone()],
-        }],
-    );
+    for epoch in EPOCHS_TO_TEST {
+        epoch_blocks.insert(epoch, test_blocks.clone());
+    }
 
     let test_vector = ConsensusTestVector {
         initial_balances: vec![],
         epoch_blocks,
@@ -676,45 +620,35 @@
 #[test]
 fn test_append_block_with_contract_upload_success() {
-    let contract_name = "test-contract";
-    let contract_content = "(/ 1 1)";
-    let tx_fee = (contract_content.len() * 100) as u64;
+    // build transactions per epoch, incrementing nonce per sender
+    let mut epoch_blocks = HashMap::new();
 
-    let tx_bytes = make_contract_publish(
-        &FAUCET_PRIV_KEY,
-        0,
-        tx_fee,
-        CHAIN_ID_TESTNET,
-        contract_name,
-        &contract_content,
-    );
-    let tx = StacksTransaction::consensus_deserialize(&mut &tx_bytes[..]).unwrap();
+    EPOCHS_TO_TEST
+        .into_iter()
+        .enumerate()
+        .for_each(|(nonce, epoch)| {
+            // Can't deploy to the same contract location so make sure contract name changes
+            let contract_name = format!("test-contract-{nonce}");
+            let contract_content = "(/ 1 1)";
+            let tx_fee = (contract_content.len() * 100) as u64;
+
+            let tx_bytes = make_contract_publish(
+                &FAUCET_PRIV_KEY,
+                nonce as u64,
+                tx_fee,
+                CHAIN_ID_TESTNET,
+                &contract_name,
+                contract_content,
+            );
+            let tx = StacksTransaction::consensus_deserialize(&mut &tx_bytes[..]).unwrap();
+            epoch_blocks.insert(
+                epoch,
+                vec![TestBlock {
+                    transactions: vec![tx],
+                }],
+            );
+        });
 
-    let mut epoch_blocks = HashMap::new();
-    epoch_blocks.insert(
-        StacksEpochId::Epoch30,
-        vec![TestBlock {
-            transactions: vec![tx.clone()],
-        }],
-    );
-    epoch_blocks.insert(
-        StacksEpochId::Epoch31,
-        vec![TestBlock {
-            transactions: vec![tx.clone()],
-        }],
-    );
-    epoch_blocks.insert(
-        StacksEpochId::Epoch32,
-        vec![TestBlock {
-            transactions: vec![tx.clone()],
-        }],
-    );
-    epoch_blocks.insert(
-        StacksEpochId::Epoch33,
-        vec![TestBlock {
-            transactions: vec![tx.clone()],
-        }],
-    );
 
     let test_vector = ConsensusTestVector {
         initial_balances: vec![],
         epoch_blocks,
@@ -724,7 +658,7 @@ fn test_append_block_with_contract_upload_success() {
     insta::assert_ron_snapshot!(result, @r#"
     [
       Success(ExpectedBlockOutput(
-        marf_hash: "b45acd35f4c48a834a2f898ca8bb6c48416ac6bec9d8a3f3662b61ab97b1edde",
+        marf_hash: "ace4d5c5ffb440418fb30fe1999769ab7fff5a243b775b9961a1dfa77d7a1fab",
         transactions: [
           ExpectedTransactionOutput(
             return_type: Response(ResponseData(
@@ -749,7 +683,7 @@ fn test_append_block_with_contract_upload_success() {
         ),
       )),
       Success(ExpectedBlockOutput(
-        marf_hash: "521d75234ec6c64f68648b6b0f6f385d89b58efb581211a411e0e88aa71f3371",
+        marf_hash: "cf7a58c3c15ae61b0861a77a9909e9b05fe35a8d23f974461fd1317693413d3c",
         transactions: [
          ExpectedTransactionOutput(
             return_type: Response(ResponseData(
@@ -774,7 +708,7 @@ fn test_append_block_with_contract_upload_success() {
         ),
       )),
       Success(ExpectedBlockOutput(
-        marf_hash: "511e1cc37e83ef3de4ea56962574d6ddd2d8840d24d9238f19eee5a35127df6a",
+        marf_hash: "ad7f9b2130fda2ca8f5c75237755ab7055f69f91d937b2d0653d52f515765e6f",
         transactions: [
          ExpectedTransactionOutput(
             return_type: Response(ResponseData(
@@ -799,7 +733,7 @@ fn test_append_block_with_contract_upload_success() {
         ),
       )),
       Success(ExpectedBlockOutput(
-        marf_hash: "3520c2dd96f7d91e179c4dcd00f3c49c16d6ec21434fb16921922558282eab26",
+        marf_hash: "25eff57753c490824fc0205b4493d7073e378f0d4648810454cc7e06276fe7da",
         transactions: [
          ExpectedTransactionOutput(
             return_type: Response(ResponseData(
@@ -830,59 +764,45 @@ fn test_append_block_with_contract_upload_success() {
 #[test]
 fn test_append_block_with_contract_call_success() {
     let tx_fee = (FOO_CONTRACT.len() * 100) as u64;
-
-    let tx_bytes = make_contract_publish(
-        &FAUCET_PRIV_KEY,
-        0,
-        tx_fee,
-        CHAIN_ID_TESTNET,
-        "foo_contract",
-        FOO_CONTRACT,
-    );
-    let tx_contract_deploy =
-        StacksTransaction::consensus_deserialize(&mut tx_bytes.as_slice()).unwrap();
-
-    let tx_bytes = make_contract_call(
-        &FAUCET_PRIV_KEY,
-        1,
-        200,
-        CHAIN_ID_TESTNET,
-        &to_addr(&FAUCET_PRIV_KEY),
-        "foo_contract",
-        "bar",
-        &[ClarityValue::UInt(1)],
-    );
-    let tx_contract_call =
-        StacksTransaction::consensus_deserialize(&mut tx_bytes.as_slice()).unwrap();
-
+    let mut nonce = 0;
+    // build transactions per epoch, incrementing nonce per sender
     let mut epoch_blocks = HashMap::new();
-    epoch_blocks.insert(
-        StacksEpochId::Epoch30,
-        vec![TestBlock {
-            transactions: vec![tx_contract_deploy.clone(), tx_contract_call.clone()],
-        }],
-    );
-
-    epoch_blocks.insert(
-        StacksEpochId::Epoch31,
-        vec![TestBlock {
-            transactions: vec![tx_contract_deploy.clone(), tx_contract_call.clone()],
-        }],
-    );
-
-    epoch_blocks.insert(
-        StacksEpochId::Epoch32,
-        vec![TestBlock {
-            transactions: vec![tx_contract_deploy.clone(), tx_contract_call.clone()],
-        }],
-    );
-
-    epoch_blocks.insert(
-        StacksEpochId::Epoch33,
-        vec![TestBlock {
-            transactions: vec![tx_contract_deploy, tx_contract_call],
-        }],
-    );
+    EPOCHS_TO_TEST.into_iter().for_each(|epoch| {
+        // we need to change the contract name across deploys since same sender
+        let contract_name = format!("foo_contract_{nonce}");
+        let tx_bytes = make_contract_publish(
+            &FAUCET_PRIV_KEY,
+            nonce,
+            tx_fee,
+            CHAIN_ID_TESTNET,
+            &contract_name,
+            FOO_CONTRACT,
+        );
+        nonce += 1;
+        let tx_contract_deploy =
+            StacksTransaction::consensus_deserialize(&mut tx_bytes.as_slice()).unwrap();
+
+        let tx_bytes = make_contract_call(
+            &FAUCET_PRIV_KEY,
+            nonce,
+            200,
+            CHAIN_ID_TESTNET,
+            &to_addr(&FAUCET_PRIV_KEY),
+            &contract_name,
+            "bar",
+            &[ClarityValue::UInt(1)],
+        );
+        nonce += 1;
+        let tx_contract_call =
+            StacksTransaction::consensus_deserialize(&mut tx_bytes.as_slice()).unwrap();
+
+        epoch_blocks.insert(
+            epoch,
+            vec![TestBlock {
+                transactions: vec![tx_contract_deploy, tx_contract_call],
+            }],
+        );
+    });
 
     let test_vector = ConsensusTestVector {
         initial_balances: vec![],
diff --git a/stackslib/src/chainstate/tests/snapshots/blockstack_lib__chainstate__tests__consensus__append_block_with_contract_call_success.snap b/stackslib/src/chainstate/tests/snapshots/blockstack_lib__chainstate__tests__consensus__append_block_with_contract_call_success.snap
index 8aec4b5474..2c231d7026 100644
--- a/stackslib/src/chainstate/tests/snapshots/blockstack_lib__chainstate__tests__consensus__append_block_with_contract_call_success.snap
+++ b/stackslib/src/chainstate/tests/snapshots/blockstack_lib__chainstate__tests__consensus__append_block_with_contract_call_success.snap
@@ -4,7 +4,7 @@ expression: result
 ---
 [
   Success(ExpectedBlockOutput(
-    marf_hash: "186c8e49bcfc59bb67ed22f031f009a44681f296392e0f92bed520918ba463ae",
+    marf_hash: "2149237f0e2a3407eed8733d38bce3db1f3ee1c14ed903c21f59546773174f4f",
     transactions: [
       ExpectedTransactionOutput(
         return_type: Response(ResponseData(
@@ -42,7 +42,7 @@ expression: result
     ),
   )),
   Success(ExpectedBlockOutput(
-    marf_hash: "ad23713f072473cad6a32125ed5fa822bb62bbfae8ed2302209c12d2f1958128",
+    marf_hash: "4742e535aebef843720867558b9e2be6148e95157f1fc259e24d162b6c5b78b0",
     transactions: [
       ExpectedTransactionOutput(
         return_type: Response(ResponseData(
@@ -80,7 +80,7 @@ expression: result
     ),
   )),
   Success(ExpectedBlockOutput(
-    marf_hash: "021bd30b09b5ac6ff34abd11f05244a966af937b584b1752f272cd717bb25f1d",
+    marf_hash: "75b37d37b1f171eb01fa71a1629e5cab10f2c5cb852b2532b0d4bd311bc94960",
     transactions: [
       ExpectedTransactionOutput(
         return_type: Response(ResponseData(
@@ -118,7 +118,7 @@ expression: result
     ),
   )),
   Success(ExpectedBlockOutput(
-    marf_hash: "416e728daeec4de695c89d15eede8ddb7b85fb4af82daffb1e0d8166a3e93451",
+    marf_hash: "eabaa1042075ab7afd7721584a590ee8f8542ad4743adc41ed3b1dbe9078a5b4",
     transactions: [
       ExpectedTransactionOutput(
         return_type: Response(ResponseData(
diff --git a/stackslib/src/chainstate/tests/snapshots/blockstack_lib__chainstate__tests__consensus__append_empty_blocks.snap b/stackslib/src/chainstate/tests/snapshots/blockstack_lib__chainstate__tests__consensus__append_empty_blocks.snap
index a1f13d92b8..ea2c09ad3e 100644
--- a/stackslib/src/chainstate/tests/snapshots/blockstack_lib__chainstate__tests__consensus__append_empty_blocks.snap
+++ b/stackslib/src/chainstate/tests/snapshots/blockstack_lib__chainstate__tests__consensus__append_empty_blocks.snap
@@ -3,48 +3,8 @@ source: stackslib/src/chainstate/tests/consensus.rs
 expression: result
 ---
 [
-  Success(ExpectedBlockOutput(
-    marf_hash: "f1934080b22ef0192cfb39710690e7cb0efa9cff950832b33544bde3aa1484a5",
-    transactions: [],
-    total_block_cost: ExecutionCost(
-      write_length: 0,
-      write_count: 0,
-      read_length: 0,
-      read_count: 0,
-      runtime: 0,
-    ),
-  )),
-  Success(ExpectedBlockOutput(
-    marf_hash: "a05f1383613215f5789eb977e4c62dfbb789d90964e14865d109375f7f6dc3cf",
-    transactions: [],
-    total_block_cost: ExecutionCost(
-      write_length: 0,
-      write_count: 0,
-      read_length: 0,
-      read_count: 0,
-      runtime: 0,
-    ),
-  )),
-  Success(ExpectedBlockOutput(
-    marf_hash: "c17829daff8746329c65ae658f4087519c6a8bd8c7f21e51644ddbc9c010390f",
-    transactions: [],
-    total_block_cost: ExecutionCost(
-      write_length: 0,
-      write_count: 0,
-      read_length: 0,
-      read_count: 0,
-      runtime: 0,
-    ),
-  )),
-  Success(ExpectedBlockOutput(
-    marf_hash: "23ecbcb91cac914ba3994a15f3ea7189bcab4e9762530cd0e6c7d237fcd6dc78",
-    transactions: [],
-    total_block_cost: ExecutionCost(
-      write_length: 0,
-      write_count: 0,
-      read_length: 0,
-      read_count: 0,
-      runtime: 0,
-    ),
-  )),
+  Failure("Invalid Nakamoto block: failed static transaction checks"),
+  Failure("Invalid Nakamoto block: failed static transaction checks"),
+  Failure("Invalid Nakamoto block: failed static transaction checks"),
+  Failure("Invalid Nakamoto block: failed static transaction checks"),
 ]
diff --git a/stackslib/src/chainstate/tests/snapshots/blockstack_lib__chainstate__tests__consensus__append_stx_transfers_success.snap b/stackslib/src/chainstate/tests/snapshots/blockstack_lib__chainstate__tests__consensus__append_stx_transfers_success.snap
index c4be6d8a74..ef8280179a 100644
--- a/stackslib/src/chainstate/tests/snapshots/blockstack_lib__chainstate__tests__consensus__append_stx_transfers_success.snap
+++ b/stackslib/src/chainstate/tests/snapshots/blockstack_lib__chainstate__tests__consensus__append_stx_transfers_success.snap
@@ -4,7 +4,7 @@ expression: result
 ---
 [
   Success(ExpectedBlockOutput(
-    marf_hash: "63ea49669d2216ebc7e4f8b5e1cd2c99b8aff9806794adf87dcf709c0a244798",
+    marf_hash: "cc77d584dea4a29e4d15efffc3306a0e6513d5b516903121c3c149cd85600d5d",
     transactions: [
       ExpectedTransactionOutput(
         return_type: Response(ResponseData(
@@ -55,7 +55,7 @@ expression: result
     ),
   )),
   Success(ExpectedBlockOutput(
-    marf_hash: "7fc538e605a4a353871c4a655ae850fe9a70c3875b65f2bb42ea3bef5effed2c",
+    marf_hash: "8e80ece06d148b967241484040d26041c817ad9d8753a5d8d2afd284d0e172bc",
     transactions: [
       ExpectedTransactionOutput(
         return_type: Response(ResponseData(
@@ -106,7 +106,7 @@ expression: result
     ),
   )),
   Success(ExpectedBlockOutput(
-    marf_hash: "4d5c9a6d07806ac5006137de22b083de66fff7119143dd5cd92e4a457d66e028",
+    marf_hash: "aeb567f75a6a551252cedbbd882060d46dda38f0d949431b503fd435664338da",
     transactions: [
       ExpectedTransactionOutput(
         return_type: Response(ResponseData(
@@ -157,7 +157,7 @@ expression: result
     ),
   )),
   Success(ExpectedBlockOutput(
-    marf_hash: "66eed8c0ab31db111a5adcc83d38a7004c6e464e3b9fb9f52ec589bc6d5f2d32",
+    marf_hash: "39a1ec92bc388262902593e82da7af6e0cc12412bd566974cebb7f7e9f4e67ce",
     transactions: [
       ExpectedTransactionOutput(
         return_type: Response(ResponseData(