@@ -161,7 +161,7 @@ fn revoked_output_htlc_resolution_timing() {
 
 #[test]
 fn archive_fully_resolved_monitors() {
-	// Test we can archive fully resolved channel monitor.
+	// Test we archive fully resolved channel monitors at the right time.
 	let chanmon_cfgs = create_chanmon_cfgs(2);
 	let node_cfgs = create_node_cfgs(2, &chanmon_cfgs);
 	let mut user_config = test_default_channel_config();
@@ -171,46 +171,130 @@ fn archive_fully_resolved_monitors() {
 	let (_, _, chan_id, funding_tx) =
 		create_announced_chan_between_nodes_with_value(&nodes, 0, 1, 1_000_000, 1_000_000);
 
-	nodes[0].node.close_channel(&chan_id, &nodes[1].node.get_our_node_id()).unwrap();
-	let node_0_shutdown = get_event_msg!(nodes[0], MessageSendEvent::SendShutdown, nodes[1].node.get_our_node_id());
-	nodes[1].node.handle_shutdown(nodes[0].node.get_our_node_id(), &node_0_shutdown);
-	let node_1_shutdown = get_event_msg!(nodes[1], MessageSendEvent::SendShutdown, nodes[0].node.get_our_node_id());
-	nodes[0].node.handle_shutdown(nodes[1].node.get_our_node_id(), &node_1_shutdown);
+	let (payment_preimage, payment_hash, ..) = route_payment(&nodes[0], &[&nodes[1]], 10_000_000);
 
-	let node_0_closing_signed = get_event_msg!(nodes[0], MessageSendEvent::SendClosingSigned, nodes[1].node.get_our_node_id());
-	nodes[1].node.handle_closing_signed(nodes[0].node.get_our_node_id(), &node_0_closing_signed);
-	let node_1_closing_signed = get_event_msg!(nodes[1], MessageSendEvent::SendClosingSigned, nodes[0].node.get_our_node_id());
-	nodes[0].node.handle_closing_signed(nodes[1].node.get_our_node_id(), &node_1_closing_signed);
-	let (_, node_0_2nd_closing_signed) = get_closing_signed_broadcast!(nodes[0].node, nodes[1].node.get_our_node_id());
-	nodes[1].node.handle_closing_signed(nodes[0].node.get_our_node_id(), &node_0_2nd_closing_signed.unwrap());
-	let (_, _) = get_closing_signed_broadcast!(nodes[1].node, nodes[0].node.get_our_node_id());
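+	// Rather than cooperatively closing, force-close with the HTLC still pending so that both
+	// monitors are left holding claimable balances which must be resolved on chain.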
+	nodes[0].node.force_close_broadcasting_latest_txn(&chan_id, &nodes[1].node.get_our_node_id(), "".to_owned()).unwrap();
+	check_added_monitors!(nodes[0], 1);
+	check_closed_broadcast!(nodes[0], true);
+	check_closed_event!(nodes[0], 1, ClosureReason::HolderForceClosed { broadcasted_latest_txn: Some(true) }, [nodes[1].node.get_our_node_id()], 1_000_000);
 
-	let shutdown_tx = nodes[0].tx_broadcaster.txn_broadcasted.lock().unwrap().split_off(0);
+	let commitment_tx = nodes[0].tx_broadcaster.txn_broadcasted.lock().unwrap().split_off(0);
+	assert_eq!(commitment_tx.len(), 1);
 
-	mine_transaction(&nodes[0], &shutdown_tx[0]);
-	mine_transaction(&nodes[1], &shutdown_tx[0]);
+	mine_transaction(&nodes[0], &commitment_tx[0]);
+	mine_transaction(&nodes[1], &commitment_tx[0]);
+	let reason = ClosureReason::CommitmentTxConfirmed;
+	check_closed_event!(nodes[1], 1, reason, [nodes[0].node.get_our_node_id()], 1_000_000);
+	check_closed_broadcast(&nodes[1], 1, true);
+	check_added_monitors(&nodes[1], 1);
 
-	connect_blocks(&nodes[0], 6);
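+	// nodes[0] broadcast its own commitment, so its balance is timelocked for the channel's
+	// to_self_delay (BREAKDOWN_TIMEOUT here), while nodes[1] only needs ANTI_REORG_DELAY blocks.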
+	connect_blocks(&nodes[0], BREAKDOWN_TIMEOUT as u32);
 	connect_blocks(&nodes[1], 6);
 
-	check_closed_event!(nodes[0], 1, ClosureReason::LocallyInitiatedCooperativeClosure, [nodes[1].node.get_our_node_id()], 1000000);
-	check_closed_event!(nodes[1], 1, ClosureReason::CounterpartyInitiatedCooperativeClosure, [nodes[0].node.get_our_node_id()], 1000000);
+	// After the commitment transaction reaches enough confirmations for nodes[0] to claim its
+	// balance, both nodes should still have a pending `Balance` as the HTLC has not yet resolved.
+	assert!(!nodes[0].chain_monitor.chain_monitor.get_claimable_balances(&[]).is_empty());
+	assert!(!nodes[1].chain_monitor.chain_monitor.get_claimable_balances(&[]).is_empty());
+
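+	// With the to_self_delay waited out, nodes[0]'s balance should now be handed back to the
+	// user via a `SpendableOutputs` event.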
+	let spendable_event = nodes[0].chain_monitor.chain_monitor.get_and_clear_pending_events();
+	assert_eq!(spendable_event.len(), 1);
+	if let Event::SpendableOutputs { .. } = spendable_event[0] {} else { panic!(); }
 
+	// Until the `Balance` set of both monitors goes empty, calling
+	// `archive_fully_resolved_channel_monitors` will do nothing (though we don't bother to observe
+	// that except later by checking that the monitors are archived at the exact correct block
+	// height).
+	nodes[0].chain_monitor.chain_monitor.archive_fully_resolved_channel_monitors();
 	assert_eq!(nodes[0].chain_monitor.chain_monitor.list_monitors().len(), 1);
-	// First archive should set balances_empty_height to current block height
+	nodes[1].chain_monitor.chain_monitor.archive_fully_resolved_channel_monitors();
+	assert_eq!(nodes[1].chain_monitor.chain_monitor.list_monitors().len(), 1);
+
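+	// nodes[1] now claims the HTLC on chain, spending the HTLC output of the confirmed
+	// commitment transaction with the preimage.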
+	nodes[1].node.claim_funds(payment_preimage);
+	check_added_monitors(&nodes[1], 1);
+	expect_payment_claimed!(nodes[1], payment_hash, 10_000_000);
+	let htlc_claim_tx = nodes[1].tx_broadcaster.txn_broadcasted.lock().unwrap().split_off(0);
+	assert_eq!(htlc_claim_tx.len(), 1);
+
+	mine_transaction(&nodes[0], &htlc_claim_tx[0]);
+	mine_transaction(&nodes[1], &htlc_claim_tx[0]);
+
+	// Both nodes should retain the pending `Balance` until the HTLC resolution transaction has
+	// six confirmations.
+	assert!(!nodes[0].chain_monitor.chain_monitor.get_claimable_balances(&[]).is_empty());
+	assert!(!nodes[1].chain_monitor.chain_monitor.get_claimable_balances(&[]).is_empty());
+
+	// Until the `Balance` set of both monitors goes empty, calling
+	// `archive_fully_resolved_channel_monitors` will do nothing (though we don't bother to observe
+	// that except later by checking that the monitors are archived at the exact correct block
+	// height).
+	nodes[0].chain_monitor.chain_monitor.archive_fully_resolved_channel_monitors();
+	assert_eq!(nodes[0].chain_monitor.chain_monitor.list_monitors().len(), 1);
+	nodes[1].chain_monitor.chain_monitor.archive_fully_resolved_channel_monitors();
+	assert_eq!(nodes[1].chain_monitor.chain_monitor.list_monitors().len(), 1);
+
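+	// Together with the block in which the claim transaction was mined, this gives it six
+	// (i.e. ANTI_REORG_DELAY) confirmations, resolving the final `Balance` on both monitors.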
+	connect_blocks(&nodes[0], 5);
+	connect_blocks(&nodes[1], 5);
+
+	assert!(nodes[0].chain_monitor.chain_monitor.get_claimable_balances(&[]).is_empty());
+	assert!(nodes[1].chain_monitor.chain_monitor.get_claimable_balances(&[]).is_empty());
+
+	// At this point, both nodes have no more `Balance`s, but nodes[0]'s `ChannelMonitor` still
+	// hasn't had the `MonitorEvent` that contains the preimage claimed by the `ChannelManager`.
+	// Thus, calling `archive_fully_resolved_channel_monitors` and waiting 4032 blocks will not
+	// result in the `ChannelMonitor` being archived.
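+	// (The first `archive_fully_resolved_channel_monitors` call records the height at which the
+	// `Balance` set went empty; later calls only archive once 4032 further blocks, roughly four
+	// weeks, have been connected.)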
 	nodes[0].chain_monitor.chain_monitor.archive_fully_resolved_channel_monitors();
 	assert_eq!(nodes[0].chain_monitor.chain_monitor.list_monitors().len(), 1);
 	connect_blocks(&nodes[0], 4032);
-	// Second call after 4032 blocks, should archive the monitor
 	nodes[0].chain_monitor.chain_monitor.archive_fully_resolved_channel_monitors();
-	// Should have no monitors left
+	assert_eq!(nodes[0].chain_monitor.chain_monitor.list_monitors().len(), 1);
+
+	// ...however, nodes[1]'s `ChannelMonitor` is ready to be archived, and will be in exactly 4032
+	// blocks.
+	nodes[1].chain_monitor.chain_monitor.archive_fully_resolved_channel_monitors();
+	assert_eq!(nodes[1].chain_monitor.chain_monitor.list_monitors().len(), 1);
+	connect_blocks(&nodes[1], 4031);
+	nodes[1].chain_monitor.chain_monitor.archive_fully_resolved_channel_monitors();
+	assert_eq!(nodes[1].chain_monitor.chain_monitor.list_monitors().len(), 1);
+	connect_blocks(&nodes[1], 1);
+	nodes[1].chain_monitor.chain_monitor.archive_fully_resolved_channel_monitors();
+	assert_eq!(nodes[1].chain_monitor.chain_monitor.list_monitors().len(), 0);
+
+	// Finally, we process the pending `MonitorEvent` from nodes[0], allowing the `ChannelMonitor`
+	// to be archived 4032 blocks later.
+	expect_payment_sent(&nodes[0], payment_preimage, None, true, false);
+	nodes[0].chain_monitor.chain_monitor.archive_fully_resolved_channel_monitors();
+	assert_eq!(nodes[0].chain_monitor.chain_monitor.list_monitors().len(), 1);
+	connect_blocks(&nodes[0], 4031);
+	nodes[0].chain_monitor.chain_monitor.archive_fully_resolved_channel_monitors();
+	assert_eq!(nodes[0].chain_monitor.chain_monitor.list_monitors().len(), 1);
+	connect_blocks(&nodes[0], 1);
+	nodes[0].chain_monitor.chain_monitor.archive_fully_resolved_channel_monitors();
 	assert_eq!(nodes[0].chain_monitor.chain_monitor.list_monitors().len(), 0);
+
 	// Remove the corresponding outputs and transactions the chain source is
 	// watching. This is to make sure the `Drop` function assertions pass.
-	nodes.get_mut(0).unwrap().chain_source.remove_watched_txn_and_outputs(
-		OutPoint { txid: funding_tx.compute_txid(), index: 0 },
-		funding_tx.output[0].script_pubkey.clone()
-	);
+	for node in nodes {
+		node.chain_source.remove_watched_txn_and_outputs(
+			OutPoint { txid: funding_tx.compute_txid(), index: 0 },
+			funding_tx.output[0].script_pubkey.clone()
+		);
+		node.chain_source.remove_watched_txn_and_outputs(
+			OutPoint { txid: commitment_tx[0].compute_txid(), index: 0 },
+			commitment_tx[0].output[0].script_pubkey.clone()
+		);
+		node.chain_source.remove_watched_txn_and_outputs(
+			OutPoint { txid: commitment_tx[0].compute_txid(), index: 1 },
+			commitment_tx[0].output[1].script_pubkey.clone()
+		);
+		node.chain_source.remove_watched_txn_and_outputs(
+			OutPoint { txid: commitment_tx[0].compute_txid(), index: 2 },
+			commitment_tx[0].output[2].script_pubkey.clone()
+		);
+		node.chain_source.remove_watched_txn_and_outputs(
+			OutPoint { txid: htlc_claim_tx[0].compute_txid(), index: 0 },
+			htlc_claim_tx[0].output[0].script_pubkey.clone()
+		);
+	}
 }
 
 fn do_chanmon_claim_value_coop_close(anchors: bool) {