
Commit a688f1c

Merge pull request #3450 from TheBlueMatt/2024-12-no-prune-with-preimages
Ensure monitors are not archived if they have a preimage we need
2 parents 94411bc + 8ab7222 commit a688f1c
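For context, the behavioral change: a `ChannelMonitor` is now only considered fully resolved (and thus archivable after roughly 4032 blocks) if, in addition to having an empty `Balance` set, it no longer holds payment preimages that a still-pending HTLC on another channel may need; the diff approximates this by requiring `pending_monitor_events` to be empty. Below is a minimal standalone sketch of that gating rule, using hypothetical simplified state rather than LDK's actual `ChannelMonitor` internals:

// Sketch only: simplified stand-ins for ChannelMonitor state, not the real LDK types.
const BLOCKS_THRESHOLD: u32 = 4032; // ~four weeks, as in the patch

struct MonitorStateSketch {
	balances_empty_height: Option<u32>, // height at which the Balance set first became empty
	has_claimable_balances: bool,       // whether get_claimable_balances() would be non-empty
	pending_monitor_events: usize,      // unprocessed events which may carry preimages
}

// Returns whether the monitor would be safe to archive at `current_height` under the rule
// this commit introduces: balances empty for BLOCKS_THRESHOLD blocks AND no pending
// monitor events (i.e. no preimages another channel might still need).
fn is_archivable(state: &MonitorStateSketch, current_height: u32) -> bool {
	let is_all_funds_claimed = !state.has_claimable_balances;
	let preimages_not_needed_elsewhere = state.pending_monitor_events == 0;
	match (state.balances_empty_height, is_all_funds_claimed, preimages_not_needed_elsewhere) {
		(Some(empty_height), true, true) => current_height >= empty_height + BLOCKS_THRESHOLD,
		_ => false, // funds still claimable, preimages still needed, or timer not yet started
	}
}

In the updated test below, this is why nodes[0]'s monitor is only archivable once `expect_payment_sent` processes the pending `MonitorEvent` carrying the preimage, while nodes[1]'s monitor becomes archivable exactly 4032 blocks after its balances go empty.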


2 files changed: +133 −38 lines changed


lightning/src/chain/channelmonitor.rs  +23 −12
@@ -2000,7 +2000,10 @@ impl<Signer: EcdsaChannelSigner> ChannelMonitor<Signer> {
 	}
 
 	/// Checks if the monitor is fully resolved. Resolved monitor is one that has claimed all of
-	/// its outputs and balances (i.e. [`Self::get_claimable_balances`] returns an empty set).
+	/// its outputs and balances (i.e. [`Self::get_claimable_balances`] returns an empty set) and
+	/// which does not have any payment preimages for HTLCs which are still pending on other
+	/// channels.
+	///
 	/// Additionally may update state to track when the balances set became empty.
 	///
 	/// This function returns a tuple of two booleans, the first indicating whether the monitor is
@@ -2019,33 +2022,41 @@ impl<Signer: EcdsaChannelSigner> ChannelMonitor<Signer> {
 			is_all_funds_claimed = false;
 		}
 
+		// As long as HTLCs remain unresolved, they'll be present as a `Balance`. After that point,
+		// if they contained a preimage, an event will appear in `pending_monitor_events` which,
+		// once processed, implies the preimage exists in the corresponding inbound channel.
+		let preimages_not_needed_elsewhere = inner.pending_monitor_events.is_empty();
+
 		const BLOCKS_THRESHOLD: u32 = 4032; // ~four weeks
-		match (inner.balances_empty_height, is_all_funds_claimed) {
-			(Some(balances_empty_height), true) => {
+		match (inner.balances_empty_height, is_all_funds_claimed, preimages_not_needed_elsewhere) {
+			(Some(balances_empty_height), true, true) => {
 				// Claimed all funds, check if reached the blocks threshold.
 				(current_height >= balances_empty_height + BLOCKS_THRESHOLD, false)
 			},
-			(Some(_), false) => {
-				// previously assumed we claimed all funds, but we have new funds to claim.
-				// Should not happen in practice.
-				debug_assert!(false, "Thought we were done claiming funds, but claimable_balances now has entries");
+			(Some(_), false, _)|(Some(_), _, false) => {
+				// previously assumed we claimed all funds, but we have new funds to claim or
+				// preimages are suddenly needed (because of a duplicate-hash HTLC).
+				// This should never happen as once the `Balance`s and preimages are clear, we
+				// should never create new ones.
+				debug_assert!(false,
+					"Thought we were done claiming funds, but claimable_balances now has entries");
 				log_error!(logger,
 					"WARNING: LDK thought it was done claiming all the available funds in the ChannelMonitor for channel {}, but later decided it had more to claim. This is potentially an important bug in LDK, please report it at https://github.com/lightningdevkit/rust-lightning/issues/new",
 					inner.get_funding_txo().0);
 				inner.balances_empty_height = None;
 				(false, true)
 			},
-			(None, true) => {
-				// Claimed all funds but `balances_empty_height` is None. It is set to the
-				// current block height.
+			(None, true, true) => {
+				// Claimed all funds and preimages can be deleted, but `balances_empty_height` is
+				// None. It is set to the current block height.
 				log_debug!(logger,
 					"ChannelMonitor funded at {} is now fully resolved. It will become archivable in {} blocks",
 					inner.get_funding_txo().0, BLOCKS_THRESHOLD);
 				inner.balances_empty_height = Some(current_height);
 				(false, true)
 			},
-			(None, false) => {
-				// Have funds to claim.
+			(None, false, _)|(None, _, false) => {
+				// Have funds to claim or our preimages are still needed.
 				(false, false)
 			},
 		}

lightning/src/ln/monitor_tests.rs  +110 −26
@@ -161,7 +161,7 @@ fn revoked_output_htlc_resolution_timing() {
 
 #[test]
 fn archive_fully_resolved_monitors() {
-	// Test we can archive fully resolved channel monitor.
+	// Test we archive fully resolved channel monitors at the right time.
 	let chanmon_cfgs = create_chanmon_cfgs(2);
 	let node_cfgs = create_node_cfgs(2, &chanmon_cfgs);
 	let mut user_config = test_default_channel_config();
@@ -171,46 +171,130 @@ fn archive_fully_resolved_monitors() {
 	let (_, _, chan_id, funding_tx) =
 		create_announced_chan_between_nodes_with_value(&nodes, 0, 1, 1_000_000, 1_000_000);
 
-	nodes[0].node.close_channel(&chan_id, &nodes[1].node.get_our_node_id()).unwrap();
-	let node_0_shutdown = get_event_msg!(nodes[0], MessageSendEvent::SendShutdown, nodes[1].node.get_our_node_id());
-	nodes[1].node.handle_shutdown(nodes[0].node.get_our_node_id(), &node_0_shutdown);
-	let node_1_shutdown = get_event_msg!(nodes[1], MessageSendEvent::SendShutdown, nodes[0].node.get_our_node_id());
-	nodes[0].node.handle_shutdown(nodes[1].node.get_our_node_id(), &node_1_shutdown);
+	let (payment_preimage, payment_hash, ..) = route_payment(&nodes[0], &[&nodes[1]], 10_000_000);
 
-	let node_0_closing_signed = get_event_msg!(nodes[0], MessageSendEvent::SendClosingSigned, nodes[1].node.get_our_node_id());
-	nodes[1].node.handle_closing_signed(nodes[0].node.get_our_node_id(), &node_0_closing_signed);
-	let node_1_closing_signed = get_event_msg!(nodes[1], MessageSendEvent::SendClosingSigned, nodes[0].node.get_our_node_id());
-	nodes[0].node.handle_closing_signed(nodes[1].node.get_our_node_id(), &node_1_closing_signed);
-	let (_, node_0_2nd_closing_signed) = get_closing_signed_broadcast!(nodes[0].node, nodes[1].node.get_our_node_id());
-	nodes[1].node.handle_closing_signed(nodes[0].node.get_our_node_id(), &node_0_2nd_closing_signed.unwrap());
-	let (_, _) = get_closing_signed_broadcast!(nodes[1].node, nodes[0].node.get_our_node_id());
+	nodes[0].node.force_close_broadcasting_latest_txn(&chan_id, &nodes[1].node.get_our_node_id(), "".to_owned()).unwrap();
+	check_added_monitors!(nodes[0], 1);
+	check_closed_broadcast!(nodes[0], true);
+	check_closed_event!(nodes[0], 1, ClosureReason::HolderForceClosed { broadcasted_latest_txn: Some(true) }, [nodes[1].node.get_our_node_id()], 1_000_000);
 
-	let shutdown_tx = nodes[0].tx_broadcaster.txn_broadcasted.lock().unwrap().split_off(0);
+	let commitment_tx = nodes[0].tx_broadcaster.txn_broadcasted.lock().unwrap().split_off(0);
+	assert_eq!(commitment_tx.len(), 1);
 
-	mine_transaction(&nodes[0], &shutdown_tx[0]);
-	mine_transaction(&nodes[1], &shutdown_tx[0]);
+	mine_transaction(&nodes[0], &commitment_tx[0]);
+	mine_transaction(&nodes[1], &commitment_tx[0]);
+	let reason = ClosureReason::CommitmentTxConfirmed;
+	check_closed_event!(nodes[1], 1, reason, [nodes[0].node.get_our_node_id()], 1_000_000);
+	check_closed_broadcast(&nodes[1], 1, true);
+	check_added_monitors(&nodes[1], 1);
 
-	connect_blocks(&nodes[0], 6);
+	connect_blocks(&nodes[0], BREAKDOWN_TIMEOUT as u32);
 	connect_blocks(&nodes[1], 6);
 
-	check_closed_event!(nodes[0], 1, ClosureReason::LocallyInitiatedCooperativeClosure, [nodes[1].node.get_our_node_id()], 1000000);
-	check_closed_event!(nodes[1], 1, ClosureReason::CounterpartyInitiatedCooperativeClosure, [nodes[0].node.get_our_node_id()], 1000000);
+	// After the commitment transaction reaches enough confirmations for nodes[0] to claim its
+	// balance, both nodes should still have a pending `Balance` as the HTLC has not yet resolved.
+	assert!(!nodes[0].chain_monitor.chain_monitor.get_claimable_balances(&[]).is_empty());
+	assert!(!nodes[1].chain_monitor.chain_monitor.get_claimable_balances(&[]).is_empty());
+
+	let spendable_event = nodes[0].chain_monitor.chain_monitor.get_and_clear_pending_events();
+	assert_eq!(spendable_event.len(), 1);
+	if let Event::SpendableOutputs { .. } = spendable_event[0] {} else { panic!(); }
 
+	// Until the `Balance` set of both monitors goes empty, calling
+	// `archive_fully_resolved_channel_monitors` will do nothing (though we don't bother to observe
+	// that except later by checking that the monitors are archived at the exact correct block
+	// height).
+	nodes[0].chain_monitor.chain_monitor.archive_fully_resolved_channel_monitors();
 	assert_eq!(nodes[0].chain_monitor.chain_monitor.list_monitors().len(), 1);
-	// First archive should set balances_empty_height to current block height
+	nodes[1].chain_monitor.chain_monitor.archive_fully_resolved_channel_monitors();
+	assert_eq!(nodes[1].chain_monitor.chain_monitor.list_monitors().len(), 1);
+
+	nodes[1].node.claim_funds(payment_preimage);
+	check_added_monitors(&nodes[1], 1);
+	expect_payment_claimed!(nodes[1], payment_hash, 10_000_000);
+	let htlc_claim_tx = nodes[1].tx_broadcaster.txn_broadcasted.lock().unwrap().split_off(0);
+	assert_eq!(htlc_claim_tx.len(), 1);
+
+	mine_transaction(&nodes[0], &htlc_claim_tx[0]);
+	mine_transaction(&nodes[1], &htlc_claim_tx[0]);
+
+	// Both nodes should retain the pending `Balance` until the HTLC resolution transaction has six
+	// confirmations
+	assert!(!nodes[0].chain_monitor.chain_monitor.get_claimable_balances(&[]).is_empty());
+	assert!(!nodes[1].chain_monitor.chain_monitor.get_claimable_balances(&[]).is_empty());
+
+	// Until the `Balance` set of both monitors goes empty, calling
+	// `archive_fully_resolved_channel_monitors` will do nothing (though we don't bother to observe
+	// that except later by checking that the monitors are archived at the exact correct block
+	// height).
+	nodes[0].chain_monitor.chain_monitor.archive_fully_resolved_channel_monitors();
+	assert_eq!(nodes[0].chain_monitor.chain_monitor.list_monitors().len(), 1);
+	nodes[1].chain_monitor.chain_monitor.archive_fully_resolved_channel_monitors();
+	assert_eq!(nodes[1].chain_monitor.chain_monitor.list_monitors().len(), 1);
+
+	connect_blocks(&nodes[0], 5);
+	connect_blocks(&nodes[1], 5);
+
+	assert!(nodes[0].chain_monitor.chain_monitor.get_claimable_balances(&[]).is_empty());
+	assert!(nodes[1].chain_monitor.chain_monitor.get_claimable_balances(&[]).is_empty());
+
+	// At this point, both nodes have no more `Balance`s, but nodes[0]'s `ChannelMonitor` still
+	// hasn't had the `MonitorEvent` that contains the preimage claimed by the `ChannelManager`.
+	// Thus, calling `archive_fully_resolved_channel_monitors` and waiting 4032 blocks will not
+	// result in the `ChannelMonitor` being archived.
 	nodes[0].chain_monitor.chain_monitor.archive_fully_resolved_channel_monitors();
 	assert_eq!(nodes[0].chain_monitor.chain_monitor.list_monitors().len(), 1);
 	connect_blocks(&nodes[0], 4032);
-	// Second call after 4032 blocks, should archive the monitor
 	nodes[0].chain_monitor.chain_monitor.archive_fully_resolved_channel_monitors();
-	// Should have no monitors left
+	assert_eq!(nodes[0].chain_monitor.chain_monitor.list_monitors().len(), 1);
+
+	// ...however, nodes[1]'s `ChannelMonitor` is ready to be archived, and will be in exactly 4032
+	// blocks.
+	nodes[1].chain_monitor.chain_monitor.archive_fully_resolved_channel_monitors();
+	assert_eq!(nodes[1].chain_monitor.chain_monitor.list_monitors().len(), 1);
+	connect_blocks(&nodes[1], 4031);
+	nodes[1].chain_monitor.chain_monitor.archive_fully_resolved_channel_monitors();
+	assert_eq!(nodes[1].chain_monitor.chain_monitor.list_monitors().len(), 1);
+	connect_blocks(&nodes[1], 1);
+	nodes[1].chain_monitor.chain_monitor.archive_fully_resolved_channel_monitors();
+	assert_eq!(nodes[1].chain_monitor.chain_monitor.list_monitors().len(), 0);
+
+	// Finally, we process the pending `MonitorEvent` from nodes[0], allowing the `ChannelMonitor`
+	// to be archived 4032 blocks later.
+	expect_payment_sent(&nodes[0], payment_preimage, None, true, false);
+	nodes[0].chain_monitor.chain_monitor.archive_fully_resolved_channel_monitors();
+	assert_eq!(nodes[0].chain_monitor.chain_monitor.list_monitors().len(), 1);
+	connect_blocks(&nodes[0], 4031);
+	nodes[0].chain_monitor.chain_monitor.archive_fully_resolved_channel_monitors();
+	assert_eq!(nodes[0].chain_monitor.chain_monitor.list_monitors().len(), 1);
+	connect_blocks(&nodes[0], 1);
+	nodes[0].chain_monitor.chain_monitor.archive_fully_resolved_channel_monitors();
 	assert_eq!(nodes[0].chain_monitor.chain_monitor.list_monitors().len(), 0);
+
 	// Remove the corresponding outputs and transactions the chain source is
 	// watching. This is to make sure the `Drop` function assertions pass.
-	nodes.get_mut(0).unwrap().chain_source.remove_watched_txn_and_outputs(
-		OutPoint { txid: funding_tx.compute_txid(), index: 0 },
-		funding_tx.output[0].script_pubkey.clone()
-	);
+	for node in nodes {
+		node.chain_source.remove_watched_txn_and_outputs(
+			OutPoint { txid: funding_tx.compute_txid(), index: 0 },
+			funding_tx.output[0].script_pubkey.clone()
+		);
+		node.chain_source.remove_watched_txn_and_outputs(
+			OutPoint { txid: commitment_tx[0].compute_txid(), index: 0 },
+			commitment_tx[0].output[0].script_pubkey.clone()
+		);
+		node.chain_source.remove_watched_txn_and_outputs(
+			OutPoint { txid: commitment_tx[0].compute_txid(), index: 1 },
+			commitment_tx[0].output[1].script_pubkey.clone()
+		);
+		node.chain_source.remove_watched_txn_and_outputs(
+			OutPoint { txid: commitment_tx[0].compute_txid(), index: 2 },
+			commitment_tx[0].output[2].script_pubkey.clone()
+		);
+		node.chain_source.remove_watched_txn_and_outputs(
+			OutPoint { txid: htlc_claim_tx[0].compute_txid(), index: 0 },
+			htlc_claim_tx[0].output[0].script_pubkey.clone()
+		);
+	}
 }
 
 fn do_chanmon_claim_value_coop_close(anchors: bool) {
