Allow for user-specified error message during force close channel #2889


Merged: 1 commit, Jun 3, 2024
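
What changed: the force-close APIs on `ChannelManager` now take a caller-supplied error message, which (per the PR title) is the text sent to the counterparty when the channel is force-closed. `force_close_broadcasting_latest_txn` gains a trailing `error_message: String` parameter, and `force_close_all_channels_broadcasting_latest_txn` takes the same message for every channel it closes. A minimal caller sketch inferred from the call sites in the diff below; `channel_manager`, `channel_id`, and `counterparty_node_id` are assumed to already be in scope:

// Force-close one channel, broadcasting the latest commitment transaction.
// The owned String is the user-specified error message this PR introduces.
let error_message = "Channel force-closed".to_string();
channel_manager.force_close_broadcasting_latest_txn(
    &channel_id,
    &counterparty_node_id,
    error_message,
).unwrap();

// The bulk variant now takes the same user-specified message:
channel_manager.force_close_all_channels_broadcasting_latest_txn(
    "Channel force-closed".to_string(),
);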
7 changes: 5 additions & 2 deletions fuzz/src/full_stack.rs
@@ -216,6 +216,7 @@ struct MoneyLossDetector<'a> {
     height: usize,
     max_height: usize,
     blocks_connected: u32,
+    error_message: String,
 }
 impl<'a> MoneyLossDetector<'a> {
     pub fn new(peers: &'a RefCell<[bool; 256]>,
@@ -234,6 +235,7 @@ impl<'a> MoneyLossDetector<'a> {
         height: 0,
         max_height: 0,
         blocks_connected: 0,
+        error_message: "Channel force-closed".to_string(),
     }
 }

@@ -288,7 +290,7 @@ impl<'a> Drop for MoneyLossDetector<'a> {
             }

             // Force all channels onto the chain (and time out claim txn)
-            self.manager.force_close_all_channels_broadcasting_latest_txn();
+            self.manager.force_close_all_channels_broadcasting_latest_txn(self.error_message.to_string());
         }
     }
 }
@@ -731,9 +733,10 @@ pub fn do_test(mut data: &[u8], logger: &Arc<dyn Logger>) {
         14 => {
             let mut channels = channelmanager.list_channels();
             let channel_id = get_slice!(1)[0] as usize;
+            let error_message = "Channel force-closed";
             if channel_id >= channels.len() { return; }
             channels.sort_by(|a, b| { a.channel_id.cmp(&b.channel_id) });
-            channelmanager.force_close_broadcasting_latest_txn(&channels[channel_id].channel_id, &channels[channel_id].counterparty.node_id).unwrap();
+            channelmanager.force_close_broadcasting_latest_txn(&channels[channel_id].channel_id, &channels[channel_id].counterparty.node_id, error_message.to_string()).unwrap();
         },
         // 15, 16, 17, 18 is above
         19 => {
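
One detail worth calling out in the fuzz changes above: the new parameter is an owned `String`, so call sites convert with `.to_string()`, and `MoneyLossDetector` stores a default message as a field so its `Drop` impl can pass a copy when it force-closes everything. A condensed, self-contained sketch of that ownership pattern; the `Harness` type and `force_close_all` function are hypothetical stand-ins, not LDK API:

// Hypothetical stand-in for manager.force_close_all_channels_broadcasting_latest_txn.
fn force_close_all(_error_message: String) {}

struct Harness {
    // Default message, set once at construction (mirrors MoneyLossDetector).
    error_message: String,
}

impl Harness {
    fn new() -> Self {
        Self { error_message: "Channel force-closed".to_string() }
    }
}

impl Drop for Harness {
    fn drop(&mut self) {
        // The callee takes ownership, so hand it a fresh copy of the field.
        force_close_all(self.error_message.to_string());
    }
}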
6 changes: 4 additions & 2 deletions lightning-background-processor/src/lib.rs
@@ -1416,7 +1416,8 @@ mod tests {
     }

     // Force-close the channel.
-    nodes[0].node.force_close_broadcasting_latest_txn(&ChannelId::v1_from_funding_outpoint(OutPoint { txid: tx.txid(), index: 0 }), &nodes[1].node.get_our_node_id()).unwrap();
+    let error_message = "Channel force-closed";
+    nodes[0].node.force_close_broadcasting_latest_txn(&ChannelId::v1_from_funding_outpoint(OutPoint { txid: tx.txid(), index: 0 }), &nodes[1].node.get_our_node_id(), error_message.to_string()).unwrap();

     // Check that the force-close updates are persisted.
     check_persisted_data!(nodes[0].node, filepath.clone());
@@ -1609,7 +1610,8 @@
     let bg_processor = BackgroundProcessor::start(persister, event_handler, nodes[0].chain_monitor.clone(), nodes[0].node.clone(), nodes[0].no_gossip_sync(), nodes[0].peer_manager.clone(), nodes[0].logger.clone(), Some(nodes[0].scorer.clone()));

     // Force close the channel and check that the SpendableOutputs event was handled.
-    nodes[0].node.force_close_broadcasting_latest_txn(&nodes[0].node.list_channels()[0].channel_id, &nodes[1].node.get_our_node_id()).unwrap();
+    let error_message = "Channel force-closed";
+    nodes[0].node.force_close_broadcasting_latest_txn(&nodes[0].node.list_channels()[0].channel_id, &nodes[1].node.get_our_node_id(), error_message.to_string()).unwrap();
     let commitment_tx = nodes[0].tx_broadcaster.txn_broadcasted.lock().unwrap().pop().unwrap();
     confirm_transaction_depth(&mut nodes[0], &commitment_tx, BREAKDOWN_TIMEOUT as u32);

6 changes: 4 additions & 2 deletions lightning-persister/src/fs_store.rs
@@ -445,7 +445,8 @@ mod tests {
         let node_chanmgrs = create_node_chanmgrs(2, &node_cfgs, &[None, None]);
         let nodes = create_network(2, &node_cfgs, &node_chanmgrs);
         let chan = create_announced_chan_between_nodes(&nodes, 0, 1);
-        nodes[1].node.force_close_broadcasting_latest_txn(&chan.2, &nodes[0].node.get_our_node_id()).unwrap();
+        let error_message = "Channel force-closed";
+        nodes[1].node.force_close_broadcasting_latest_txn(&chan.2, &nodes[0].node.get_our_node_id(), error_message.to_string()).unwrap();
         check_closed_event!(nodes[1], 1, ClosureReason::HolderForceClosed, [nodes[0].node.get_our_node_id()], 100000);
         let mut added_monitors = nodes[1].chain_monitor.added_monitors.lock().unwrap();
         let update_map = nodes[1].chain_monitor.latest_monitor_update_id.lock().unwrap();
@@ -484,7 +485,8 @@
         let node_chanmgrs = create_node_chanmgrs(2, &node_cfgs, &[None, None]);
         let nodes = create_network(2, &node_cfgs, &node_chanmgrs);
         let chan = create_announced_chan_between_nodes(&nodes, 0, 1);
-        nodes[1].node.force_close_broadcasting_latest_txn(&chan.2, &nodes[0].node.get_our_node_id()).unwrap();
+        let error_message = "Channel force-closed";
+        nodes[1].node.force_close_broadcasting_latest_txn(&chan.2, &nodes[0].node.get_our_node_id(), error_message.to_string()).unwrap();
         check_closed_event!(nodes[1], 1, ClosureReason::HolderForceClosed, [nodes[0].node.get_our_node_id()], 100000);
         let mut added_monitors = nodes[1].chain_monitor.added_monitors.lock().unwrap();
         let update_map = nodes[1].chain_monitor.latest_monitor_update_id.lock().unwrap();
3 changes: 2 additions & 1 deletion lightning-persister/src/test_utils.rs
@@ -104,7 +104,8 @@ pub(crate) fn do_test_store<K: KVStore>(store_0: &K, store_1: &K) {

     // Force close because cooperative close doesn't result in any persisted
     // updates.
-    nodes[0].node.force_close_broadcasting_latest_txn(&nodes[0].node.list_channels()[0].channel_id, &nodes[1].node.get_our_node_id()).unwrap();
+    let error_message = "Channel force-closed";
+    nodes[0].node.force_close_broadcasting_latest_txn(&nodes[0].node.list_channels()[0].channel_id, &nodes[1].node.get_our_node_id(), error_message.to_string()).unwrap();
     check_closed_event!(nodes[0], 1, ClosureReason::HolderForceClosed, [nodes[1].node.get_our_node_id()], 100000);
     check_closed_broadcast!(nodes[0], true);
     check_added_monitors!(nodes[0], 1);
3 changes: 2 additions & 1 deletion lightning/src/ln/async_signer_tests.rs
@@ -363,12 +363,13 @@ fn do_test_async_holder_signatures(anchors: bool, remote_commitment: bool) {
     // Route an HTLC and set the signer as unavailable.
     let (_, _, chan_id, funding_tx) = create_announced_chan_between_nodes(&nodes, 0, 1);
     route_payment(&nodes[0], &[&nodes[1]], 1_000_000);
+    let error_message = "Channel force-closed";

     nodes[0].set_channel_signer_available(&nodes[1].node.get_our_node_id(), &chan_id, false);

     if remote_commitment {
         // Make the counterparty broadcast its latest commitment.
-        nodes[1].node.force_close_broadcasting_latest_txn(&chan_id, &nodes[0].node.get_our_node_id()).unwrap();
+        nodes[1].node.force_close_broadcasting_latest_txn(&chan_id, &nodes[0].node.get_our_node_id(), error_message.to_string()).unwrap();
         check_added_monitors(&nodes[1], 1);
         check_closed_broadcast(&nodes[1], 1, true);
         check_closed_event(&nodes[1], 1, ClosureReason::HolderForceClosed, false, &[nodes[0].node.get_our_node_id()], 100_000);
3 changes: 2 additions & 1 deletion lightning/src/ln/blinded_payment_tests.rs
@@ -457,6 +457,7 @@ fn do_forward_fail_in_process_pending_htlc_fwds(check: ProcessPendingHTLCsCheck,
         (chan.0.contents, chan.2)
     };

+    let error_message = "Channel force-closed";
     let amt_msat = 5000;
     let (_, payment_hash, payment_secret) = get_payment_preimage_hash(&nodes[2], Some(amt_msat), None);
     let route_params = get_blinded_route_parameters(amt_msat, payment_secret, 1, 1_0000_0000,
@@ -489,7 +490,7 @@
         ProcessPendingHTLCsCheck::FwdChannelClosed => {
             // Force close the next-hop channel so when we go to forward in process_pending_htlc_forwards,
             // the intro node will error backwards.
-            $curr_node.node.force_close_broadcasting_latest_txn(&$failed_chan_id, &$next_node.node.get_our_node_id()).unwrap();
+            $curr_node.node.force_close_broadcasting_latest_txn(&$failed_chan_id, &$next_node.node.get_our_node_id(), error_message.to_string()).unwrap();
             let events = $curr_node.node.get_and_clear_pending_events();
             match events[0] {
                 crate::events::Event::PendingHTLCsForwardable { .. } => {},
14 changes: 9 additions & 5 deletions lightning/src/ln/chanmon_update_fail_tests.rs
@@ -206,7 +206,8 @@ fn do_test_simple_monitor_temporary_update_fail(disconnect: bool) {
     }

     // ...and make sure we can force-close a frozen channel
-    nodes[0].node.force_close_broadcasting_latest_txn(&channel_id, &nodes[1].node.get_our_node_id()).unwrap();
+    let error_message = "Channel force-closed";
+    nodes[0].node.force_close_broadcasting_latest_txn(&channel_id, &nodes[1].node.get_our_node_id(), error_message.to_string()).unwrap();
     check_added_monitors!(nodes[0], 1);
     check_closed_broadcast!(nodes[0], true);

@@ -3221,17 +3222,18 @@ fn do_test_durable_preimages_on_closed_channel(close_chans_before_reload: bool,
     let _ = get_revoke_commit_msgs!(nodes[1], nodes[2].node.get_our_node_id());

     let mon_bc = get_monitor!(nodes[1], chan_id_bc).encode();
+    let error_message = "Channel force-closed";

     if close_chans_before_reload {
         if !close_only_a {
             chanmon_cfgs[1].persister.set_update_ret(ChannelMonitorUpdateStatus::InProgress);
-            nodes[1].node.force_close_broadcasting_latest_txn(&chan_id_bc, &nodes[2].node.get_our_node_id()).unwrap();
+            nodes[1].node.force_close_broadcasting_latest_txn(&chan_id_bc, &nodes[2].node.get_our_node_id(), error_message.to_string()).unwrap();
             check_closed_broadcast(&nodes[1], 1, true);
             check_closed_event(&nodes[1], 1, ClosureReason::HolderForceClosed, false, &[nodes[2].node.get_our_node_id()], 100000);
         }

         chanmon_cfgs[1].persister.set_update_ret(ChannelMonitorUpdateStatus::InProgress);
-        nodes[1].node.force_close_broadcasting_latest_txn(&chan_id_ab, &nodes[0].node.get_our_node_id()).unwrap();
+        nodes[1].node.force_close_broadcasting_latest_txn(&chan_id_ab, &nodes[0].node.get_our_node_id(), error_message.to_string()).unwrap();
         check_closed_broadcast(&nodes[1], 1, true);
         check_closed_event(&nodes[1], 1, ClosureReason::HolderForceClosed, false, &[nodes[0].node.get_our_node_id()], 100000);
     }
@@ -3252,8 +3254,9 @@
             assert_eq!(bs_close_txn.len(), 3);
         }
     }
+    let error_message = "Channel force-closed";

-    nodes[0].node.force_close_broadcasting_latest_txn(&chan_id_ab, &nodes[1].node.get_our_node_id()).unwrap();
+    nodes[0].node.force_close_broadcasting_latest_txn(&chan_id_ab, &nodes[1].node.get_our_node_id(), error_message.to_string()).unwrap();
     check_closed_event(&nodes[0], 1, ClosureReason::HolderForceClosed, false, &[nodes[1].node.get_our_node_id()], 100000);
     let as_closing_tx = nodes[0].tx_broadcaster.txn_broadcasted.lock().unwrap().split_off(0);
     assert_eq!(as_closing_tx.len(), 1);
@@ -3390,10 +3393,11 @@ fn do_test_reload_mon_update_completion_actions(close_during_reload: bool) {
     let manager_b = nodes[1].node.encode();
     reload_node!(nodes[1], &manager_b, &[&mon_ab, &mon_bc], persister, new_chain_monitor, nodes_1_deserialized);

+    let error_message = "Channel force-closed";
     if close_during_reload {
         // Test that we still free the B<->C channel if the A<->B channel closed while we reloaded
         // (as learned about during the on-reload block connection).
-        nodes[0].node.force_close_broadcasting_latest_txn(&chan_id_ab, &nodes[1].node.get_our_node_id()).unwrap();
+        nodes[0].node.force_close_broadcasting_latest_txn(&chan_id_ab, &nodes[1].node.get_our_node_id(), error_message.to_string()).unwrap();
         check_added_monitors!(nodes[0], 1);
         check_closed_broadcast!(nodes[0], true);
         check_closed_event(&nodes[0], 1, ClosureReason::HolderForceClosed, false, &[nodes[1].node.get_our_node_id()], 100_000);
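
Context not shown in this diff, stated as my understanding of LDK's force-close path rather than something the PR demonstrates: the supplied text is delivered to the counterparty in the channel `error` message, and the peer then surfaces it in its `Event::ChannelClosed` as `ClosureReason::CounterpartyForceClosed { peer_msg }`. A peer-side sketch under that assumption:

use lightning::events::{ClosureReason, Event};

// Hedged sketch: observing the counterparty's custom force-close message.
// `peer_msg` is an UntrustedString: fine to display, never trust or parse.
fn handle_event(event: Event) {
    if let Event::ChannelClosed { channel_id, reason, .. } = event {
        if let ClosureReason::CounterpartyForceClosed { peer_msg } = reason {
            println!("channel {} force-closed by peer: {}", channel_id, peer_msg);
        }
    }
}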