Skip to content

Commit ef131e3

Browse files
Add test coverage for ChannelClosed event fields
1 parent a5dc8f2 commit ef131e3

13 files changed

+241
-195
lines changed

lightning-persister/src/lib.rs

+4-4
Original file line numberDiff line numberDiff line change
@@ -237,7 +237,7 @@ mod tests {
237237
// Force close because cooperative close doesn't result in any persisted
238238
// updates.
239239
nodes[0].node.force_close_broadcasting_latest_txn(&nodes[0].node.list_channels()[0].channel_id, &nodes[1].node.get_our_node_id()).unwrap();
240-
check_closed_event!(nodes[0], 1, ClosureReason::HolderForceClosed);
240+
check_closed_event!(nodes[0], 1, ClosureReason::HolderForceClosed, nodes[1].node.get_our_node_id(), 100000);
241241
check_closed_broadcast!(nodes[0], true);
242242
check_added_monitors!(nodes[0], 1);
243243

@@ -246,7 +246,7 @@ mod tests {
246246

247247
connect_block(&nodes[1], &create_dummy_block(nodes[0].best_block_hash(), 42, vec![node_txn[0].clone(), node_txn[0].clone()]));
248248
check_closed_broadcast!(nodes[1], true);
249-
check_closed_event!(nodes[1], 1, ClosureReason::CommitmentTxConfirmed);
249+
check_closed_event!(nodes[1], 1, ClosureReason::CommitmentTxConfirmed, nodes[0].node.get_our_node_id(), 100000);
250250
check_added_monitors!(nodes[1], 1);
251251

252252
// Make sure everything is persisted as expected after close.
@@ -270,7 +270,7 @@ mod tests {
270270
let nodes = create_network(2, &node_cfgs, &node_chanmgrs);
271271
let chan = create_announced_chan_between_nodes(&nodes, 0, 1);
272272
nodes[1].node.force_close_broadcasting_latest_txn(&chan.2, &nodes[0].node.get_our_node_id()).unwrap();
273-
check_closed_event!(nodes[1], 1, ClosureReason::HolderForceClosed);
273+
check_closed_event!(nodes[1], 1, ClosureReason::HolderForceClosed, nodes[0].node.get_our_node_id(), 100000);
274274
let mut added_monitors = nodes[1].chain_monitor.added_monitors.lock().unwrap();
275275
let update_map = nodes[1].chain_monitor.latest_monitor_update_id.lock().unwrap();
276276
let update_id = update_map.get(&added_monitors[0].0.to_channel_id()).unwrap();
@@ -309,7 +309,7 @@ mod tests {
309309
let nodes = create_network(2, &node_cfgs, &node_chanmgrs);
310310
let chan = create_announced_chan_between_nodes(&nodes, 0, 1);
311311
nodes[1].node.force_close_broadcasting_latest_txn(&chan.2, &nodes[0].node.get_our_node_id()).unwrap();
312-
check_closed_event!(nodes[1], 1, ClosureReason::HolderForceClosed);
312+
check_closed_event!(nodes[1], 1, ClosureReason::HolderForceClosed, nodes[0].node.get_our_node_id(), 100000);
313313
let mut added_monitors = nodes[1].chain_monitor.added_monitors.lock().unwrap();
314314
let update_map = nodes[1].chain_monitor.latest_monitor_update_id.lock().unwrap();
315315
let update_id = update_map.get(&added_monitors[0].0.to_channel_id()).unwrap();

lightning/src/chain/chainmonitor.rs

+4-2
Original file line numberDiff line numberDiff line change
@@ -966,7 +966,8 @@ mod tests {
966966
assert!(err.contains("ChannelMonitor storage failure")));
967967
check_added_monitors!(nodes[0], 2); // After the failure we generate a close-channel monitor update
968968
check_closed_broadcast!(nodes[0], true);
969-
check_closed_event!(nodes[0], 1, ClosureReason::ProcessingError { err: "ChannelMonitor storage failure".to_string() });
969+
check_closed_event!(nodes[0], 1, ClosureReason::ProcessingError { err: "ChannelMonitor storage failure".to_string() },
970+
nodes[1].node.get_our_node_id(), 100000);
970971

971972
// However, as the ChainMonitor is still waiting for the original persistence to complete,
972973
// it won't yet release the MonitorEvents.
@@ -1013,7 +1014,8 @@ mod tests {
10131014
// ... however once we get events once, the channel will close, creating a channel-closed
10141015
// ChannelMonitorUpdate.
10151016
check_closed_broadcast!(nodes[0], true);
1016-
check_closed_event!(nodes[0], 1, ClosureReason::ProcessingError { err: "Failed to persist ChannelMonitor update during chain sync".to_string() });
1017+
check_closed_event!(nodes[0], 1, ClosureReason::ProcessingError { err: "Failed to persist ChannelMonitor update during chain sync".to_string() },
1018+
nodes[1].node.get_our_node_id(), 100000);
10171019
check_added_monitors!(nodes[0], 1);
10181020
}
10191021
}

lightning/src/chain/channelmonitor.rs

+2-1
Original file line numberDiff line numberDiff line change
@@ -4274,7 +4274,8 @@ mod tests {
42744274
assert!(err.contains("ChannelMonitor storage failure")));
42754275
check_added_monitors!(nodes[1], 2); // After the failure we generate a close-channel monitor update
42764276
check_closed_broadcast!(nodes[1], true);
4277-
check_closed_event!(nodes[1], 1, ClosureReason::ProcessingError { err: "ChannelMonitor storage failure".to_string() });
4277+
check_closed_event!(nodes[1], 1, ClosureReason::ProcessingError { err: "ChannelMonitor storage failure".to_string() },
4278+
nodes[0].node.get_our_node_id(), 100000);
42784279

42794280
// Build a new ChannelMonitorUpdate which contains both the failing commitment tx update
42804281
// and provides the claim preimages for the two pending HTLCs. The first update generates

lightning/src/ln/chanmon_update_fail_tests.rs

+14-11
Original file line numberDiff line numberDiff line change
@@ -70,7 +70,8 @@ fn test_simple_monitor_permanent_update_fail() {
7070
// PaymentPathFailed event
7171

7272
assert_eq!(nodes[0].node.list_channels().len(), 0);
73-
check_closed_event!(nodes[0], 1, ClosureReason::ProcessingError { err: "ChannelMonitor storage failure".to_string() });
73+
check_closed_event!(nodes[0], 1, ClosureReason::ProcessingError { err: "ChannelMonitor storage failure".to_string() },
74+
nodes[1].node.get_our_node_id(), 100000);
7475
}
7576

7677
#[test]
@@ -245,7 +246,7 @@ fn do_test_simple_monitor_temporary_update_fail(disconnect: bool) {
245246
// PaymentPathFailed event
246247

247248
assert_eq!(nodes[0].node.list_channels().len(), 0);
248-
check_closed_event!(nodes[0], 1, ClosureReason::HolderForceClosed);
249+
check_closed_event!(nodes[0], 1, ClosureReason::HolderForceClosed, nodes[1].node.get_our_node_id(), 100000);
249250
}
250251

251252
#[test]
@@ -1983,8 +1984,8 @@ fn do_during_funding_monitor_fail(confirm_a_first: bool, restore_b_before_conf:
19831984

19841985
send_payment(&nodes[0], &[&nodes[1]], 8000000);
19851986
close_channel(&nodes[0], &nodes[1], &channel_id, funding_tx, true);
1986-
check_closed_event!(nodes[0], 1, ClosureReason::CooperativeClosure);
1987-
check_closed_event!(nodes[1], 1, ClosureReason::CooperativeClosure);
1987+
check_closed_event!(nodes[0], 1, ClosureReason::CooperativeClosure, nodes[1].node.get_our_node_id(), 100000);
1988+
check_closed_event!(nodes[1], 1, ClosureReason::CooperativeClosure, nodes[0].node.get_our_node_id(), 100000);
19881989
}
19891990

19901991
#[test]
@@ -2184,7 +2185,7 @@ fn test_fail_htlc_on_broadcast_after_claim() {
21842185
expect_payment_forwarded!(nodes[1], nodes[0], nodes[2], Some(1000), false, false);
21852186

21862187
mine_transaction(&nodes[1], &bs_txn[0]);
2187-
check_closed_event!(nodes[1], 1, ClosureReason::CommitmentTxConfirmed);
2188+
check_closed_event!(nodes[1], 1, ClosureReason::CommitmentTxConfirmed, nodes[2].node.get_our_node_id(), 100000);
21882189
check_closed_broadcast!(nodes[1], true);
21892190
connect_blocks(&nodes[1], ANTI_REORG_DELAY - 1);
21902191
check_added_monitors!(nodes[1], 1);
@@ -2651,8 +2652,8 @@ fn test_temporary_error_during_shutdown() {
26512652
assert_eq!(txn_a, txn_b);
26522653
assert_eq!(txn_a.len(), 1);
26532654
check_spends!(txn_a[0], funding_tx);
2654-
check_closed_event!(nodes[1], 1, ClosureReason::CooperativeClosure);
2655-
check_closed_event!(nodes[0], 1, ClosureReason::CooperativeClosure);
2655+
check_closed_event!(nodes[1], 1, ClosureReason::CooperativeClosure, nodes[0].node.get_our_node_id(), 100000);
2656+
check_closed_event!(nodes[0], 1, ClosureReason::CooperativeClosure, nodes[1].node.get_our_node_id(), 100000);
26562657
}
26572658

26582659
#[test]
@@ -2681,7 +2682,8 @@ fn test_permanent_error_during_sending_shutdown() {
26812682
if let MessageSendEvent::HandleError { .. } = msg_events[2] {} else { panic!(); }
26822683

26832684
check_added_monitors!(nodes[0], 2);
2684-
check_closed_event!(nodes[0], 1, ClosureReason::ProcessingError { err: "ChannelMonitor storage failure".to_string() });
2685+
check_closed_event!(nodes[0], 1, ClosureReason::ProcessingError { err: "ChannelMonitor storage failure".to_string() },
2686+
nodes[1].node.get_our_node_id(), 100000);
26852687
}
26862688

26872689
#[test]
@@ -2712,7 +2714,8 @@ fn test_permanent_error_during_handling_shutdown() {
27122714
if let MessageSendEvent::HandleError { .. } = msg_events[2] {} else { panic!(); }
27132715

27142716
check_added_monitors!(nodes[1], 2);
2715-
check_closed_event!(nodes[1], 1, ClosureReason::ProcessingError { err: "ChannelMonitor storage failure".to_string() });
2717+
check_closed_event!(nodes[1], 1, ClosureReason::ProcessingError { err: "ChannelMonitor storage failure".to_string() },
2718+
nodes[0].node.get_our_node_id(), 100000);
27162719
}
27172720

27182721
#[test]
@@ -2906,7 +2909,7 @@ fn do_test_outbound_reload_without_init_mon(use_0conf: bool) {
29062909
nodes[0].chain_source.watched_outputs.lock().unwrap().clear();
29072910

29082911
reload_node!(nodes[0], &nodes[0].node.encode(), &[], persister, new_chain_monitor, nodes_0_deserialized);
2909-
check_closed_event!(nodes[0], 1, ClosureReason::DisconnectedPeer);
2912+
check_closed_event!(nodes[0], 1, ClosureReason::DisconnectedPeer, nodes[1].node.get_our_node_id(), 100000);
29102913
assert!(nodes[0].node.list_channels().is_empty());
29112914
}
29122915

@@ -2993,7 +2996,7 @@ fn do_test_inbound_reload_without_init_mon(use_0conf: bool, lock_commitment: boo
29932996

29942997
reload_node!(nodes[1], &nodes[1].node.encode(), &[], persister, new_chain_monitor, nodes_1_deserialized);
29952998

2996-
check_closed_event!(nodes[1], 1, ClosureReason::DisconnectedPeer);
2999+
check_closed_event!(nodes[1], 1, ClosureReason::DisconnectedPeer, nodes[0].node.get_our_node_id(), 100000);
29973000
assert!(nodes[1].node.list_channels().is_empty());
29983001
}
29993002

lightning/src/ln/channelmanager.rs

+4-4
Original file line numberDiff line numberDiff line change
@@ -9522,7 +9522,7 @@ mod tests {
95229522
nodes[0].node.force_close_broadcasting_latest_txn(&chan.2, &nodes[1].node.get_our_node_id()).unwrap();
95239523
check_closed_broadcast!(nodes[0], true);
95249524
check_added_monitors!(nodes[0], 1);
9525-
check_closed_event!(nodes[0], 1, ClosureReason::HolderForceClosed);
9525+
check_closed_event!(nodes[0], 1, ClosureReason::HolderForceClosed, nodes[1].node.get_our_node_id(), 100000);
95269526

95279527
{
95289528
// Assert that nodes[1] is awaiting removal for nodes[0] once nodes[1] has been
@@ -9685,8 +9685,8 @@ mod tests {
96859685
}
96869686
let (_nodes_1_update, _none) = get_closing_signed_broadcast!(nodes[1].node, nodes[0].node.get_our_node_id());
96879687

9688-
check_closed_event!(nodes[0], 1, ClosureReason::CooperativeClosure);
9689-
check_closed_event!(nodes[1], 1, ClosureReason::CooperativeClosure);
9688+
check_closed_event!(nodes[0], 1, ClosureReason::CooperativeClosure, nodes[1].node.get_our_node_id(), 1000000);
9689+
check_closed_event!(nodes[1], 1, ClosureReason::CooperativeClosure, nodes[0].node.get_our_node_id(), 1000000);
96909690
}
96919691

96929692
fn check_not_connected_to_peer_error<T>(res_err: Result<T, APIError>, expected_public_key: PublicKey) {
@@ -10081,7 +10081,7 @@ mod tests {
1008110081
let open_channel_msg = get_event_msg!(nodes[0], MessageSendEvent::SendOpenChannel, nodes[1].node.get_our_node_id());
1008210082
assert!(!open_channel_msg.channel_type.unwrap().supports_anchors_zero_fee_htlc_tx());
1008310083

10084-
check_closed_event!(nodes[1], 1, ClosureReason::HolderForceClosed);
10084+
check_closed_event!(nodes[1], 1, ClosureReason::HolderForceClosed, nodes[0].node.get_our_node_id(), 100000);
1008510085
}
1008610086

1008710087
#[test]

lightning/src/ln/functional_test_utils.rs

+16-6
Original file line numberDiff line numberDiff line change
@@ -1399,14 +1399,23 @@ macro_rules! check_closed_broadcast {
13991399
}
14001400

14011401
/// Check that a channel's closing channel events has been issued
1402-
pub fn check_closed_event(node: &Node, events_count: usize, expected_reason: ClosureReason, is_check_discard_funding: bool) {
1402+
pub fn check_closed_event(node: &Node, events_count: usize, expected_reason: ClosureReason, is_check_discard_funding: bool,
1403+
expected_counterparty_node_id: &PublicKey, expected_channel_capacity: u64) {
14031404
let events = node.node.get_and_clear_pending_events();
14041405
assert_eq!(events.len(), events_count, "{:?}", events);
14051406
let mut issues_discard_funding = false;
14061407
for event in events {
14071408
match event {
1408-
Event::ChannelClosed { ref reason, .. } => {
1409+
Event::ChannelClosed { ref reason, counterparty_node_id,
1410+
channel_capacity, .. } => {
14091411
assert_eq!(*reason, expected_reason);
1412+
if let Some(counterparty_node_id) = counterparty_node_id {
1413+
assert_eq!(counterparty_node_id, *expected_counterparty_node_id);
1414+
}
1415+
1416+
if let Some(channel_capacity) = channel_capacity {
1417+
assert_eq!(channel_capacity, expected_channel_capacity);
1418+
}
14101419
},
14111420
Event::DiscardFunding { .. } => {
14121421
issues_discard_funding = true;
@@ -1422,11 +1431,12 @@ pub fn check_closed_event(node: &Node, events_count: usize, expected_reason: Clo
14221431
/// Don't use this, use the identically-named function instead.
14231432
#[macro_export]
14241433
macro_rules! check_closed_event {
1425-
($node: expr, $events: expr, $reason: expr) => {
1426-
check_closed_event!($node, $events, $reason, false);
1434+
($node: expr, $events: expr, $reason: expr, $counterparty_node_id: expr, $channel_capacity: expr) => {
1435+
check_closed_event!($node, $events, $reason, false, $counterparty_node_id, $channel_capacity);
14271436
};
1428-
($node: expr, $events: expr, $reason: expr, $is_check_discard_funding: expr) => {
1429-
$crate::ln::functional_test_utils::check_closed_event(&$node, $events, $reason, $is_check_discard_funding);
1437+
($node: expr, $events: expr, $reason: expr, $is_check_discard_funding: expr, $counterparty_node_id: expr, $channel_capacity: expr) => {
1438+
$crate::ln::functional_test_utils::check_closed_event(&$node, $events, $reason,
1439+
$is_check_discard_funding, &$counterparty_node_id, $channel_capacity);
14301440
}
14311441
}
14321442

0 commit comments

Comments
 (0)