Commit 257a6f3

Merge pull request #1475 from atalw/2022-04-paymentforwarded-event
Expose `next_channel_id` in `PaymentForwarded` event
Parents: e5c988e + 1ae1de9

12 files changed (+105 / -83 lines changed)
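
For integrators, the visible change is one additional field on `Event::PaymentForwarded`: alongside `fee_earned_msat`, `claim_from_onchain_tx`, and `prev_channel_id` (renamed from `source_channel_id`), the event now carries `next_channel_id`, the channel over which the HTLC left the forwarding node. A minimal, hypothetical sketch of reading the extended event follows; the `lightning::util::events` path and the free-standing handler function are assumptions for illustration, while the field names come from the diff below.

    // Hypothetical sketch, not part of this commit: logging the extended
    // PaymentForwarded event. Field names are taken from the diff below; the
    // module path is assumed for the `lightning` crate of this era.
    use lightning::util::events::Event;

    fn log_forwarded(event: &Event) {
        if let Event::PaymentForwarded { fee_earned_msat, claim_from_onchain_tx, prev_channel_id, next_channel_id } = event {
            // `prev_channel_id` is the channel the forwarded HTLC arrived on;
            // `next_channel_id` (new in this PR) is the channel it went out over.
            println!(
                "forwarded: prev={:?} next={:?} fee_msat={:?} claimed_onchain={}",
                prev_channel_id, next_channel_id, fee_earned_msat, claim_from_onchain_tx
            );
        }
    }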

fuzz/src/chanmon_consistency.rs

Lines changed: 1 addition & 1 deletion
@@ -148,7 +148,7 @@ impl chain::Watch<EnforcingSigner> for TestChainMonitor {
         self.chain_monitor.update_channel(funding_txo, update)
     }

-    fn release_pending_monitor_events(&self) -> Vec<MonitorEvent> {
+    fn release_pending_monitor_events(&self) -> Vec<(OutPoint, Vec<MonitorEvent>)> {
         return self.chain_monitor.release_pending_monitor_events();
     }
 }

lightning/src/chain/chainmonitor.rs

Lines changed: 12 additions & 8 deletions
@@ -235,7 +235,7 @@ pub struct ChainMonitor<ChannelSigner: Sign, C: Deref, T: Deref, F: Deref, L: De
     persister: P,
     /// "User-provided" (ie persistence-completion/-failed) [`MonitorEvent`]s. These came directly
     /// from the user and not from a [`ChannelMonitor`].
-    pending_monitor_events: Mutex<Vec<MonitorEvent>>,
+    pending_monitor_events: Mutex<Vec<(OutPoint, Vec<MonitorEvent>)>>,
     /// The best block height seen, used as a proxy for the passage of time.
     highest_chain_height: AtomicUsize,
 }
@@ -299,7 +299,7 @@ where C::Target: chain::Filter,
                 log_trace!(self.logger, "Finished syncing Channel Monitor for channel {}", log_funding_info!(monitor)),
             Err(ChannelMonitorUpdateErr::PermanentFailure) => {
                 monitor_state.channel_perm_failed.store(true, Ordering::Release);
-                self.pending_monitor_events.lock().unwrap().push(MonitorEvent::UpdateFailed(*funding_outpoint));
+                self.pending_monitor_events.lock().unwrap().push((*funding_outpoint, vec![MonitorEvent::UpdateFailed(*funding_outpoint)]));
             },
             Err(ChannelMonitorUpdateErr::TemporaryFailure) => {
                 log_debug!(self.logger, "Channel Monitor sync for channel {} in progress, holding events until completion!", log_funding_info!(monitor));
@@ -455,10 +455,10 @@ where C::Target: chain::Filter,
                     // UpdateCompleted event.
                     return Ok(());
                 }
-                self.pending_monitor_events.lock().unwrap().push(MonitorEvent::UpdateCompleted {
+                self.pending_monitor_events.lock().unwrap().push((funding_txo, vec![MonitorEvent::UpdateCompleted {
                     funding_txo,
                     monitor_update_id: monitor_data.monitor.get_latest_update_id(),
-                });
+                }]));
             },
             MonitorUpdateId { contents: UpdateOrigin::ChainSync(_) } => {
                 if !monitor_data.has_pending_chainsync_updates(&pending_monitor_updates) {
@@ -476,10 +476,10 @@ where C::Target: chain::Filter,
     /// channel_monitor_updated once with the highest ID.
     #[cfg(any(test, fuzzing))]
     pub fn force_channel_monitor_updated(&self, funding_txo: OutPoint, monitor_update_id: u64) {
-        self.pending_monitor_events.lock().unwrap().push(MonitorEvent::UpdateCompleted {
+        self.pending_monitor_events.lock().unwrap().push((funding_txo, vec![MonitorEvent::UpdateCompleted {
             funding_txo,
             monitor_update_id,
-        });
+        }]));
     }

     #[cfg(any(test, fuzzing, feature = "_test_utils"))]
@@ -666,7 +666,7 @@ where C::Target: chain::Filter,
             }
         }

-    fn release_pending_monitor_events(&self) -> Vec<MonitorEvent> {
+    fn release_pending_monitor_events(&self) -> Vec<(OutPoint, Vec<MonitorEvent>)> {
         let mut pending_monitor_events = self.pending_monitor_events.lock().unwrap().split_off(0);
         for monitor_state in self.monitors.read().unwrap().values() {
             let is_pending_monitor_update = monitor_state.has_pending_chainsync_updates(&monitor_state.pending_monitor_updates.lock().unwrap());
@@ -692,7 +692,11 @@ where C::Target: chain::Filter,
                     log_error!(self.logger, " To avoid funds-loss, we are allowing monitor updates to be released.");
                     log_error!(self.logger, " This may cause duplicate payment events to be generated.");
                 }
-                pending_monitor_events.append(&mut monitor_state.monitor.get_and_clear_pending_monitor_events());
+                let monitor_events = monitor_state.monitor.get_and_clear_pending_monitor_events();
+                if monitor_events.len() > 0 {
+                    let monitor_outpoint = monitor_state.monitor.get_funding_txo().0;
+                    pending_monitor_events.push((monitor_outpoint, monitor_events));
+                }
             }
         }
         pending_monitor_events

lightning/src/chain/mod.rs

Lines changed: 1 addition & 1 deletion
@@ -302,7 +302,7 @@ pub trait Watch<ChannelSigner: Sign> {
     ///
     /// For details on asynchronous [`ChannelMonitor`] updating and returning
     /// [`MonitorEvent::UpdateCompleted`] here, see [`ChannelMonitorUpdateErr::TemporaryFailure`].
-    fn release_pending_monitor_events(&self) -> Vec<MonitorEvent>;
+    fn release_pending_monitor_events(&self) -> Vec<(OutPoint, Vec<MonitorEvent>)>;
 }

 /// The `Filter` trait defines behavior for indicating chain activity of interest pertaining to
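
Because `release_pending_monitor_events` is part of the public `Watch` trait, any out-of-tree implementation (the fuzz harness's `TestChainMonitor` above is one example) must now return its pending events grouped under the funding outpoint of the monitor that produced them, which is what lets `ChannelManager` recover a `next_channel_id` for on-chain claims. A hedged sketch of that grouping follows; the `per_monitor` map is a hypothetical stand-in for an implementor's own state, and only the tuple type comes from the trait change above.

    // Sketch only, assuming this release's `lightning` crate paths: produce the
    // per-outpoint grouping the new trait signature expects. `per_monitor` is a
    // hypothetical store of each monitor's pending events.
    use std::collections::HashMap;

    use lightning::chain::channelmonitor::MonitorEvent;
    use lightning::chain::transaction::OutPoint;

    fn release_grouped(
        per_monitor: &mut HashMap<OutPoint, Vec<MonitorEvent>>,
    ) -> Vec<(OutPoint, Vec<MonitorEvent>)> {
        per_monitor
            .drain()
            // Mirror ChainMonitor's `monitor_events.len() > 0` check: monitors with
            // nothing pending contribute no entry.
            .filter(|(_, events)| !events.is_empty())
            .collect()
    }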

lightning/src/ln/chanmon_update_fail_tests.rs

Lines changed: 3 additions & 3 deletions
@@ -1102,7 +1102,7 @@ fn test_monitor_update_fail_reestablish() {
     assert!(updates.update_fee.is_none());
     assert_eq!(updates.update_fulfill_htlcs.len(), 1);
     nodes[1].node.handle_update_fulfill_htlc(&nodes[2].node.get_our_node_id(), &updates.update_fulfill_htlcs[0]);
-    expect_payment_forwarded!(nodes[1], nodes[0], Some(1000), false);
+    expect_payment_forwarded!(nodes[1], nodes[0], nodes[2], Some(1000), false, false);
     check_added_monitors!(nodes[1], 1);
     assert!(nodes[1].node.get_and_clear_pending_msg_events().is_empty());
     commitment_signed_dance!(nodes[1], nodes[2], updates.commitment_signed, false);
@@ -2087,7 +2087,7 @@ fn test_fail_htlc_on_broadcast_after_claim() {
     nodes[1].node.handle_update_fulfill_htlc(&nodes[2].node.get_our_node_id(), &cs_updates.update_fulfill_htlcs[0]);
     let bs_updates = get_htlc_update_msgs!(nodes[1], nodes[0].node.get_our_node_id());
     check_added_monitors!(nodes[1], 1);
-    expect_payment_forwarded!(nodes[1], nodes[0], Some(1000), false);
+    expect_payment_forwarded!(nodes[1], nodes[0], nodes[2], Some(1000), false, false);

     mine_transaction(&nodes[1], &bs_txn[0]);
     check_closed_event!(nodes[1], 1, ClosureReason::CommitmentTxConfirmed);
@@ -2468,7 +2468,7 @@ fn do_test_reconnect_dup_htlc_claims(htlc_status: HTLCStatusAtDupClaim, second_f
         assert_eq!(fulfill_msg, cs_updates.update_fulfill_htlcs[0]);
     }
     nodes[1].node.handle_update_fulfill_htlc(&nodes[2].node.get_our_node_id(), &fulfill_msg);
-    expect_payment_forwarded!(nodes[1], nodes[0], Some(1000), false);
+    expect_payment_forwarded!(nodes[1], nodes[0], nodes[2], Some(1000), false, false);
     check_added_monitors!(nodes[1], 1);

     let mut bs_updates = None;

lightning/src/ln/channelmanager.rs

Lines changed: 48 additions & 44 deletions
@@ -3958,7 +3958,7 @@ impl<Signer: Sign, M: Deref, T: Deref, K: Deref, F: Deref, L: Deref> ChannelMana
         }
     }

-    fn claim_funds_internal(&self, mut channel_state_lock: MutexGuard<ChannelHolder<Signer>>, source: HTLCSource, payment_preimage: PaymentPreimage, forwarded_htlc_value_msat: Option<u64>, from_onchain: bool) {
+    fn claim_funds_internal(&self, mut channel_state_lock: MutexGuard<ChannelHolder<Signer>>, source: HTLCSource, payment_preimage: PaymentPreimage, forwarded_htlc_value_msat: Option<u64>, from_onchain: bool, next_channel_id: [u8; 32]) {
         match source {
             HTLCSource::OutboundRoute { session_priv, payment_id, path, .. } => {
                 mem::drop(channel_state_lock);
@@ -4049,12 +4049,14 @@ impl<Signer: Sign, M: Deref, T: Deref, K: Deref, F: Deref, L: Deref> ChannelMana
                 } else { None };

                 let mut pending_events = self.pending_events.lock().unwrap();
+                let prev_channel_id = Some(prev_outpoint.to_channel_id());
+                let next_channel_id = Some(next_channel_id);

-                let source_channel_id = Some(prev_outpoint.to_channel_id());
                 pending_events.push(events::Event::PaymentForwarded {
-                    source_channel_id,
                     fee_earned_msat,
                     claim_from_onchain_tx: from_onchain,
+                    prev_channel_id,
+                    next_channel_id,
                 });
             }
         }
@@ -4507,7 +4509,7 @@ impl<Signer: Sign, M: Deref, T: Deref, K: Deref, F: Deref, L: Deref> ChannelMana
             hash_map::Entry::Vacant(_) => return Err(MsgHandleErrInternal::send_err_msg_no_close("Failed to find corresponding channel".to_owned(), msg.channel_id))
         }
     };
-    self.claim_funds_internal(channel_lock, htlc_source, msg.payment_preimage.clone(), Some(forwarded_htlc_value), false);
+    self.claim_funds_internal(channel_lock, htlc_source, msg.payment_preimage.clone(), Some(forwarded_htlc_value), false, msg.channel_id);
     Ok(())
 }

@@ -4827,48 +4829,50 @@ impl<Signer: Sign, M: Deref, T: Deref, K: Deref, F: Deref, L: Deref> ChannelMana
         let mut failed_channels = Vec::new();
         let mut pending_monitor_events = self.chain_monitor.release_pending_monitor_events();
         let has_pending_monitor_events = !pending_monitor_events.is_empty();
-        for monitor_event in pending_monitor_events.drain(..) {
-            match monitor_event {
-                MonitorEvent::HTLCEvent(htlc_update) => {
-                    if let Some(preimage) = htlc_update.payment_preimage {
-                        log_trace!(self.logger, "Claiming HTLC with preimage {} from our monitor", log_bytes!(preimage.0));
-                        self.claim_funds_internal(self.channel_state.lock().unwrap(), htlc_update.source, preimage, htlc_update.onchain_value_satoshis.map(|v| v * 1000), true);
-                    } else {
-                        log_trace!(self.logger, "Failing HTLC with hash {} from our monitor", log_bytes!(htlc_update.payment_hash.0));
-                        self.fail_htlc_backwards_internal(self.channel_state.lock().unwrap(), htlc_update.source, &htlc_update.payment_hash, HTLCFailReason::Reason { failure_code: 0x4000 | 8, data: Vec::new() });
-                    }
-                },
-                MonitorEvent::CommitmentTxConfirmed(funding_outpoint) |
-                MonitorEvent::UpdateFailed(funding_outpoint) => {
-                    let mut channel_lock = self.channel_state.lock().unwrap();
-                    let channel_state = &mut *channel_lock;
-                    let by_id = &mut channel_state.by_id;
-                    let pending_msg_events = &mut channel_state.pending_msg_events;
-                    if let hash_map::Entry::Occupied(chan_entry) = by_id.entry(funding_outpoint.to_channel_id()) {
-                        let mut chan = remove_channel!(self, channel_state, chan_entry);
-                        failed_channels.push(chan.force_shutdown(false));
-                        if let Ok(update) = self.get_channel_update_for_broadcast(&chan) {
-                            pending_msg_events.push(events::MessageSendEvent::BroadcastChannelUpdate {
-                                msg: update
+        for (funding_outpoint, mut monitor_events) in pending_monitor_events.drain(..) {
+            for monitor_event in monitor_events.drain(..) {
+                match monitor_event {
+                    MonitorEvent::HTLCEvent(htlc_update) => {
+                        if let Some(preimage) = htlc_update.payment_preimage {
+                            log_trace!(self.logger, "Claiming HTLC with preimage {} from our monitor", log_bytes!(preimage.0));
+                            self.claim_funds_internal(self.channel_state.lock().unwrap(), htlc_update.source, preimage, htlc_update.onchain_value_satoshis.map(|v| v * 1000), true, funding_outpoint.to_channel_id());
+                        } else {
+                            log_trace!(self.logger, "Failing HTLC with hash {} from our monitor", log_bytes!(htlc_update.payment_hash.0));
+                            self.fail_htlc_backwards_internal(self.channel_state.lock().unwrap(), htlc_update.source, &htlc_update.payment_hash, HTLCFailReason::Reason { failure_code: 0x4000 | 8, data: Vec::new() });
+                        }
+                    },
+                    MonitorEvent::CommitmentTxConfirmed(funding_outpoint) |
+                    MonitorEvent::UpdateFailed(funding_outpoint) => {
+                        let mut channel_lock = self.channel_state.lock().unwrap();
+                        let channel_state = &mut *channel_lock;
+                        let by_id = &mut channel_state.by_id;
+                        let pending_msg_events = &mut channel_state.pending_msg_events;
+                        if let hash_map::Entry::Occupied(chan_entry) = by_id.entry(funding_outpoint.to_channel_id()) {
+                            let mut chan = remove_channel!(self, channel_state, chan_entry);
+                            failed_channels.push(chan.force_shutdown(false));
+                            if let Ok(update) = self.get_channel_update_for_broadcast(&chan) {
+                                pending_msg_events.push(events::MessageSendEvent::BroadcastChannelUpdate {
+                                    msg: update
+                                });
+                            }
+                            let reason = if let MonitorEvent::UpdateFailed(_) = monitor_event {
+                                ClosureReason::ProcessingError { err: "Failed to persist ChannelMonitor update during chain sync".to_string() }
+                            } else {
+                                ClosureReason::CommitmentTxConfirmed
+                            };
+                            self.issue_channel_close_events(&chan, reason);
+                            pending_msg_events.push(events::MessageSendEvent::HandleError {
+                                node_id: chan.get_counterparty_node_id(),
+                                action: msgs::ErrorAction::SendErrorMessage {
+                                    msg: msgs::ErrorMessage { channel_id: chan.channel_id(), data: "Channel force-closed".to_owned() }
+                                },
                             });
                         }
-                        let reason = if let MonitorEvent::UpdateFailed(_) = monitor_event {
-                            ClosureReason::ProcessingError { err: "Failed to persist ChannelMonitor update during chain sync".to_string() }
-                        } else {
-                            ClosureReason::CommitmentTxConfirmed
-                        };
-                        self.issue_channel_close_events(&chan, reason);
-                        pending_msg_events.push(events::MessageSendEvent::HandleError {
-                            node_id: chan.get_counterparty_node_id(),
-                            action: msgs::ErrorAction::SendErrorMessage {
-                                msg: msgs::ErrorMessage { channel_id: chan.channel_id(), data: "Channel force-closed".to_owned() }
-                            },
-                        });
-                    }
-                },
-                MonitorEvent::UpdateCompleted { funding_txo, monitor_update_id } => {
-                    self.channel_monitor_updated(&funding_txo, monitor_update_id);
-                },
+                    },
+                    MonitorEvent::UpdateCompleted { funding_txo, monitor_update_id } => {
+                        self.channel_monitor_updated(&funding_txo, monitor_update_id);
+                    },
+                }
             }
         }
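
Both ends of the event are plain 32-byte channel ids: `prev_channel_id` comes from `prev_outpoint.to_channel_id()`, while `next_channel_id` is either `msg.channel_id` at the fulfill-handling call site above or `funding_outpoint.to_channel_id()` when the claim is surfaced by a monitor event. As a reminder of what that derivation amounts to (this follows BOLT 2 and is not code from this commit), a hedged sketch is below.

    // Hedged sketch of the channel-id derivation behind `OutPoint::to_channel_id()`
    // (per BOLT 2, not copied from this diff): the funding txid bytes with the last
    // two bytes XORed with the funding output index.
    fn to_channel_id(funding_txid: [u8; 32], funding_output_index: u16) -> [u8; 32] {
        let mut channel_id = funding_txid;
        channel_id[30] ^= (funding_output_index >> 8) as u8;
        channel_id[31] ^= (funding_output_index & 0xff) as u8;
        channel_id
    }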

lightning/src/ln/functional_test_utils.rs

Lines changed: 10 additions & 5 deletions
@@ -1334,15 +1334,20 @@ macro_rules! expect_payment_path_successful {
 }

 macro_rules! expect_payment_forwarded {
-    ($node: expr, $source_node: expr, $expected_fee: expr, $upstream_force_closed: expr) => {
+    ($node: expr, $prev_node: expr, $next_node: expr, $expected_fee: expr, $upstream_force_closed: expr, $downstream_force_closed: expr) => {
         let events = $node.node.get_and_clear_pending_events();
         assert_eq!(events.len(), 1);
         match events[0] {
-            Event::PaymentForwarded { fee_earned_msat, source_channel_id, claim_from_onchain_tx } => {
+            Event::PaymentForwarded { fee_earned_msat, prev_channel_id, claim_from_onchain_tx, next_channel_id } => {
                 assert_eq!(fee_earned_msat, $expected_fee);
                 if fee_earned_msat.is_some() {
-                    // Is the event channel_id in one of the channels between the two nodes?
-                    assert!($node.node.list_channels().iter().any(|x| x.counterparty.node_id == $source_node.node.get_our_node_id() && x.channel_id == source_channel_id.unwrap()));
+                    // Is the event prev_channel_id in one of the channels between the two nodes?
+                    assert!($node.node.list_channels().iter().any(|x| x.counterparty.node_id == $prev_node.node.get_our_node_id() && x.channel_id == prev_channel_id.unwrap()));
+                }
+                // We check for force closures since a force closed channel is removed from the
+                // node's channel list
+                if !$downstream_force_closed {
+                    assert!($node.node.list_channels().iter().any(|x| x.counterparty.node_id == $next_node.node.get_our_node_id() && x.channel_id == next_channel_id.unwrap()));
                 }
                 assert_eq!(claim_from_onchain_tx, $upstream_force_closed);
             },
@@ -1586,7 +1591,7 @@ pub fn do_claim_payment_along_route<'a, 'b, 'c>(origin_node: &Node<'a, 'b, 'c>,
             {
                 $node.node.handle_update_fulfill_htlc(&$prev_node.node.get_our_node_id(), &next_msgs.as_ref().unwrap().0);
                 let fee = $node.node.channel_state.lock().unwrap().by_id.get(&next_msgs.as_ref().unwrap().0.channel_id).unwrap().config.forwarding_fee_base_msat;
-                expect_payment_forwarded!($node, $next_node, Some(fee as u64), false);
+                expect_payment_forwarded!($node, $next_node, $prev_node, Some(fee as u64), false, false);
                 expected_total_fee_msat += fee as u64;
                 check_added_monitors!($node, 1);
                 let new_next_msgs = if $new_msgs {

lightning/src/ln/functional_tests.rs

Lines changed: 11 additions & 8 deletions
@@ -2708,18 +2708,20 @@ fn test_htlc_on_chain_success() {
     }
     let chan_id = Some(chan_1.2);
     match forwarded_events[1] {
-        Event::PaymentForwarded { fee_earned_msat, source_channel_id, claim_from_onchain_tx } => {
+        Event::PaymentForwarded { fee_earned_msat, prev_channel_id, claim_from_onchain_tx, next_channel_id } => {
             assert_eq!(fee_earned_msat, Some(1000));
-            assert_eq!(source_channel_id, chan_id);
+            assert_eq!(prev_channel_id, chan_id);
             assert_eq!(claim_from_onchain_tx, true);
+            assert_eq!(next_channel_id, Some(chan_2.2));
         },
         _ => panic!()
     }
     match forwarded_events[2] {
-        Event::PaymentForwarded { fee_earned_msat, source_channel_id, claim_from_onchain_tx } => {
+        Event::PaymentForwarded { fee_earned_msat, prev_channel_id, claim_from_onchain_tx, next_channel_id } => {
             assert_eq!(fee_earned_msat, Some(1000));
-            assert_eq!(source_channel_id, chan_id);
+            assert_eq!(prev_channel_id, chan_id);
             assert_eq!(claim_from_onchain_tx, true);
+            assert_eq!(next_channel_id, Some(chan_2.2));
         },
         _ => panic!()
     }
@@ -5180,10 +5182,11 @@ fn test_onchain_to_onchain_claim() {
         _ => panic!("Unexpected event"),
     }
     match events[1] {
-        Event::PaymentForwarded { fee_earned_msat, source_channel_id, claim_from_onchain_tx } => {
+        Event::PaymentForwarded { fee_earned_msat, prev_channel_id, claim_from_onchain_tx, next_channel_id } => {
             assert_eq!(fee_earned_msat, Some(1000));
-            assert_eq!(source_channel_id, Some(chan_1.2));
+            assert_eq!(prev_channel_id, Some(chan_1.2));
             assert_eq!(claim_from_onchain_tx, true);
+            assert_eq!(next_channel_id, Some(chan_2.2));
         },
         _ => panic!("Unexpected event"),
     }
@@ -5350,7 +5353,7 @@ fn test_duplicate_payment_hash_one_failure_one_success() {
     // Note that the fee paid is effectively double as the HTLC value (including the nodes[1] fee
     // and nodes[2] fee) is rounded down and then claimed in full.
     mine_transaction(&nodes[1], &htlc_success_txn[0]);
-    expect_payment_forwarded!(nodes[1], nodes[0], Some(196*2), true);
+    expect_payment_forwarded!(nodes[1], nodes[0], nodes[2], Some(196*2), true, true);
     let updates = get_htlc_update_msgs!(nodes[1], nodes[0].node.get_our_node_id());
     assert!(updates.update_add_htlcs.is_empty());
     assert!(updates.update_fail_htlcs.is_empty());
@@ -9010,7 +9013,7 @@ fn do_test_onchain_htlc_settlement_after_close(broadcast_alice: bool, go_onchain
     assert_eq!(carol_updates.update_fulfill_htlcs.len(), 1);

     nodes[1].node.handle_update_fulfill_htlc(&nodes[2].node.get_our_node_id(), &carol_updates.update_fulfill_htlcs[0]);
-    expect_payment_forwarded!(nodes[1], nodes[0], if go_onchain_before_fulfill || force_closing_node == 1 { None } else { Some(1000) }, false);
+    expect_payment_forwarded!(nodes[1], nodes[0], nodes[2], if go_onchain_before_fulfill || force_closing_node == 1 { None } else { Some(1000) }, false, false);
     // If Alice broadcasted but Bob doesn't know yet, here he prepares to tell her about the preimage.
     if !go_onchain_before_fulfill && broadcast_alice {
         let events = nodes[1].node.get_and_clear_pending_msg_events();

lightning/src/ln/payment_tests.rs

Lines changed: 1 addition & 1 deletion
@@ -495,7 +495,7 @@ fn do_retry_with_no_persist(confirm_before_reload: bool) {
     let bs_htlc_claim_txn = nodes[1].tx_broadcaster.txn_broadcasted.lock().unwrap().split_off(0);
     assert_eq!(bs_htlc_claim_txn.len(), 1);
     check_spends!(bs_htlc_claim_txn[0], as_commitment_tx);
-    expect_payment_forwarded!(nodes[1], nodes[0], None, false);
+    expect_payment_forwarded!(nodes[1], nodes[0], nodes[2], None, false, false);

     if !confirm_before_reload {
         mine_transaction(&nodes[0], &as_commitment_tx);

lightning/src/ln/reorg_tests.rs

Lines changed: 1 addition & 1 deletion
@@ -138,7 +138,7 @@ fn do_test_onchain_htlc_reorg(local_commitment: bool, claim: bool) {
         // ChannelManager only polls chain::Watch::release_pending_monitor_events when we
         // probe it for events, so we probe non-message events here (which should just be the
         // PaymentForwarded event).
-        expect_payment_forwarded!(nodes[1], nodes[0], Some(1000), true);
+        expect_payment_forwarded!(nodes[1], nodes[0], nodes[2], Some(1000), true, true);
     } else {
         // Confirm the timeout tx and check that we fail the HTLC backwards
         let block = Block {
