Commit 1ae1de9
Add next_channel_id in PaymentForwarded event
This update also includes a minor refactor: the return type of `pending_monitor_events` has been changed to a `Vec` of tuples, pairing each funding `OutPoint` with the `Vec` of `MonitorEvent`s for that channel. We also rename `source/sink_channel_id` to `prev/next_channel_id` for clarity.
1 parent 637fb88 commit 1ae1de9
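
For consumers of the events API, here is a minimal sketch (not part of this commit) of matching on the renamed and added fields; the `handle_event` wrapper is hypothetical:

use lightning::util::events::Event;

// Hypothetical handler showing the updated `PaymentForwarded` shape.
fn handle_event(event: Event) {
	if let Event::PaymentForwarded { prev_channel_id, next_channel_id, fee_earned_msat, claim_from_onchain_tx } = event {
		// Both ids are `Option<[u8; 32]>`; they may be `None` when the event
		// was deserialized from data written before these fields existed.
		println!("forwarded from {:?} to {:?}, fee {:?} msat, onchain claim: {}",
			prev_channel_id, next_channel_id, fee_earned_msat, claim_from_onchain_tx);
	}
}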

12 files changed: +105 −83 lines

fuzz/src/chanmon_consistency.rs
Lines changed: 1 addition & 1 deletion

@@ -148,7 +148,7 @@ impl chain::Watch<EnforcingSigner> for TestChainMonitor {
 		self.chain_monitor.update_channel(funding_txo, update)
 	}
 
-	fn release_pending_monitor_events(&self) -> Vec<MonitorEvent> {
+	fn release_pending_monitor_events(&self) -> Vec<(OutPoint, Vec<MonitorEvent>)> {
 		return self.chain_monitor.release_pending_monitor_events();
 	}
 }

lightning/src/chain/chainmonitor.rs
Lines changed: 12 additions & 8 deletions

@@ -235,7 +235,7 @@ pub struct ChainMonitor<ChannelSigner: Sign, C: Deref, T: Deref, F: Deref, L: De
 	persister: P,
 	/// "User-provided" (ie persistence-completion/-failed) [`MonitorEvent`]s. These came directly
 	/// from the user and not from a [`ChannelMonitor`].
-	pending_monitor_events: Mutex<Vec<MonitorEvent>>,
+	pending_monitor_events: Mutex<Vec<(OutPoint, Vec<MonitorEvent>)>>,
 	/// The best block height seen, used as a proxy for the passage of time.
 	highest_chain_height: AtomicUsize,
 }
@@ -299,7 +299,7 @@ where C::Target: chain::Filter,
 				log_trace!(self.logger, "Finished syncing Channel Monitor for channel {}", log_funding_info!(monitor)),
 			Err(ChannelMonitorUpdateErr::PermanentFailure) => {
 				monitor_state.channel_perm_failed.store(true, Ordering::Release);
-				self.pending_monitor_events.lock().unwrap().push(MonitorEvent::UpdateFailed(*funding_outpoint));
+				self.pending_monitor_events.lock().unwrap().push((*funding_outpoint, vec![MonitorEvent::UpdateFailed(*funding_outpoint)]));
 			},
 			Err(ChannelMonitorUpdateErr::TemporaryFailure) => {
 				log_debug!(self.logger, "Channel Monitor sync for channel {} in progress, holding events until completion!", log_funding_info!(monitor));
@@ -455,10 +455,10 @@ where C::Target: chain::Filter,
 					// UpdateCompleted event.
 					return Ok(());
 				}
-				self.pending_monitor_events.lock().unwrap().push(MonitorEvent::UpdateCompleted {
+				self.pending_monitor_events.lock().unwrap().push((funding_txo, vec![MonitorEvent::UpdateCompleted {
 					funding_txo,
 					monitor_update_id: monitor_data.monitor.get_latest_update_id(),
-				});
+				}]));
 			},
 			MonitorUpdateId { contents: UpdateOrigin::ChainSync(_) } => {
 				if !monitor_data.has_pending_chainsync_updates(&pending_monitor_updates) {
@@ -476,10 +476,10 @@ where C::Target: chain::Filter,
 	/// channel_monitor_updated once with the highest ID.
 	#[cfg(any(test, fuzzing))]
 	pub fn force_channel_monitor_updated(&self, funding_txo: OutPoint, monitor_update_id: u64) {
-		self.pending_monitor_events.lock().unwrap().push(MonitorEvent::UpdateCompleted {
+		self.pending_monitor_events.lock().unwrap().push((funding_txo, vec![MonitorEvent::UpdateCompleted {
 			funding_txo,
 			monitor_update_id,
-		});
+		}]));
 	}
 
 	#[cfg(any(test, fuzzing, feature = "_test_utils"))]
@@ -668,7 +668,7 @@ where C::Target: chain::Filter,
 			}
 		}
 
-	fn release_pending_monitor_events(&self) -> Vec<MonitorEvent> {
+	fn release_pending_monitor_events(&self) -> Vec<(OutPoint, Vec<MonitorEvent>)> {
 		let mut pending_monitor_events = self.pending_monitor_events.lock().unwrap().split_off(0);
 		for monitor_state in self.monitors.read().unwrap().values() {
 			let is_pending_monitor_update = monitor_state.has_pending_chainsync_updates(&monitor_state.pending_monitor_updates.lock().unwrap());
@@ -694,7 +694,11 @@ where C::Target: chain::Filter,
 				log_error!(self.logger, " To avoid funds-loss, we are allowing monitor updates to be released.");
 				log_error!(self.logger, " This may cause duplicate payment events to be generated.");
 			}
-			pending_monitor_events.append(&mut monitor_state.monitor.get_and_clear_pending_monitor_events());
+			let monitor_events = monitor_state.monitor.get_and_clear_pending_monitor_events();
+			if monitor_events.len() > 0 {
+				let monitor_outpoint = monitor_state.monitor.get_funding_txo().0;
+				pending_monitor_events.push((monitor_outpoint, monitor_events));
+			}
 			}
 		}
 		pending_monitor_events

lightning/src/chain/mod.rs
Lines changed: 1 addition & 1 deletion

@@ -291,7 +291,7 @@ pub trait Watch<ChannelSigner: Sign> {
 	///
 	/// For details on asynchronous [`ChannelMonitor`] updating and returning
 	/// [`MonitorEvent::UpdateCompleted`] here, see [`ChannelMonitorUpdateErr::TemporaryFailure`].
-	fn release_pending_monitor_events(&self) -> Vec<MonitorEvent>;
+	fn release_pending_monitor_events(&self) -> Vec<(OutPoint, Vec<MonitorEvent>)>;
 }
 
 /// The `Filter` trait defines behavior for indicating chain activity of interest pertaining to
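
Because `release_pending_monitor_events` is part of the public `Watch` trait, external implementors must adopt the grouped return type. A minimal sketch of one way to do so, assuming a hypothetical `MyWatcher` that buffers `(OutPoint, MonitorEvent)` pairs:

use std::collections::HashMap;
use std::sync::Mutex;

use lightning::chain::channelmonitor::MonitorEvent;
use lightning::chain::transaction::OutPoint;

// Hypothetical buffering watcher; only the grouping logic matters here.
struct MyWatcher {
	buffered_events: Mutex<Vec<(OutPoint, MonitorEvent)>>,
}

impl MyWatcher {
	// Matches the new `Watch::release_pending_monitor_events` shape: one entry
	// per funding outpoint, carrying all of that channel's pending events.
	fn release_pending_monitor_events(&self) -> Vec<(OutPoint, Vec<MonitorEvent>)> {
		let mut grouped: HashMap<OutPoint, Vec<MonitorEvent>> = HashMap::new();
		for (outpoint, event) in self.buffered_events.lock().unwrap().drain(..) {
			grouped.entry(outpoint).or_insert_with(Vec::new).push(event);
		}
		grouped.into_iter().collect()
	}
}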

lightning/src/ln/chanmon_update_fail_tests.rs
Lines changed: 3 additions & 3 deletions

@@ -1102,7 +1102,7 @@ fn test_monitor_update_fail_reestablish() {
 	assert!(updates.update_fee.is_none());
 	assert_eq!(updates.update_fulfill_htlcs.len(), 1);
 	nodes[1].node.handle_update_fulfill_htlc(&nodes[2].node.get_our_node_id(), &updates.update_fulfill_htlcs[0]);
-	expect_payment_forwarded!(nodes[1], nodes[0], Some(1000), false);
+	expect_payment_forwarded!(nodes[1], nodes[0], nodes[2], Some(1000), false, false);
 	check_added_monitors!(nodes[1], 1);
 	assert!(nodes[1].node.get_and_clear_pending_msg_events().is_empty());
 	commitment_signed_dance!(nodes[1], nodes[2], updates.commitment_signed, false);
@@ -2087,7 +2087,7 @@ fn test_fail_htlc_on_broadcast_after_claim() {
 	nodes[1].node.handle_update_fulfill_htlc(&nodes[2].node.get_our_node_id(), &cs_updates.update_fulfill_htlcs[0]);
 	let bs_updates = get_htlc_update_msgs!(nodes[1], nodes[0].node.get_our_node_id());
 	check_added_monitors!(nodes[1], 1);
-	expect_payment_forwarded!(nodes[1], nodes[0], Some(1000), false);
+	expect_payment_forwarded!(nodes[1], nodes[0], nodes[2], Some(1000), false, false);
 
 	mine_transaction(&nodes[1], &bs_txn[0]);
 	check_closed_event!(nodes[1], 1, ClosureReason::CommitmentTxConfirmed);
@@ -2468,7 +2468,7 @@ fn do_test_reconnect_dup_htlc_claims(htlc_status: HTLCStatusAtDupClaim, second_f
 		assert_eq!(fulfill_msg, cs_updates.update_fulfill_htlcs[0]);
 	}
 	nodes[1].node.handle_update_fulfill_htlc(&nodes[2].node.get_our_node_id(), &fulfill_msg);
-	expect_payment_forwarded!(nodes[1], nodes[0], Some(1000), false);
+	expect_payment_forwarded!(nodes[1], nodes[0], nodes[2], Some(1000), false, false);
 	check_added_monitors!(nodes[1], 1);
 
 	let mut bs_updates = None;

lightning/src/ln/channelmanager.rs
Lines changed: 48 additions & 44 deletions

@@ -3952,7 +3952,7 @@ impl<Signer: Sign, M: Deref, T: Deref, K: Deref, F: Deref, L: Deref> ChannelMana
 		}
 	}
 
-	fn claim_funds_internal(&self, mut channel_state_lock: MutexGuard<ChannelHolder<Signer>>, source: HTLCSource, payment_preimage: PaymentPreimage, forwarded_htlc_value_msat: Option<u64>, from_onchain: bool) {
+	fn claim_funds_internal(&self, mut channel_state_lock: MutexGuard<ChannelHolder<Signer>>, source: HTLCSource, payment_preimage: PaymentPreimage, forwarded_htlc_value_msat: Option<u64>, from_onchain: bool, next_channel_id: [u8; 32]) {
 		match source {
 			HTLCSource::OutboundRoute { session_priv, payment_id, path, .. } => {
 				mem::drop(channel_state_lock);
@@ -4043,12 +4043,14 @@
 			} else { None };
 
 			let mut pending_events = self.pending_events.lock().unwrap();
+			let prev_channel_id = Some(prev_outpoint.to_channel_id());
+			let next_channel_id = Some(next_channel_id);
 
-			let source_channel_id = Some(prev_outpoint.to_channel_id());
 			pending_events.push(events::Event::PaymentForwarded {
-				source_channel_id,
 				fee_earned_msat,
 				claim_from_onchain_tx: from_onchain,
+				prev_channel_id,
+				next_channel_id,
 			});
 		}
 	}
@@ -4501,7 +4503,7 @@
 				hash_map::Entry::Vacant(_) => return Err(MsgHandleErrInternal::send_err_msg_no_close("Failed to find corresponding channel".to_owned(), msg.channel_id))
 			}
 		};
-		self.claim_funds_internal(channel_lock, htlc_source, msg.payment_preimage.clone(), Some(forwarded_htlc_value), false);
+		self.claim_funds_internal(channel_lock, htlc_source, msg.payment_preimage.clone(), Some(forwarded_htlc_value), false, msg.channel_id);
 		Ok(())
 	}
 
@@ -4821,48 +4823,50 @@
 		let mut failed_channels = Vec::new();
 		let mut pending_monitor_events = self.chain_monitor.release_pending_monitor_events();
 		let has_pending_monitor_events = !pending_monitor_events.is_empty();
-		for monitor_event in pending_monitor_events.drain(..) {
-			match monitor_event {
-				MonitorEvent::HTLCEvent(htlc_update) => {
-					if let Some(preimage) = htlc_update.payment_preimage {
-						log_trace!(self.logger, "Claiming HTLC with preimage {} from our monitor", log_bytes!(preimage.0));
-						self.claim_funds_internal(self.channel_state.lock().unwrap(), htlc_update.source, preimage, htlc_update.onchain_value_satoshis.map(|v| v * 1000), true);
-					} else {
-						log_trace!(self.logger, "Failing HTLC with hash {} from our monitor", log_bytes!(htlc_update.payment_hash.0));
-						self.fail_htlc_backwards_internal(self.channel_state.lock().unwrap(), htlc_update.source, &htlc_update.payment_hash, HTLCFailReason::Reason { failure_code: 0x4000 | 8, data: Vec::new() });
-					}
-				},
-				MonitorEvent::CommitmentTxConfirmed(funding_outpoint) |
-				MonitorEvent::UpdateFailed(funding_outpoint) => {
-					let mut channel_lock = self.channel_state.lock().unwrap();
-					let channel_state = &mut *channel_lock;
-					let by_id = &mut channel_state.by_id;
-					let pending_msg_events = &mut channel_state.pending_msg_events;
-					if let hash_map::Entry::Occupied(chan_entry) = by_id.entry(funding_outpoint.to_channel_id()) {
-						let mut chan = remove_channel!(self, channel_state, chan_entry);
-						failed_channels.push(chan.force_shutdown(false));
-						if let Ok(update) = self.get_channel_update_for_broadcast(&chan) {
-							pending_msg_events.push(events::MessageSendEvent::BroadcastChannelUpdate {
-								msg: update
+		for (funding_outpoint, mut monitor_events) in pending_monitor_events.drain(..) {
+			for monitor_event in monitor_events.drain(..) {
+				match monitor_event {
+					MonitorEvent::HTLCEvent(htlc_update) => {
+						if let Some(preimage) = htlc_update.payment_preimage {
+							log_trace!(self.logger, "Claiming HTLC with preimage {} from our monitor", log_bytes!(preimage.0));
+							self.claim_funds_internal(self.channel_state.lock().unwrap(), htlc_update.source, preimage, htlc_update.onchain_value_satoshis.map(|v| v * 1000), true, funding_outpoint.to_channel_id());
+						} else {
+							log_trace!(self.logger, "Failing HTLC with hash {} from our monitor", log_bytes!(htlc_update.payment_hash.0));
+							self.fail_htlc_backwards_internal(self.channel_state.lock().unwrap(), htlc_update.source, &htlc_update.payment_hash, HTLCFailReason::Reason { failure_code: 0x4000 | 8, data: Vec::new() });
+						}
+					},
+					MonitorEvent::CommitmentTxConfirmed(funding_outpoint) |
+					MonitorEvent::UpdateFailed(funding_outpoint) => {
+						let mut channel_lock = self.channel_state.lock().unwrap();
+						let channel_state = &mut *channel_lock;
+						let by_id = &mut channel_state.by_id;
+						let pending_msg_events = &mut channel_state.pending_msg_events;
+						if let hash_map::Entry::Occupied(chan_entry) = by_id.entry(funding_outpoint.to_channel_id()) {
+							let mut chan = remove_channel!(self, channel_state, chan_entry);
+							failed_channels.push(chan.force_shutdown(false));
+							if let Ok(update) = self.get_channel_update_for_broadcast(&chan) {
+								pending_msg_events.push(events::MessageSendEvent::BroadcastChannelUpdate {
+									msg: update
+								});
+							}
+							let reason = if let MonitorEvent::UpdateFailed(_) = monitor_event {
+								ClosureReason::ProcessingError { err: "Failed to persist ChannelMonitor update during chain sync".to_string() }
+							} else {
+								ClosureReason::CommitmentTxConfirmed
+							};
+							self.issue_channel_close_events(&chan, reason);
+							pending_msg_events.push(events::MessageSendEvent::HandleError {
+								node_id: chan.get_counterparty_node_id(),
+								action: msgs::ErrorAction::SendErrorMessage {
+									msg: msgs::ErrorMessage { channel_id: chan.channel_id(), data: "Channel force-closed".to_owned() }
+								},
 							});
 						}
-						let reason = if let MonitorEvent::UpdateFailed(_) = monitor_event {
-							ClosureReason::ProcessingError { err: "Failed to persist ChannelMonitor update during chain sync".to_string() }
-						} else {
-							ClosureReason::CommitmentTxConfirmed
-						};
-						self.issue_channel_close_events(&chan, reason);
-						pending_msg_events.push(events::MessageSendEvent::HandleError {
-							node_id: chan.get_counterparty_node_id(),
-							action: msgs::ErrorAction::SendErrorMessage {
-								msg: msgs::ErrorMessage { channel_id: chan.channel_id(), data: "Channel force-closed".to_owned() }
-							},
-						});
-					}
-				},
-				MonitorEvent::UpdateCompleted { funding_txo, monitor_update_id } => {
-					self.channel_monitor_updated(&funding_txo, monitor_update_id);
-				},
+					},
+					MonitorEvent::UpdateCompleted { funding_txo, monitor_update_id } => {
+						self.channel_monitor_updated(&funding_txo, monitor_update_id);
+					},
+				}
 			}
 		}
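
The nested loop above can supply `funding_outpoint.to_channel_id()` as the new `next_channel_id` argument because each batch of `MonitorEvent`s now arrives keyed by its monitor's funding outpoint. For reference, a standalone illustration (the helper name is hypothetical):

use lightning::chain::transaction::OutPoint;

// LDK derives a channel id from the funding outpoint, so the outpoint that
// keys an event batch also identifies the channel the HTLC was claimed on.
fn channel_id_of(funding_outpoint: OutPoint) -> [u8; 32] {
	funding_outpoint.to_channel_id()
}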

lightning/src/ln/functional_test_utils.rs
Lines changed: 10 additions & 5 deletions

@@ -1327,15 +1327,20 @@ macro_rules! expect_payment_path_successful {
 }
 
 macro_rules! expect_payment_forwarded {
-	($node: expr, $source_node: expr, $expected_fee: expr, $upstream_force_closed: expr) => {
+	($node: expr, $prev_node: expr, $next_node: expr, $expected_fee: expr, $upstream_force_closed: expr, $downstream_force_closed: expr) => {
 		let events = $node.node.get_and_clear_pending_events();
 		assert_eq!(events.len(), 1);
 		match events[0] {
-			Event::PaymentForwarded { fee_earned_msat, source_channel_id, claim_from_onchain_tx } => {
+			Event::PaymentForwarded { fee_earned_msat, prev_channel_id, claim_from_onchain_tx, next_channel_id } => {
 				assert_eq!(fee_earned_msat, $expected_fee);
 				if fee_earned_msat.is_some() {
-					// Is the event channel_id in one of the channels between the two nodes?
-					assert!($node.node.list_channels().iter().any(|x| x.counterparty.node_id == $source_node.node.get_our_node_id() && x.channel_id == source_channel_id.unwrap()));
+					// Is the event prev_channel_id in one of the channels between the two nodes?
+					assert!($node.node.list_channels().iter().any(|x| x.counterparty.node_id == $prev_node.node.get_our_node_id() && x.channel_id == prev_channel_id.unwrap()));
+				}
+				// We check for force closures since a force closed channel is removed from the
+				// node's channel list
+				if !$downstream_force_closed {
+					assert!($node.node.list_channels().iter().any(|x| x.counterparty.node_id == $next_node.node.get_our_node_id() && x.channel_id == next_channel_id.unwrap()));
 				}
 				assert_eq!(claim_from_onchain_tx, $upstream_force_closed);
 			},
@@ -1579,7 +1584,7 @@ pub fn do_claim_payment_along_route<'a, 'b, 'c>(origin_node: &Node<'a, 'b, 'c>,
 				{
 					$node.node.handle_update_fulfill_htlc(&$prev_node.node.get_our_node_id(), &next_msgs.as_ref().unwrap().0);
 					let fee = $node.node.channel_state.lock().unwrap().by_id.get(&next_msgs.as_ref().unwrap().0.channel_id).unwrap().config.forwarding_fee_base_msat;
-					expect_payment_forwarded!($node, $next_node, Some(fee as u64), false);
+					expect_payment_forwarded!($node, $next_node, $prev_node, Some(fee as u64), false, false);
 					expected_total_fee_msat += fee as u64;
 					check_added_monitors!($node, 1);
 					let new_next_msgs = if $new_msgs {
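
As a usage sketch (illustrative, following the harness's three-node convention): the forwarding node now names both its upstream and downstream neighbours, and the two trailing flags cover on-chain claims and downstream force-closes.

// nodes[1] forwarded a payment from nodes[0] to nodes[2], earning 1000 msat;
// no on-chain upstream claim and no downstream force-close occurred.
expect_payment_forwarded!(nodes[1], nodes[0], nodes[2], Some(1000), false, false);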

lightning/src/ln/functional_tests.rs
Lines changed: 11 additions & 8 deletions

@@ -2686,18 +2686,20 @@ fn test_htlc_on_chain_success() {
 	}
 	let chan_id = Some(chan_1.2);
 	match forwarded_events[1] {
-		Event::PaymentForwarded { fee_earned_msat, source_channel_id, claim_from_onchain_tx } => {
+		Event::PaymentForwarded { fee_earned_msat, prev_channel_id, claim_from_onchain_tx, next_channel_id } => {
 			assert_eq!(fee_earned_msat, Some(1000));
-			assert_eq!(source_channel_id, chan_id);
+			assert_eq!(prev_channel_id, chan_id);
 			assert_eq!(claim_from_onchain_tx, true);
+			assert_eq!(next_channel_id, Some(chan_2.2));
 		},
 		_ => panic!()
 	}
 	match forwarded_events[2] {
-		Event::PaymentForwarded { fee_earned_msat, source_channel_id, claim_from_onchain_tx } => {
+		Event::PaymentForwarded { fee_earned_msat, prev_channel_id, claim_from_onchain_tx, next_channel_id } => {
 			assert_eq!(fee_earned_msat, Some(1000));
-			assert_eq!(source_channel_id, chan_id);
+			assert_eq!(prev_channel_id, chan_id);
 			assert_eq!(claim_from_onchain_tx, true);
+			assert_eq!(next_channel_id, Some(chan_2.2));
 		},
 		_ => panic!()
 	}
@@ -5117,10 +5119,11 @@ fn test_onchain_to_onchain_claim() {
 		_ => panic!("Unexpected event"),
 	}
 	match events[1] {
-		Event::PaymentForwarded { fee_earned_msat, source_channel_id, claim_from_onchain_tx } => {
+		Event::PaymentForwarded { fee_earned_msat, prev_channel_id, claim_from_onchain_tx, next_channel_id } => {
 			assert_eq!(fee_earned_msat, Some(1000));
-			assert_eq!(source_channel_id, Some(chan_1.2));
+			assert_eq!(prev_channel_id, Some(chan_1.2));
 			assert_eq!(claim_from_onchain_tx, true);
+			assert_eq!(next_channel_id, Some(chan_2.2));
 		},
 		_ => panic!("Unexpected event"),
 	}
@@ -5287,7 +5290,7 @@ fn test_duplicate_payment_hash_one_failure_one_success() {
 	// Note that the fee paid is effectively double as the HTLC value (including the nodes[1] fee
 	// and nodes[2] fee) is rounded down and then claimed in full.
 	mine_transaction(&nodes[1], &htlc_success_txn[0]);
-	expect_payment_forwarded!(nodes[1], nodes[0], Some(196*2), true);
+	expect_payment_forwarded!(nodes[1], nodes[0], nodes[2], Some(196*2), true, true);
 	let updates = get_htlc_update_msgs!(nodes[1], nodes[0].node.get_our_node_id());
 	assert!(updates.update_add_htlcs.is_empty());
 	assert!(updates.update_fail_htlcs.is_empty());
@@ -8869,7 +8872,7 @@ fn do_test_onchain_htlc_settlement_after_close(broadcast_alice: bool, go_onchain
 	assert_eq!(carol_updates.update_fulfill_htlcs.len(), 1);
 
 	nodes[1].node.handle_update_fulfill_htlc(&nodes[2].node.get_our_node_id(), &carol_updates.update_fulfill_htlcs[0]);
-	expect_payment_forwarded!(nodes[1], nodes[0], if go_onchain_before_fulfill || force_closing_node == 1 { None } else { Some(1000) }, false);
+	expect_payment_forwarded!(nodes[1], nodes[0], nodes[2], if go_onchain_before_fulfill || force_closing_node == 1 { None } else { Some(1000) }, false, false);
 	// If Alice broadcasted but Bob doesn't know yet, here he prepares to tell her about the preimage.
 	if !go_onchain_before_fulfill && broadcast_alice {
 		let events = nodes[1].node.get_and_clear_pending_msg_events();

lightning/src/ln/payment_tests.rs
Lines changed: 1 addition & 1 deletion

@@ -495,7 +495,7 @@ fn do_retry_with_no_persist(confirm_before_reload: bool) {
 	let bs_htlc_claim_txn = nodes[1].tx_broadcaster.txn_broadcasted.lock().unwrap().split_off(0);
 	assert_eq!(bs_htlc_claim_txn.len(), 1);
 	check_spends!(bs_htlc_claim_txn[0], as_commitment_tx);
-	expect_payment_forwarded!(nodes[1], nodes[0], None, false);
+	expect_payment_forwarded!(nodes[1], nodes[0], nodes[2], None, false, false);
 
 	if !confirm_before_reload {
 		mine_transaction(&nodes[0], &as_commitment_tx);

lightning/src/ln/reorg_tests.rs
Lines changed: 1 addition & 1 deletion

@@ -138,7 +138,7 @@ fn do_test_onchain_htlc_reorg(local_commitment: bool, claim: bool) {
 		// ChannelManager only polls chain::Watch::release_pending_monitor_events when we
 		// probe it for events, so we probe non-message events here (which should just be the
 		// PaymentForwarded event).
-		expect_payment_forwarded!(nodes[1], nodes[0], Some(1000), true);
+		expect_payment_forwarded!(nodes[1], nodes[0], nodes[2], Some(1000), true, true);
 	} else {
 		// Confirm the timeout tx and check that we fail the HTLC backwards
 		let block = Block {
