Commit 4d66cce

Merge pull request #2769 from TheBlueMatt/2023-12-2314-cleanups-3
Post-#2314 Cleanups #3
2 parents: 1eddd1e + cf0539e

3 files changed, 55 insertions(+), 18 deletions(-)

lightning/src/ln/channel.rs (+10)

@@ -599,6 +599,8 @@ pub(crate) struct ShutdownResult {
 	/// An unbroadcasted batch funding transaction id. The closure of this channel should be
 	/// propagated to the remainder of the batch.
 	pub(crate) unbroadcasted_batch_funding_txid: Option<Txid>,
+	pub(crate) channel_id: ChannelId,
+	pub(crate) counterparty_node_id: PublicKey,
 }
 
 /// If the majority of the channels funds are to the fundee and the initiator holds only just
@@ -2164,6 +2166,8 @@ impl<SP: Deref> ChannelContext<SP> where SP::Target: SignerProvider {
 			monitor_update,
 			dropped_outbound_htlcs,
 			unbroadcasted_batch_funding_txid,
+			channel_id: self.channel_id,
+			counterparty_node_id: self.counterparty_node_id,
 		}
 	}
 
@@ -4707,6 +4711,8 @@ impl<SP: Deref> Channel<SP> where
 			monitor_update: None,
 			dropped_outbound_htlcs: Vec::new(),
 			unbroadcasted_batch_funding_txid: self.context.unbroadcasted_batch_funding_txid(),
+			channel_id: self.context.channel_id,
+			counterparty_node_id: self.context.counterparty_node_id,
 		};
 		let tx = self.build_signed_closing_transaction(&mut closing_tx, &msg.signature, &sig);
 		self.context.channel_state = ChannelState::ShutdownComplete as u32;
@@ -4735,6 +4741,8 @@ impl<SP: Deref> Channel<SP> where
 			monitor_update: None,
 			dropped_outbound_htlcs: Vec::new(),
 			unbroadcasted_batch_funding_txid: self.context.unbroadcasted_batch_funding_txid(),
+			channel_id: self.context.channel_id,
+			counterparty_node_id: self.context.counterparty_node_id,
 		};
 		self.context.channel_state = ChannelState::ShutdownComplete as u32;
 		self.context.update_time_counter += 1;
@@ -5913,6 +5921,8 @@ impl<SP: Deref> Channel<SP> where
 			monitor_update: None,
 			dropped_outbound_htlcs: Vec::new(),
 			unbroadcasted_batch_funding_txid: self.context.unbroadcasted_batch_funding_txid(),
+			channel_id: self.context.channel_id,
+			counterparty_node_id: self.context.counterparty_node_id,
 		};
 		self.context.channel_state = ChannelState::ShutdownComplete as u32;
 		Some(shutdown_result)
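Why `ShutdownResult` gains these two fields: by the time a closure is finalized the `Channel` object may already be gone, so a consumer holding only the `ShutdownResult` previously had no way to attach peer and channel context to its logs. The channelmanager.rs hunk below uses exactly these fields to rebuild a scoped logger; a minimal sketch of that consuming pattern follows (the `WithContext::from` call and the field names come from this diff, while the surrounding function is an illustrative stand-in):

    // Illustrative fragment: rebuild a peer/channel-scoped logger from a
    // ShutdownResult alone, after the Channel itself has been dropped.
    fn log_finish_closure(&self, shutdown_res: &ShutdownResult) {
        let logger = WithContext::from(
            &self.logger, Some(shutdown_res.counterparty_node_id), Some(shutdown_res.channel_id),
        );
        log_debug!(logger, "Finishing closure of channel with {} HTLCs to fail",
            shutdown_res.dropped_outbound_htlcs.len());
    }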

lightning/src/ln/channelmanager.rs (+41, -18)

@@ -2808,7 +2808,10 @@ where
 			debug_assert_ne!(peer.held_by_thread(), LockHeldState::HeldByThread);
 		}
 
-		log_debug!(self.logger, "Finishing closure of channel with {} HTLCs to fail", shutdown_res.dropped_outbound_htlcs.len());
+		let logger = WithContext::from(
+			&self.logger, Some(shutdown_res.counterparty_node_id), Some(shutdown_res.channel_id),
+		);
+		log_debug!(logger, "Finishing closure of channel with {} HTLCs to fail", shutdown_res.dropped_outbound_htlcs.len());
 		for htlc_source in shutdown_res.dropped_outbound_htlcs.drain(..) {
 			let (source, payment_hash, counterparty_node_id, channel_id) = htlc_source;
 			let reason = HTLCFailReason::from_failure_code(0x4000 | 8);
@@ -2967,7 +2970,7 @@ where
 	}
 
 	fn decode_update_add_htlc_onion(
-		&self, msg: &msgs::UpdateAddHTLC
+		&self, msg: &msgs::UpdateAddHTLC, counterparty_node_id: &PublicKey,
 	) -> Result<
 		(onion_utils::Hop, [u8; 32], Option<Result<PublicKey, secp256k1::Error>>), HTLCFailureMsg
 	> {
@@ -2986,7 +2989,7 @@ where
 		($msg: expr, $err_code: expr, $data: expr) => {
 			{
 				log_info!(
-					WithContext::from(&self.logger, None, Some(msg.channel_id)),
+					WithContext::from(&self.logger, Some(*counterparty_node_id), Some(msg.channel_id)),
 					"Failed to accept/forward incoming HTLC: {}", $msg
 				);
 				let (err_code, err_data) = if is_blinded {
@@ -3133,13 +3136,15 @@ where
 	}
 
 	fn construct_pending_htlc_status<'a>(
-		&self, msg: &msgs::UpdateAddHTLC, shared_secret: [u8; 32], decoded_hop: onion_utils::Hop,
-		allow_underpay: bool, next_packet_pubkey_opt: Option<Result<PublicKey, secp256k1::Error>>
+		&self, msg: &msgs::UpdateAddHTLC, counterparty_node_id: &PublicKey, shared_secret: [u8; 32],
+		decoded_hop: onion_utils::Hop, allow_underpay: bool,
+		next_packet_pubkey_opt: Option<Result<PublicKey, secp256k1::Error>>,
 	) -> PendingHTLCStatus {
 		macro_rules! return_err {
 			($msg: expr, $err_code: expr, $data: expr) => {
 				{
-					log_info!(WithContext::from(&self.logger, None, Some(msg.channel_id)), "Failed to accept/forward incoming HTLC: {}", $msg);
+					let logger = WithContext::from(&self.logger, Some(*counterparty_node_id), Some(msg.channel_id));
+					log_info!(logger, "Failed to accept/forward incoming HTLC: {}", $msg);
 					return PendingHTLCStatus::Fail(HTLCFailureMsg::Relay(msgs::UpdateFailHTLC {
 						channel_id: msg.channel_id,
 						htlc_id: msg.htlc_id,
@@ -3276,23 +3281,33 @@ where
 		} = args;
 		// The top-level caller should hold the total_consistency_lock read lock.
 		debug_assert!(self.total_consistency_lock.try_write().is_err());
-		log_trace!(WithContext::from(&self.logger, Some(path.hops.first().unwrap().pubkey), None),
-			"Attempting to send payment with payment hash {} along path with next hop {}",
-			payment_hash, path.hops.first().unwrap().short_channel_id);
 		let prng_seed = self.entropy_source.get_secure_random_bytes();
 		let session_priv = SecretKey::from_slice(&session_priv_bytes[..]).expect("RNG is busted");
 
 		let (onion_packet, htlc_msat, htlc_cltv) = onion_utils::create_payment_onion(
 			&self.secp_ctx, &path, &session_priv, total_value, recipient_onion, cur_height,
 			payment_hash, keysend_preimage, prng_seed
-		)?;
+		).map_err(|e| {
+			let logger = WithContext::from(&self.logger, Some(path.hops.first().unwrap().pubkey), None);
+			log_error!(logger, "Failed to build an onion for path for payment hash {}", payment_hash);
+			e
+		})?;
 
 		let err: Result<(), _> = loop {
 			let (counterparty_node_id, id) = match self.short_to_chan_info.read().unwrap().get(&path.hops.first().unwrap().short_channel_id) {
-				None => return Err(APIError::ChannelUnavailable{err: "No channel available with first hop!".to_owned()}),
+				None => {
+					let logger = WithContext::from(&self.logger, Some(path.hops.first().unwrap().pubkey), None);
+					log_error!(logger, "Failed to find first-hop for payment hash {}", payment_hash);
+					return Err(APIError::ChannelUnavailable{err: "No channel available with first hop!".to_owned()})
+				},
 				Some((cp_id, chan_id)) => (cp_id.clone(), chan_id.clone()),
 			};
 
+			let logger = WithContext::from(&self.logger, Some(counterparty_node_id), Some(id));
+			log_trace!(logger,
+				"Attempting to send payment with payment hash {} along path with next hop {}",
+				payment_hash, path.hops.first().unwrap().short_channel_id);
+
 			let per_peer_state = self.per_peer_state.read().unwrap();
 			let peer_state_mutex = per_peer_state.get(&counterparty_node_id)
 				.ok_or_else(|| APIError::ChannelUnavailable{err: "No peer matching the path's first hop found!".to_owned() })?;
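The `map_err` change above is a pattern worth noting: the error is logged at the site where the most context is available, then returned unchanged so `?` still propagates the original value to the caller. A self-contained sketch of the same pattern, with every name here a hypothetical stand-in for the LDK internals above:

    // Log-then-propagate via map_err: a log line is emitted at the failure
    // site, but the caller still receives the original error untouched.
    fn build_onion() -> Result<Vec<u8>, String> {
        Err("no route material".to_owned())
    }

    fn send_along_path() -> Result<(), String> {
        let _onion = build_onion().map_err(|e| {
            // Emit the log with whatever context exists at this point...
            eprintln!("Failed to build an onion for path: {}", e);
            // ...then hand the untouched error back for `?` to propagate.
            e
        })?;
        Ok(())
    }

    fn main() {
        assert!(send_along_path().is_err());
    }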
@@ -4044,7 +4059,8 @@ where
 			None => {
 				let error = format!("Channel with id {} not found for the passed counterparty node_id {}",
 					next_hop_channel_id, next_node_id);
-				log_error!(self.logger, "{} when attempting to forward intercepted HTLC", error);
+				let logger = WithContext::from(&self.logger, Some(next_node_id), Some(*next_hop_channel_id));
+				log_error!(logger, "{} when attempting to forward intercepted HTLC", error);
 				return Err(APIError::ChannelUnavailable {
 					err: error
 				})
@@ -4132,6 +4148,7 @@ where
 
 		for (short_chan_id, mut pending_forwards) in forward_htlcs {
 			if short_chan_id != 0 {
+				let mut forwarding_counterparty = None;
 				macro_rules! forwarding_channel_not_found {
 					() => {
 						for forward_info in pending_forwards.drain(..) {
@@ -4145,7 +4162,8 @@ where
 						}) => {
 							macro_rules! failure_handler {
 								($msg: expr, $err_code: expr, $err_data: expr, $phantom_ss: expr, $next_hop_unknown: expr) => {
-									log_info!(self.logger, "Failed to accept/forward incoming HTLC: {}", $msg);
+									let logger = WithContext::from(&self.logger, forwarding_counterparty, Some(prev_funding_outpoint.to_channel_id()));
+									log_info!(logger, "Failed to accept/forward incoming HTLC: {}", $msg);
 
 									let htlc_source = HTLCSource::PreviousHopData(HTLCPreviousHopData {
 										short_channel_id: prev_short_channel_id,
@@ -4244,6 +4262,7 @@ where
 						continue;
 					}
 				};
+				forwarding_counterparty = Some(counterparty_node_id);
 				let per_peer_state = self.per_peer_state.read().unwrap();
 				let peer_state_mutex_opt = per_peer_state.get(&counterparty_node_id);
 				if peer_state_mutex_opt.is_none() {
@@ -5347,6 +5366,7 @@ where
 		}
 		if valid_mpp {
 			for htlc in sources.drain(..) {
+				let prev_hop_chan_id = htlc.prev_hop.outpoint.to_channel_id();
 				if let Err((pk, err)) = self.claim_funds_from_hop(
 					htlc.prev_hop, payment_preimage,
 					|_, definitely_duplicate| {
@@ -5357,6 +5377,7 @@ where
 					if let msgs::ErrorAction::IgnoreError = err.err.action {
 						// We got a temporary failure updating monitor, but will claim the
 						// HTLC when the monitor updating is restored (or on chain).
-						log_error!(self.logger, "Temporary failure claiming HTLC, treating as success: {}", err.err.err);
+						let logger = WithContext::from(&self.logger, None, Some(prev_hop_chan_id));
+						log_error!(logger, "Temporary failure claiming HTLC, treating as success: {}", err.err.err);
 					} else { errs.push((pk, err)); }
 				}
@@ -6455,7 +6476,7 @@ where
 		// Note that the ChannelManager is NOT re-persisted on disk after this (unless we error
 		// closing a channel), so any changes are likely to be lost on restart!
 
-		let decoded_hop_res = self.decode_update_add_htlc_onion(msg);
+		let decoded_hop_res = self.decode_update_add_htlc_onion(msg, counterparty_node_id);
 		let per_peer_state = self.per_peer_state.read().unwrap();
 		let peer_state_mutex = per_peer_state.get(counterparty_node_id)
 			.ok_or_else(|| {
@@ -6469,8 +6490,10 @@ where
 			if let ChannelPhase::Funded(chan) = chan_phase_entry.get_mut() {
 				let pending_forward_info = match decoded_hop_res {
 					Ok((next_hop, shared_secret, next_packet_pk_opt)) =>
-						self.construct_pending_htlc_status(msg, shared_secret, next_hop,
-							chan.context.config().accept_underpaying_htlcs, next_packet_pk_opt),
+						self.construct_pending_htlc_status(
+							msg, counterparty_node_id, shared_secret, next_hop,
+							chan.context.config().accept_underpaying_htlcs, next_packet_pk_opt,
+						),
 					Err(e) => PendingHTLCStatus::Fail(e)
 				};
 				let create_pending_htlc_status = |chan: &Channel<SP>, pending_forward_info: PendingHTLCStatus, error_code: u16| {
@@ -10014,7 +10037,7 @@ where
 				log_error!(logger, "A ChannelManager is stale compared to the current ChannelMonitor!");
 				log_error!(logger, " The channel will be force-closed and the latest commitment transaction from the ChannelMonitor broadcast.");
 				if channel.context.get_latest_monitor_update_id() < monitor.get_latest_update_id() {
-					log_error!(args.logger, " The ChannelMonitor for channel {} is at update_id {} but the ChannelManager is at update_id {}.",
+					log_error!(logger, " The ChannelMonitor for channel {} is at update_id {} but the ChannelManager is at update_id {}.",
 						&channel.context.channel_id(), monitor.get_latest_update_id(), channel.context.get_latest_monitor_update_id());
 				}
 				if channel.get_cur_holder_commitment_transaction_number() > monitor.get_cur_holder_commitment_number() {
@@ -10410,7 +10433,7 @@ where
 			let counterparty_opt = id_to_peer.get(&monitor.get_funding_txo().0.to_channel_id());
 			let chan_id = monitor.get_funding_txo().0.to_channel_id();
 			if counterparty_opt.is_none() {
-				let logger = WithContext::from(&args.logger, None, Some(chan_id));
+				let logger = WithChannelMonitor::from(&args.logger, monitor);
 				for (htlc_source, (htlc, _)) in monitor.get_pending_or_resolved_outbound_htlcs() {
 					if let HTLCSource::OutboundRoute { payment_id, session_priv, path, .. } = htlc_source {
 						if path.hops.is_empty() {

lightning/src/util/logger.rs (+4)

@@ -98,6 +98,10 @@ pub struct Record<'a> {
 	/// The verbosity level of the message.
 	pub level: Level,
 	/// The node id of the peer pertaining to the logged record.
+	///
+	/// Note that in some cases a [`Self::channel_id`] may be filled in but this may still be
+	/// `None`, depending on if the peer information is readily available in LDK when the log is
+	/// generated.
 	pub peer_id: Option<PublicKey>,
 	/// The channel id of the channel pertaining to the logged record. May be a temporary id before
 	/// the channel has been funded.
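For implementors, the practical consequence of the clarified doc is that `peer_id` and `channel_id` must be handled independently: either can be present without the other. A minimal sketch of a `Logger` that prints both, assuming this era's trait where `log` takes a `&Record` (the exact signature has shifted across LDK releases, so treat this as illustrative rather than definitive):

    use lightning::util::logger::{Logger, Record};

    struct StdoutLogger;

    impl Logger for StdoutLogger {
        fn log(&self, record: &Record) {
            // Per the doc comment above, channel_id may be set while peer_id
            // is still None, so neither field implies the other.
            let peer = record.peer_id.as_ref()
                .map(|p| p.to_string()).unwrap_or_else(|| "?".to_owned());
            let chan = record.channel_id.as_ref()
                .map(|c| c.to_string()).unwrap_or_else(|| "?".to_owned());
            println!("{} peer={} chan={} {}", record.level, peer, chan, record.args);
        }
    }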
