@@ -5323,12 +5323,7 @@ impl<SP: Deref> Channel<SP> where
assert!(!self.context.is_outbound() || self.context.minimum_depth == Some(0),
"Funding transaction broadcast by the local client before it should have - LDK didn't do it!");
self.context.monitor_pending_channel_ready = false;
- let next_per_commitment_point = self.context.holder_signer.as_ref().get_per_commitment_point(self.context.holder_commitment_point.transaction_number(), &self.context.secp_ctx);
- Some(msgs::ChannelReady {
- channel_id: self.context.channel_id(),
- next_per_commitment_point,
- short_channel_id_alias: Some(self.context.outbound_scid_alias),
- })
+ Some(self.get_channel_ready())
} else { None };

let announcement_sigs = self.get_announcement_sigs(node_signer, chain_hash, user_config, best_block_height, logger);
@@ -5626,13 +5621,8 @@ impl<SP: Deref> Channel<SP> where
}

// We have OurChannelReady set!
- let next_per_commitment_point = self.context.holder_signer.as_ref().get_per_commitment_point(self.context.holder_commitment_point.transaction_number(), &self.context.secp_ctx);
return Ok(ReestablishResponses {
- channel_ready: Some(msgs::ChannelReady {
- channel_id: self.context.channel_id(),
- next_per_commitment_point,
- short_channel_id_alias: Some(self.context.outbound_scid_alias),
- }),
+ channel_ready: Some(self.get_channel_ready()),
raa: None, commitment_update: None,
order: RAACommitmentOrder::CommitmentFirst,
shutdown_msg, announcement_sigs,
@@ -5671,12 +5661,7 @@ impl<SP: Deref> Channel<SP> where

let channel_ready = if msg.next_local_commitment_number == 1 && INITIAL_COMMITMENT_NUMBER - self.context.holder_commitment_point.transaction_number() == 1 {
// We should never have to worry about MonitorUpdateInProgress resending ChannelReady
- let next_per_commitment_point = self.context.holder_signer.as_ref().get_per_commitment_point(self.context.holder_commitment_point.transaction_number(), &self.context.secp_ctx);
- Some(msgs::ChannelReady {
- channel_id: self.context.channel_id(),
- next_per_commitment_point,
- short_channel_id_alias: Some(self.context.outbound_scid_alias),
- })
+ Some(self.get_channel_ready())
} else { None };

if msg.next_local_commitment_number == next_counterparty_commitment_number {
@@ -6496,7 +6481,9 @@ impl<SP: Deref> Channel<SP> where
self.context.channel_update_status = status;
}

- fn check_get_channel_ready(&mut self, height: u32) -> Option<msgs::ChannelReady> {
+ fn check_get_channel_ready<L: Deref>(&mut self, height: u32, logger: &L) -> Option<msgs::ChannelReady>
+ where L::Target: Logger
+ {
// Called:
// * always when a new block/transactions are confirmed with the new height
// * when funding is signed with a height of 0
@@ -6548,22 +6535,41 @@ impl<SP: Deref> Channel<SP> where
false
};

- if need_commitment_update {
- if !self.context.channel_state.is_monitor_update_in_progress() {
- if !self.context.channel_state.is_peer_disconnected() {
- let next_per_commitment_point =
- self.context.holder_signer.as_ref().get_per_commitment_point(INITIAL_COMMITMENT_NUMBER - 1, &self.context.secp_ctx);
- return Some(msgs::ChannelReady {
- channel_id: self.context.channel_id,
- next_per_commitment_point,
- short_channel_id_alias: Some(self.context.outbound_scid_alias),
- });
- }
- } else {
- self.context.monitor_pending_channel_ready = true;
- }
+ if !need_commitment_update {
+ log_debug!(logger, "Not producing channel_ready: we do not need a commitment update");
+ return None;
+ }
+
+ if self.context.channel_state.is_monitor_update_in_progress() {
+ log_debug!(logger, "Not producing channel_ready: a monitor update is in progress. Setting monitor_pending_channel_ready.");
+ self.context.monitor_pending_channel_ready = true;
+ return None;
+ }
+
+ if self.context.channel_state.is_peer_disconnected() {
+ log_debug!(logger, "Not producing channel_ready: the peer is disconnected.");
+ return None;
+ }
+
+ if self.context.signer_pending_funding {
+ // TODO: set signer_pending_channel_ready
+ log_debug!(logger, "Can't produce channel_ready: the signer is pending funding.");
+ return None;
+ }
+
+ // TODO: when get_per_commitment_point becomes async, check if the point is
+ // available, if not, set signer_pending_channel_ready and return None
+
+ Some(self.get_channel_ready())
+ }
+
+ fn get_channel_ready(&self) -> msgs::ChannelReady {
+ debug_assert!(self.context.holder_commitment_point.is_available());
+ msgs::ChannelReady {
+ channel_id: self.context.channel_id(),
+ next_per_commitment_point: self.context.holder_commitment_point.current_point(),
+ short_channel_id_alias: Some(self.context.outbound_scid_alias),
}
- None
}

/// When a transaction is confirmed, we check whether it is or spends the funding transaction
@@ -6630,7 +6636,7 @@ impl<SP: Deref> Channel<SP> where
// If we allow 1-conf funding, we may need to check for channel_ready here and
// send it immediately instead of waiting for a best_block_updated call (which
// may have already happened for this block).
- if let Some(channel_ready) = self.check_get_channel_ready(height) {
+ if let Some(channel_ready) = self.check_get_channel_ready(height, logger) {
log_info!(logger, "Sending a channel_ready to our peer for channel {}", &self.context.channel_id);
let announcement_sigs = self.get_announcement_sigs(node_signer, chain_hash, user_config, height, logger);
msgs = (Some(channel_ready), announcement_sigs);
@@ -6696,7 +6702,7 @@ impl<SP: Deref> Channel<SP> where

self.context.update_time_counter = cmp::max(self.context.update_time_counter, highest_header_time);

- if let Some(channel_ready) = self.check_get_channel_ready(height) {
+ if let Some(channel_ready) = self.check_get_channel_ready(height, logger) {
let announcement_sigs = if let Some((chain_hash, node_signer, user_config)) = chain_node_signer {
self.get_announcement_sigs(node_signer, chain_hash, user_config, height, logger)
} else { None };
@@ -7883,7 +7889,7 @@ impl<SP: Deref> OutboundV1Channel<SP> where SP::Target: SignerProvider {
dual_funding_channel_context: None,
};

- let need_channel_ready = channel.check_get_channel_ready(0).is_some();
+ let need_channel_ready = channel.check_get_channel_ready(0, logger).is_some();
channel.monitor_updating_paused(false, false, need_channel_ready, Vec::new(), Vec::new(), Vec::new());
Ok((channel, channel_monitor))
}
@@ -8172,7 +8178,7 @@ impl<SP: Deref> InboundV1Channel<SP> where SP::Target: SignerProvider {
#[cfg(any(dual_funding, splicing))]
dual_funding_channel_context: None,
};
- let need_channel_ready = channel.check_get_channel_ready(0).is_some();
+ let need_channel_ready = channel.check_get_channel_ready(0, logger).is_some();
channel.monitor_updating_paused(false, false, need_channel_ready, Vec::new(), Vec::new(), Vec::new());

Ok((channel, funding_signed, channel_monitor))
@@ -11279,6 +11285,6 @@ mod tests {
// Clear the ChannelState::WaitingForBatch only when called by ChannelManager.
node_a_chan.set_batch_ready();
assert_eq!(node_a_chan.context.channel_state, ChannelState::AwaitingChannelReady(AwaitingChannelReadyFlags::THEIR_CHANNEL_READY));
- assert!(node_a_chan.check_get_channel_ready(0).is_some());
+ assert!(node_a_chan.check_get_channel_ready(0, &&logger).is_some());
}
}