Marginally optimize test logging #2766

Merged: 2 commits, Dec 4, 2023
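The change in a nutshell: `TestLogger` keyed its line and context maps by owned `String` module paths, but the module path of a log record is already a `&'static str` (in LDK it originates from `module_path!()` at the log call site), so the maps can store it directly. That removes a `to_string()` allocation on every log call and at every assertion lookup. A standalone sketch of the idea (illustrative map and message, not the actual LDK types):

```rust
use std::collections::HashMap;

fn main() {
    // Before: both key components were owned Strings, so every logged line
    // (and every lookup in an assertion) allocated a fresh module-path String.
    let mut before: HashMap<(String, String), usize> = HashMap::new();
    *before.entry(("lightning::ln::channel".to_string(), "msg".to_string()))
        .or_insert(0) += 1;

    // After: the module path is a &'static str, stored and looked up
    // without any copy.
    let mut after: HashMap<(&'static str, String), usize> = HashMap::new();
    *after.entry(("lightning::ln::channel", "msg".to_string())).or_insert(0) += 1;
    assert_eq!(after.get(&("lightning::ln::channel", "msg".to_string())), Some(&1));
}
```

Hence "marginally": the message component of each key is still an owned `String`, so every log line still pays one allocation for its formatted text.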
14 changes: 7 additions & 7 deletions lightning-background-processor/src/lib.rs
@@ -1375,9 +1375,9 @@ mod tests {
let desired_log_1 = "Calling ChannelManager's timer_tick_occurred".to_string();
let desired_log_2 = "Calling PeerManager's timer_tick_occurred".to_string();
let desired_log_3 = "Rebroadcasting monitor's pending claims".to_string();
-if log_entries.get(&("lightning_background_processor".to_string(), desired_log_1)).is_some() &&
-log_entries.get(&("lightning_background_processor".to_string(), desired_log_2)).is_some() &&
-log_entries.get(&("lightning_background_processor".to_string(), desired_log_3)).is_some() {
+if log_entries.get(&("lightning_background_processor", desired_log_1)).is_some() &&
+log_entries.get(&("lightning_background_processor", desired_log_2)).is_some() &&
+log_entries.get(&("lightning_background_processor", desired_log_3)).is_some() {
break
}
}
@@ -1556,7 +1556,7 @@ mod tests {
loop {
let log_entries = nodes[0].logger.lines.lock().unwrap();
let expected_log = "Persisting scorer".to_string();
-if log_entries.get(&("lightning_background_processor".to_string(), expected_log)).is_some() {
+if log_entries.get(&("lightning_background_processor", expected_log)).is_some() {
break
}
}
@@ -1580,7 +1580,7 @@ mod tests {
$sleep;
let log_entries = $nodes[0].logger.lines.lock().unwrap();
let loop_counter = "Calling ChannelManager's timer_tick_occurred".to_string();
-if *log_entries.get(&("lightning_background_processor".to_string(), loop_counter))
+if *log_entries.get(&("lightning_background_processor", loop_counter))
.unwrap_or(&0) > 1
{
// Wait until the loop has gone around at least twice.
@@ -1792,7 +1792,7 @@ mod tests {

let log_entries = nodes[0].logger.lines.lock().unwrap();
let expected_log = "Persisting scorer after update".to_string();
-assert_eq!(*log_entries.get(&("lightning_background_processor".to_string(), expected_log)).unwrap(), 5);
+assert_eq!(*log_entries.get(&("lightning_background_processor", expected_log)).unwrap(), 5);
}

#[tokio::test]
@@ -1838,7 +1838,7 @@ mod tests {

let log_entries = nodes[0].logger.lines.lock().unwrap();
let expected_log = "Persisting scorer after update".to_string();
-assert_eq!(*log_entries.get(&("lightning_background_processor".to_string(), expected_log)).unwrap(), 5);
+assert_eq!(*log_entries.get(&("lightning_background_processor", expected_log)).unwrap(), 5);
});

let (r1, r2) = tokio::join!(t1, t2);
20 changes: 10 additions & 10 deletions lightning/src/ln/functional_tests.rs
@@ -693,7 +693,7 @@ fn test_update_fee_that_funder_cannot_afford() {
*feerate_lock += 4;
}
nodes[0].node.timer_tick_occurred();
-nodes[0].logger.assert_log("lightning::ln::channel".to_string(), format!("Cannot afford to send new feerate at {}", feerate + 4), 1);
+nodes[0].logger.assert_log("lightning::ln::channel", format!("Cannot afford to send new feerate at {}", feerate + 4), 1);
check_added_monitors!(nodes[0], 0);

const INITIAL_COMMITMENT_NUMBER: u64 = 281474976710654;
@@ -768,7 +768,7 @@ fn test_update_fee_that_funder_cannot_afford() {
//check to see if the funder, who sent the update_fee request, can afford the new fee (funder_balance >= fee+channel_reserve)
//Should produce an error.
nodes[1].node.handle_commitment_signed(&nodes[0].node.get_our_node_id(), &commit_signed_msg);
-nodes[1].logger.assert_log("lightning::ln::channelmanager".to_string(), "Funding remote cannot afford proposed new fee".to_string(), 1);
+nodes[1].logger.assert_log("lightning::ln::channelmanager", "Funding remote cannot afford proposed new fee".to_string(), 1);
check_added_monitors!(nodes[1], 1);
check_closed_broadcast!(nodes[1], true);
check_closed_event!(nodes[1], 1, ClosureReason::ProcessingError { err: String::from("Funding remote cannot afford proposed new fee") },
@@ -1529,7 +1529,7 @@ fn test_fee_spike_violation_fails_htlc() {
},
_ => panic!("Unexpected event"),
};
-nodes[1].logger.assert_log("lightning::ln::channel".to_string(),
+nodes[1].logger.assert_log("lightning::ln::channel",
format!("Attempting to fail HTLC due to fee spike buffer violation in channel {}. Rebalancing is required.", raa_msg.channel_id), 1);

check_added_monitors!(nodes[1], 2);
@@ -1617,7 +1617,7 @@ fn test_chan_reserve_violation_inbound_htlc_outbound_channel() {

nodes[0].node.handle_update_add_htlc(&nodes[1].node.get_our_node_id(), &msg);
// Check that the payment failed and the channel is closed in response to the malicious UpdateAdd.
-nodes[0].logger.assert_log("lightning::ln::channelmanager".to_string(), "Cannot accept HTLC that would put our balance under counterparty-announced channel reserve value".to_string(), 1);
+nodes[0].logger.assert_log("lightning::ln::channelmanager", "Cannot accept HTLC that would put our balance under counterparty-announced channel reserve value".to_string(), 1);
assert_eq!(nodes[0].node.list_channels().len(), 0);
let err_msg = check_closed_broadcast!(nodes[0], true).unwrap();
assert_eq!(err_msg.data, "Cannot accept HTLC that would put our balance under counterparty-announced channel reserve value");
@@ -1796,7 +1796,7 @@ fn test_chan_reserve_violation_inbound_htlc_inbound_chan() {

nodes[1].node.handle_update_add_htlc(&nodes[0].node.get_our_node_id(), &msg);
// Check that the payment failed and the channel is closed in response to the malicious UpdateAdd.
-nodes[1].logger.assert_log("lightning::ln::channelmanager".to_string(), "Remote HTLC add would put them under remote reserve value".to_string(), 1);
+nodes[1].logger.assert_log("lightning::ln::channelmanager", "Remote HTLC add would put them under remote reserve value".to_string(), 1);
assert_eq!(nodes[1].node.list_channels().len(), 1);
let err_msg = check_closed_broadcast!(nodes[1], true).unwrap();
assert_eq!(err_msg.data, "Remote HTLC add would put them under remote reserve value");
@@ -5930,7 +5930,7 @@ fn test_fail_holding_cell_htlc_upon_free() {
// us to surface its failure to the user.
chan_stat = get_channel_value_stat!(nodes[0], nodes[1], chan.2);
assert_eq!(chan_stat.holding_cell_outbound_amount_msat, 0);
-nodes[0].logger.assert_log("lightning::ln::channel".to_string(), format!("Freeing holding cell with 1 HTLC updates in channel {}", chan.2), 1);
+nodes[0].logger.assert_log("lightning::ln::channel", format!("Freeing holding cell with 1 HTLC updates in channel {}", chan.2), 1);

// Check that the payment failed to be sent out.
let events = nodes[0].node.get_and_clear_pending_events();
@@ -6018,7 +6018,7 @@ fn test_free_and_fail_holding_cell_htlcs() {
// to surface its failure to the user. The first payment should succeed.
chan_stat = get_channel_value_stat!(nodes[0], nodes[1], chan.2);
assert_eq!(chan_stat.holding_cell_outbound_amount_msat, 0);
-nodes[0].logger.assert_log("lightning::ln::channel".to_string(), format!("Freeing holding cell with 2 HTLC updates in channel {}", chan.2), 1);
+nodes[0].logger.assert_log("lightning::ln::channel", format!("Freeing holding cell with 2 HTLC updates in channel {}", chan.2), 1);

// Check that the second payment failed to be sent out.
let events = nodes[0].node.get_and_clear_pending_events();
@@ -6292,7 +6292,7 @@ fn test_update_add_htlc_bolt2_receiver_zero_value_msat() {
updates.update_add_htlcs[0].amount_msat = 0;

nodes[1].node.handle_update_add_htlc(&nodes[0].node.get_our_node_id(), &updates.update_add_htlcs[0]);
-nodes[1].logger.assert_log("lightning::ln::channelmanager".to_string(), "Remote side tried to send a 0-msat HTLC".to_string(), 1);
+nodes[1].logger.assert_log("lightning::ln::channelmanager", "Remote side tried to send a 0-msat HTLC".to_string(), 1);
check_closed_broadcast!(nodes[1], true).unwrap();
check_added_monitors!(nodes[1], 1);
check_closed_event!(nodes[1], 1, ClosureReason::ProcessingError { err: "Remote side tried to send a 0-msat HTLC".to_string() },
@@ -9843,10 +9843,10 @@ fn do_test_max_dust_htlc_exposure(dust_outbound_balance: bool, exposure_breach_e
// Outbound dust balance: 6399 sats
let dust_inbound_overflow = dust_inbound_htlc_on_holder_tx_msat * (dust_inbound_htlc_on_holder_tx + 1);
let dust_outbound_overflow = dust_outbound_htlc_on_holder_tx_msat * dust_outbound_htlc_on_holder_tx + dust_inbound_htlc_on_holder_tx_msat;
-nodes[0].logger.assert_log("lightning::ln::channel".to_string(), format!("Cannot accept value that would put our exposure to dust HTLCs at {} over the limit {} on holder commitment tx", if dust_outbound_balance { dust_outbound_overflow } else { dust_inbound_overflow }, max_dust_htlc_exposure_msat), 1);
+nodes[0].logger.assert_log("lightning::ln::channel", format!("Cannot accept value that would put our exposure to dust HTLCs at {} over the limit {} on holder commitment tx", if dust_outbound_balance { dust_outbound_overflow } else { dust_inbound_overflow }, max_dust_htlc_exposure_msat), 1);
} else {
// Outbound dust balance: 5200 sats
-nodes[0].logger.assert_log("lightning::ln::channel".to_string(),
+nodes[0].logger.assert_log("lightning::ln::channel",
format!("Cannot accept value that would put our exposure to dust HTLCs at {} over the limit {} on counterparty commitment tx",
dust_htlc_on_counterparty_tx_msat * (dust_htlc_on_counterparty_tx - 1) + dust_htlc_on_counterparty_tx_msat + 4,
max_dust_htlc_exposure_msat), 1);
2 changes: 1 addition & 1 deletion lightning/src/ln/reload_tests.rs
@@ -526,7 +526,7 @@ fn do_test_data_loss_protect(reconnect_panicing: bool, substantially_old: bool,
// `not_stale` to test the boundary condition.
let pay_params = PaymentParameters::for_keysend(nodes[1].node.get_our_node_id(), 100, false);
let route_params = RouteParameters::from_payment_params_and_value(pay_params, 40000);
-nodes[0].node.send_spontaneous_payment_with_retry(None, RecipientOnionFields::spontaneous_empty(), PaymentId([0; 32]), route_params, Retry::Attempts(0));
+nodes[0].node.send_spontaneous_payment_with_retry(None, RecipientOnionFields::spontaneous_empty(), PaymentId([0; 32]), route_params, Retry::Attempts(0)).unwrap();
check_added_monitors(&nodes[0], 1);
let update_add_commit = SendEvent::from_node(&nodes[0]);

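The reload_tests tweak bundled into this PR appears to be a small test-correctness fix rather than a logging change: the `Result` returned by `send_spontaneous_payment_with_retry` is now unwrapped, so a send that fails to initiate panics at the call site instead of being silently ignored. A minimal sketch of the pattern (hypothetical function, not the LDK API):

```rust
// Sketch: Result is #[must_use], so tests should assert on it
// rather than let a failure slip by.
fn send_payment() -> Result<(), ()> {
    Ok(())
}

fn main() {
    // Ignoring the Result (as the old call effectively did) hides failures;
    // `let _ =` is used here only to silence the unused_must_use lint.
    let _ = send_payment();

    // Unwrapping makes an Err panic at the call site, failing the test.
    send_payment().unwrap();
}
```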
21 changes: 9 additions & 12 deletions lightning/src/util/test_utils.rs
@@ -930,8 +930,8 @@ impl events::MessageSendEventsProvider for TestRoutingMessageHandler {
pub struct TestLogger {
level: Level,
pub(crate) id: String,
-pub lines: Mutex<HashMap<(String, String), usize>>,
-pub context: Mutex<HashMap<(String, Option<PublicKey>, Option<ChannelId>), usize>>,
+pub lines: Mutex<HashMap<(&'static str, String), usize>>,
+pub context: Mutex<HashMap<(&'static str, Option<PublicKey>, Option<ChannelId>), usize>>,
}

impl TestLogger {
@@ -949,7 +949,7 @@ impl TestLogger {
pub fn enable(&mut self, level: Level) {
self.level = level;
}
-pub fn assert_log(&self, module: String, line: String, count: usize) {
+pub fn assert_log(&self, module: &str, line: String, count: usize) {
let log_entries = self.lines.lock().unwrap();
assert_eq!(log_entries.get(&(module, line)), Some(&count));
}
@@ -961,7 +961,7 @@ impl TestLogger {
pub fn assert_log_contains(&self, module: &str, line: &str, count: usize) {
let log_entries = self.lines.lock().unwrap();
let l: usize = log_entries.iter().filter(|&(&(ref m, ref l), _c)| {
-m == module && l.contains(line)
+*m == module && l.contains(line)
}).map(|(_, c) | { c }).sum();
assert_eq!(l, count)
}
@@ -974,7 +974,7 @@ impl TestLogger {
pub fn assert_log_regex(&self, module: &str, pattern: regex::Regex, count: usize) {
let log_entries = self.lines.lock().unwrap();
let l: usize = log_entries.iter().filter(|&(&(ref m, ref l), _c)| {
-m == module && pattern.is_match(&l)
+*m == module && pattern.is_match(&l)
}).map(|(_, c) | { c }).sum();
assert_eq!(l, count)
}
@@ -983,18 +983,15 @@ impl TestLogger {
&self, module: &str, peer_id: Option<PublicKey>, channel_id: Option<ChannelId>, count: usize
) {
let context_entries = self.context.lock().unwrap();
-let l: usize = context_entries.iter()
-.filter(|&(&(ref m, ref p, ref c), _)| m == module && *p == peer_id && *c == channel_id)
-.map(|(_, c) | c)
-.sum();
-assert_eq!(l, count)
+let l = context_entries.get(&(module, peer_id, channel_id)).unwrap();
+assert_eq!(*l, count)
}
}

impl Logger for TestLogger {
fn log(&self, record: Record) {
-*self.lines.lock().unwrap().entry((record.module_path.to_string(), format!("{}", record.args))).or_insert(0) += 1;
-*self.context.lock().unwrap().entry((record.module_path.to_string(), record.peer_id, record.channel_id)).or_insert(0) += 1;
+*self.lines.lock().unwrap().entry((record.module_path, format!("{}", record.args))).or_insert(0) += 1;
+*self.context.lock().unwrap().entry((record.module_path, record.peer_id, record.channel_id)).or_insert(0) += 1;
if record.level >= self.level {
#[cfg(all(not(ldk_bench), feature = "std"))] {
let pfx = format!("{} {} [{}:{}]", self.id, record.level.to_string(), record.module_path, record.line);
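One subtlety in the filter closures above: with keys of type `(&'static str, String)`, destructuring an `iter()` item through `&(&(ref m, ref l), _c)` binds the module component as `&&'static str`, so it needs a deref before it can compare against a `&str` parameter; that is all the `m == module` to `*m == module` changes do. A self-contained sketch of the same pattern (illustrative names, not the LDK code):

```rust
use std::collections::HashMap;

// Count entries for `module` whose message contains `needle`,
// mirroring the shape of TestLogger::assert_log_contains after this PR.
fn count_matching(
    map: &HashMap<(&'static str, String), usize>, module: &str, needle: &str,
) -> usize {
    map.iter()
        .filter(|&(&(ref m, ref l), _c)| {
            // `m` is `&&'static str` here; one deref makes it a plain &str.
            *m == module && l.contains(needle)
        })
        .map(|(_, c)| c)
        .sum()
}

fn main() {
    let mut map: HashMap<(&'static str, String), usize> = HashMap::new();
    *map.entry(("lightning::ln::channel", "Freeing holding cell".to_string()))
        .or_insert(0) += 1;
    assert_eq!(count_matching(&map, "lightning::ln::channel", "holding cell"), 1);
}
```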