
Commit ed8da60

Deduplicate node id logging
1 parent f63066e commit ed8da60

File tree: 2 files changed (+55, -82 lines)
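
A minimal, self-contained sketch of the pattern this commit relies on: the logger context, rather than the message text, carries the peer node id, so repeating the id in the format string only duplicates it. The PrintLogger and WithPeerContext types below are hypothetical stand-ins for illustration only, not LDK's actual Logger and WithContext APIs.

use std::fmt;

/// A base logger that just prints records (hypothetical stand-in).
struct PrintLogger;

impl PrintLogger {
    fn log(&self, context: &str, msg: fmt::Arguments) {
        println!("[peer {}] {}", context, msg);
    }
}

/// Pairs a logger with an optional peer node id so every record it emits is
/// already tagged with that id and call sites can drop it from the text.
struct WithPeerContext<'a> {
    inner: &'a PrintLogger,
    peer_node_id: Option<String>,
}

impl<'a> WithPeerContext<'a> {
    fn from(inner: &'a PrintLogger, peer_node_id: Option<String>) -> Self {
        Self { inner, peer_node_id }
    }

    fn log_trace(&self, msg: fmt::Arguments) {
        let ctx = self.peer_node_id.as_deref().unwrap_or("<unknown>");
        self.inner.log(ctx, msg);
    }
}

fn main() {
    let base = PrintLogger;
    let node_id = "02abc123".to_string();
    let logger = WithPeerContext::from(&base, Some(node_id.clone()));

    // Before: the node id appears in the context *and* in the message text.
    logger.log_trace(format_args!("Got second connection with {}, closing", node_id));

    // After: the context alone identifies the peer.
    logger.log_trace(format_args!("Got second connection, closing"));
}

Running the sketch prints the peer id once per record from the context, whether or not the message text repeats it, which is why the diff below can drop the id from each format string without losing information.
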

lightning/src/ln/channelmanager.rs

Lines changed: 12 additions & 12 deletions
@@ -6746,7 +6746,10 @@ where
 Some(*next_hop_channel_id),
 None,
 );
-log_error!(logger, "Channel not found for the passed counterparty node_id {next_node_id} when attempting to forward intercepted HTLC");
+log_error!(
+logger,
+"Channel not found when attempting to forward intercepted HTLC"
+);
 return Err(APIError::ChannelUnavailable {
 err: format!(
 "Channel with id {next_hop_channel_id} not found for the passed counterparty node_id {next_node_id}"
@@ -7485,8 +7488,8 @@ where
 } else {
 "alternate"
 };
-log_trace!(logger, "Forwarding HTLC from SCID {} with payment_hash {} and next hop SCID {} over {} with corresponding peer {}",
-prev_outbound_scid_alias, &payment_hash, short_chan_id, channel_description, &counterparty_node_id);
+log_trace!(logger, "Forwarding HTLC from SCID {} with payment_hash {} and next hop SCID {} over {}",
+prev_outbound_scid_alias, &payment_hash, short_chan_id, channel_description);
 if let Err((reason, msg)) = optimal_channel.queue_add_htlc(
 *outgoing_amt_msat,
 *payment_hash,
@@ -7500,9 +7503,8 @@ where
 ) {
 log_trace!(
 logger,
-"Failed to forward HTLC with payment_hash {} to peer {}: {}",
+"Failed to forward HTLC with payment_hash {}: {}",
 &payment_hash,
-&counterparty_node_id,
 msg
 );

@@ -8188,8 +8190,7 @@ where
 if peer_state.is_connected {
 if funded_chan.should_disconnect_peer_awaiting_response() {
 let logger = WithChannelContext::from(&self.logger, &funded_chan.context, None);
-log_debug!(logger, "Disconnecting peer {} due to not making any progress",
-counterparty_node_id);
+log_debug!(logger, "Disconnecting peer due to not making any progress");
 pending_msg_events.push(MessageSendEvent::HandleError {
 node_id: counterparty_node_id,
 action: msgs::ErrorAction::DisconnectPeerWithWarning {
@@ -9848,9 +9849,9 @@ This indicates a bug inside LDK. Please report this error at https://github.com/
 let per_peer_state = self.per_peer_state.read().unwrap();
 let peer_state_mutex = per_peer_state.get(counterparty_node_id)
 .ok_or_else(|| {
-let err_str = format!("Can't find a peer matching the passed counterparty node_id {counterparty_node_id}");
-log_error!(logger, "{}", err_str);
+log_error!(logger, "Can't find peer matching the passed counterparty node_id");

+let err_str = format!("Can't find a peer matching the passed counterparty node_id {counterparty_node_id}");
 APIError::ChannelUnavailable { err: err_str }
 })?;
 let mut peer_state_lock = peer_state_mutex.lock().unwrap();
@@ -13684,8 +13685,7 @@ where
 let remove_peer = {
 log_debug!(
 WithContext::from(&self.logger, Some(counterparty_node_id), None, None),
-"Marking channels with {} disconnected and generating channel_updates.",
-log_pubkey!(counterparty_node_id)
+"Marking channels disconnected and generating channel_updates.",
 );
 if let Some(peer_state_mutex) = per_peer_state.get(&counterparty_node_id) {
 let mut peer_state_lock = peer_state_mutex.lock().unwrap();
@@ -13870,7 +13870,7 @@ where
 }
 }

-log_debug!(logger, "Generating channel_reestablish events for {}", log_pubkey!(counterparty_node_id));
+log_debug!(logger, "Generating channel_reestablish events");

 let per_peer_state = self.per_peer_state.read().unwrap();
 if let Some(peer_state_mutex) = per_peer_state.get(&counterparty_node_id) {

lightning/src/ln/peer_handler.rs

Lines changed: 43 additions & 70 deletions
@@ -1729,12 +1729,12 @@ where
 /// Append a message to a peer's pending outbound/write buffer
 fn enqueue_message<M: wire::Type>(&self, peer: &mut Peer, message: &M) {
 let their_node_id = peer.their_node_id.map(|p| p.0);
-if let Some(node_id) = their_node_id {
+if their_node_id.is_some() {
 let logger = WithContext::from(&self.logger, their_node_id, None, None);
 if is_gossip_msg(message.type_id()) {
-log_gossip!(logger, "Enqueueing message {:?} to {}", message, node_id);
+log_gossip!(logger, "Enqueueing message {:?}", message);
 } else {
-log_trace!(logger, "Enqueueing message {:?} to {}", message, node_id);
+log_trace!(logger, "Enqueueing message {:?}", message);
 }
 } else {
 debug_assert!(false, "node_id should be set by the time we send a message");
@@ -1842,7 +1842,7 @@ where
 let logger = WithContext::from(&self.logger, Some(their_node_id), None, None);
 match self.node_id_to_descriptor.lock().unwrap().entry(their_node_id) {
 hash_map::Entry::Occupied(e) => {
-log_trace!(logger, "Got second connection with {}, closing", their_node_id);
+log_trace!(logger, "Got second connection, closing");
 // Unset `their_node_id` so that we don't generate a peer_disconnected event
 peer.their_node_id = None;
 // Check that the peers map is consistent with the
@@ -1852,7 +1852,7 @@ where
 return Err(PeerHandleError { })
 },
 hash_map::Entry::Vacant(entry) => {
-log_debug!(logger, "Finished noise handshake for connection with {}", their_node_id);
+log_debug!(logger, "Finished noise handshake for connection");
 entry.insert(peer_descriptor.clone())
 },
 };
@@ -2141,8 +2141,7 @@ where
 if msg.features.requires_unknown_bits_from(&our_features) {
 log_debug!(
 logger,
-"Peer {} requires features unknown to us: {:?}",
-their_node_id,
+"Peer requires features unknown to us: {:?}",
 msg.features.required_unknown_bits_from(&our_features)
 );
 return Err(PeerHandleError {}.into());
@@ -2162,12 +2161,7 @@ where
 return Err(PeerHandleError {}.into());
 }

-log_info!(
-logger,
-"Received peer Init message from {}: {}",
-their_node_id,
-msg.features
-);
+log_info!(logger, "Received Init message: {}", msg.features);

 // For peers not supporting gossip queries start sync now, otherwise wait until we receive a filter.
 if msg.features.initial_routing_sync() && !msg.features.supports_gossip_queries() {
@@ -2177,20 +2171,12 @@ where
 let inbound = peer_lock.inbound_connection;
 let route_handler = &self.message_handler.route_handler;
 if let Err(()) = route_handler.peer_connected(their_node_id, &msg, inbound) {
-log_debug!(
-logger,
-"Route Handler decided we couldn't communicate with peer {}",
-their_node_id,
-);
+log_debug!(logger, "Route Handler decided we couldn't communicate with peer");
 return Err(PeerHandleError {}.into());
 }
 let chan_handler = &self.message_handler.chan_handler;
 if let Err(()) = chan_handler.peer_connected(their_node_id, &msg, inbound) {
-log_debug!(
-logger,
-"Channel Handler decided we couldn't communicate with peer {}",
-their_node_id,
-);
+log_debug!(logger, "Channel Handler decided we couldn't communicate with peer");
 self.message_handler.route_handler.peer_disconnected(their_node_id);
 return Err(PeerHandleError {}.into());
 }
@@ -2209,8 +2195,7 @@ where
 if let Err(()) = custom_handler.peer_connected(their_node_id, &msg, inbound) {
 log_debug!(
 logger,
-"Custom Message Handler decided we couldn't communicate with peer {}",
-their_node_id,
+"Custom Message Handler decided we couldn't communicate with peer"
 );
 self.message_handler.route_handler.peer_disconnected(their_node_id);
 self.message_handler.chan_handler.peer_disconnected(their_node_id);
@@ -2221,8 +2206,7 @@ where
 if let Err(()) = sends_handler.peer_connected(their_node_id, &msg, inbound) {
 log_debug!(
 logger,
-"Sending-Only Message Handler decided we couldn't communicate with peer {}",
-their_node_id,
+"Sending-Only Message Handler decided we couldn't communicate with peer"
 );
 self.message_handler.route_handler.peer_disconnected(their_node_id);
 self.message_handler.chan_handler.peer_disconnected(their_node_id);
@@ -2235,7 +2219,7 @@ where
 peer_lock.their_features = Some(msg.features);
 return Ok(None);
 } else if peer_lock.their_features.is_none() {
-log_debug!(logger, "Peer {} sent non-Init first message", their_node_id);
+log_debug!(logger, "Peer sent non-Init first message");
 return Err(PeerHandleError {}.into());
 }

@@ -2407,9 +2391,9 @@ where
 their_node_id: PublicKey, logger: &WithContext<'a, L>,
 ) -> Result<Option<BroadcastGossipMessage>, MessageHandlingError> {
 if is_gossip_msg(message.type_id()) {
-log_gossip!(logger, "Received message {:?} from {}", message, their_node_id);
+log_gossip!(logger, "Received message {:?}", message);
 } else {
-log_trace!(logger, "Received message {:?} from {}", message, their_node_id);
+log_trace!(logger, "Received message {:?}", message);
 }

 let mut should_forward = None;
@@ -2435,12 +2419,7 @@ where
 }
 },
 wire::Message::Warning(msg) => {
-log_debug!(
-logger,
-"Got warning message from {}: {}",
-their_node_id,
-PrintableString(&msg.data)
-);
+log_debug!(logger, "Got warning message: {}", PrintableString(&msg.data));
 },

 wire::Message::Ping(msg) => {
@@ -3128,8 +3107,7 @@ where
 Some(msg.channel_id),
 None
 ),
-"Handling Shutdown event in peer_handler for node {}",
-node_id
+"Handling Shutdown event in peer_handler",
 );
 self.enqueue_message(&mut *get_peer_for_forwarding!(node_id)?, msg);
 },
@@ -3239,20 +3217,23 @@ where
 }
 },
 MessageSendEvent::SendChannelUpdate { ref node_id, ref msg } => {
-log_trace!(WithContext::from(&self.logger, Some(*node_id), None, None), "Handling SendChannelUpdate event in peer_handler for node {} for channel {}",
-node_id, msg.contents.short_channel_id);
+log_trace!(
+WithContext::from(&self.logger, Some(*node_id), None, None),
+"Handling SendChannelUpdate event in peer_handler for channel {}",
+msg.contents.short_channel_id
+);
 self.enqueue_message(&mut *get_peer_for_forwarding!(node_id)?, msg);
 },
 MessageSendEvent::HandleError { node_id, action } => {
 let logger = WithContext::from(&self.logger, Some(node_id), None, None);
 match action {
 msgs::ErrorAction::DisconnectPeer { msg } => {
 if let Some(msg) = msg.as_ref() {
-log_trace!(logger, "Handling DisconnectPeer HandleError event in peer_handler for node {} with message {}",
-node_id, msg.data);
+log_trace!(logger, "Handling DisconnectPeer HandleError event in peer_handler with message {}",
+msg.data);
 } else {
-log_trace!(logger, "Handling DisconnectPeer HandleError event in peer_handler for node {}",
-node_id);
+log_trace!(logger, "Handling DisconnectPeer HandleError event in peer_handler",
+);
 }
 // We do not have the peers write lock, so we just store that we're
 // about to disconnect the peer and do it after we finish
@@ -3263,8 +3244,8 @@ where
 peers_to_disconnect.insert(node_id, msg);
 },
 msgs::ErrorAction::DisconnectPeerWithWarning { msg } => {
-log_trace!(logger, "Handling DisconnectPeer HandleError event in peer_handler for node {} with message {}",
-node_id, msg.data);
+log_trace!(logger, "Handling DisconnectPeer HandleError event in peer_handler with message {}",
+msg.data);
 // We do not have the peers write lock, so we just store that we're
 // about to disconnect the peer and do it after we finish
 // processing most messages.
@@ -3275,21 +3256,19 @@ where
 log_given_level!(
 logger,
 level,
-"Received a HandleError event to be ignored for node {}",
-node_id,
+"Received a HandleError event to be ignored",
 );
 },
 msgs::ErrorAction::IgnoreDuplicateGossip => {},
 msgs::ErrorAction::IgnoreError => {
 log_debug!(
 logger,
-"Received a HandleError event to be ignored for node {}",
-node_id,
+"Received a HandleError event to be ignored",
 );
 },
 msgs::ErrorAction::SendErrorMessage { ref msg } => {
-log_trace!(logger, "Handling SendErrorMessage HandleError event in peer_handler for node {} with message {}",
-node_id,
+log_trace!(logger, "Handling SendErrorMessage HandleError event in peer_handler with message {}",
+
 msg.data);
 self.enqueue_message(
 &mut *get_peer_for_forwarding!(&node_id)?,
@@ -3300,8 +3279,8 @@ where
 ref msg,
 ref log_level,
 } => {
-log_given_level!(logger, *log_level, "Handling SendWarningMessage HandleError event in peer_handler for node {} with message {}",
-node_id,
+log_given_level!(logger, *log_level, "Handling SendWarningMessage HandleError event in peer_handler with message {}",
+
 msg.data);
 self.enqueue_message(
 &mut *get_peer_for_forwarding!(&node_id)?,
@@ -3311,30 +3290,30 @@ where
 }
 },
 MessageSendEvent::SendChannelRangeQuery { ref node_id, ref msg } => {
-log_gossip!(WithContext::from(&self.logger, Some(*node_id), None, None), "Handling SendChannelRangeQuery event in peer_handler for node {} with first_blocknum={}, number_of_blocks={}",
-node_id,
+log_gossip!(WithContext::from(&self.logger, Some(*node_id), None, None), "Handling SendChannelRangeQuery event in peer_handler with first_blocknum={}, number_of_blocks={}",
+
 msg.first_blocknum,
 msg.number_of_blocks);
 self.enqueue_message(&mut *get_peer_for_forwarding!(node_id)?, msg);
 },
 MessageSendEvent::SendShortIdsQuery { ref node_id, ref msg } => {
-log_gossip!(WithContext::from(&self.logger, Some(*node_id), None, None), "Handling SendShortIdsQuery event in peer_handler for node {} with num_scids={}",
-node_id,
+log_gossip!(WithContext::from(&self.logger, Some(*node_id), None, None), "Handling SendShortIdsQuery event in peer_handler with num_scids={}",
+
 msg.short_channel_ids.len());
 self.enqueue_message(&mut *get_peer_for_forwarding!(node_id)?, msg);
 },
 MessageSendEvent::SendReplyChannelRange { ref node_id, ref msg } => {
-log_gossip!(WithContext::from(&self.logger, Some(*node_id), None, None), "Handling SendReplyChannelRange event in peer_handler for node {} with num_scids={} first_blocknum={} number_of_blocks={}, sync_complete={}",
-node_id,
+log_gossip!(WithContext::from(&self.logger, Some(*node_id), None, None), "Handling SendReplyChannelRange event in peer_handler with num_scids={} first_blocknum={} number_of_blocks={}, sync_complete={}",
+
 msg.short_channel_ids.len(),
 msg.first_blocknum,
 msg.number_of_blocks,
 msg.sync_complete);
 self.enqueue_message(&mut *get_peer_for_forwarding!(node_id)?, msg);
 },
 MessageSendEvent::SendGossipTimestampFilter { ref node_id, ref msg } => {
-log_gossip!(WithContext::from(&self.logger, Some(*node_id), None, None), "Handling SendGossipTimestampFilter event in peer_handler for node {} with first_timestamp={}, timestamp_range={}",
-node_id,
+log_gossip!(WithContext::from(&self.logger, Some(*node_id), None, None), "Handling SendGossipTimestampFilter event in peer_handler with first_timestamp={}, timestamp_range={}",
+
 msg.first_timestamp,
 msg.timestamp_range);
 self.enqueue_message(&mut *get_peer_for_forwarding!(node_id)?, msg);
@@ -3445,8 +3424,7 @@ where
 if let Some((node_id, _)) = peer.their_node_id {
 log_trace!(
 WithContext::from(&self.logger, Some(node_id), None, None),
-"Disconnecting peer with id {} due to {}",
-node_id,
+"Disconnecting peer due to {}",
 reason
 );
 self.message_handler.route_handler.peer_disconnected(node_id);
@@ -3471,12 +3449,7 @@ where
 let peer = peer_lock.lock().unwrap();
 if let Some((node_id, _)) = peer.their_node_id {
 let logger = WithContext::from(&self.logger, Some(node_id), None, None);
-log_trace!(
-logger,
-"Handling disconnection of peer {} because {}",
-node_id,
-reason
-);
+log_trace!(logger, "Handling disconnection of peer because {}", reason);
 let removed = self.node_id_to_descriptor.lock().unwrap().remove(&node_id);
 debug_assert!(removed.is_some(), "descriptor maps should be consistent");
 if !peer.handshake_complete() {
