@@ -5243,7 +5243,7 @@ where
 .map_err(|e| {
 	let first_hop_key = Some(path.hops.first().unwrap().pubkey);
 	let logger = WithContext::from(&self.logger, first_hop_key, None, Some(*payment_hash));
-	log_error!(logger, "Failed to build an onion for path for payment hash {payment_hash} ");
+	log_error!(logger, "Failed to build an onion for path");
 	e
 })?;
 
@@ -5272,7 +5272,7 @@ where
 );
 log_trace!(
 	logger,
-	"Attempting to send payment with payment hash {payment_hash} along path with next hop {first_chan_scid}"
+	"Attempting to send payment along path with next hop {first_chan_scid}"
 );
 
 let per_peer_state = self.per_peer_state.read().unwrap();
@@ -6746,7 +6746,10 @@ where
 	Some(*next_hop_channel_id),
 	None,
 );
-log_error!(logger, "Channel not found for the passed counterparty node_id {next_node_id} when attempting to forward intercepted HTLC");
+log_error!(
+	logger,
+	"Channel not found when attempting to forward intercepted HTLC"
+);
 return Err(APIError::ChannelUnavailable {
 	err: format!(
 		"Channel with id {next_hop_channel_id} not found for the passed counterparty node_id {next_node_id}"
@@ -7485,8 +7488,13 @@ where
 } else {
 	"alternate"
 };
-log_trace!(logger, "Forwarding HTLC from SCID {} with payment_hash {} and next hop SCID {} over {} with corresponding peer {}",
-	prev_outbound_scid_alias, &payment_hash, short_chan_id, channel_description, &counterparty_node_id);
+log_trace!(
+	logger,
+	"Forwarding HTLC from SCID {} with next hop SCID {} over {}",
+	prev_outbound_scid_alias,
+	short_chan_id,
+	channel_description
+);
 if let Err((reason, msg)) = optimal_channel.queue_add_htlc(
 	*outgoing_amt_msat,
 	*payment_hash,
@@ -7498,13 +7506,7 @@ where
 	&self.fee_estimator,
 	&&logger,
 ) {
-	log_trace!(
-		logger,
-		"Failed to forward HTLC with payment_hash {} to peer {}: {}",
-		&payment_hash,
-		&counterparty_node_id,
-		msg
-	);
+	log_trace!(logger, "Failed to forward HTLC: {}", msg);
 
 	if let Some(chan) = peer_state
 		.channel_by_id
@@ -8188,8 +8190,7 @@ where
 if peer_state.is_connected {
 	if funded_chan.should_disconnect_peer_awaiting_response() {
 		let logger = WithChannelContext::from(&self.logger, &funded_chan.context, None);
-		log_debug!(logger, "Disconnecting peer {} due to not making any progress",
-			counterparty_node_id);
+		log_debug!(logger, "Disconnecting peer due to not making any progress");
 		pending_msg_events.push(MessageSendEvent::HandleError {
 			node_id: counterparty_node_id,
 			action: msgs::ErrorAction::DisconnectPeerWithWarning {
@@ -8560,9 +8561,8 @@ where
 }) => {
 	log_trace!(
 		WithContext::from(&self.logger, None, Some(*channel_id), Some(*payment_hash)),
-		"Failing {}HTLC with payment_hash {} backwards from us: {:?}",
+		"Failing {}HTLC backwards from us: {:?}",
 		if blinded_failure.is_some() { "blinded " } else { "" },
-		&payment_hash,
 		onion_error
 	);
 	// In case of trampoline + phantom we prioritize the trampoline failure over the phantom failure.
@@ -9848,9 +9848,9 @@ This indicates a bug inside LDK. Please report this error at https://github.com/
 let per_peer_state = self.per_peer_state.read().unwrap();
 let peer_state_mutex = per_peer_state.get(counterparty_node_id)
 	.ok_or_else(|| {
-		let err_str = format!("Can't find a peer matching the passed counterparty node_id {counterparty_node_id}");
-		log_error!(logger, "{}", err_str);
+		log_error!(logger, "Can't find peer matching the passed counterparty node_id");
 
+		let err_str = format!("Can't find a peer matching the passed counterparty node_id {counterparty_node_id}");
 		APIError::ChannelUnavailable { err: err_str }
 	})?;
 let mut peer_state_lock = peer_state_mutex.lock().unwrap();
@@ -12013,11 +12013,7 @@ This indicates a bug inside LDK. Please report this error at https://github.com/
 		None,
 	);
 } else {
-	log_trace!(
-		logger,
-		"Failing HTLC with hash {} from our monitor",
-		&htlc_update.payment_hash
-	);
+	log_trace!(logger, "Failing HTLC from our monitor");
 	let failure_reason = LocalHTLCFailureReason::OnChainTimeout;
 	let receiver = HTLCHandlingFailureType::Forward {
 		node_id: Some(counterparty_node_id),
@@ -13684,8 +13680,7 @@ where
 let remove_peer = {
 	log_debug!(
 		WithContext::from(&self.logger, Some(counterparty_node_id), None, None),
-		"Marking channels with {} disconnected and generating channel_updates.",
-		log_pubkey!(counterparty_node_id)
+		"Marking channels disconnected and generating channel_updates.",
 	);
 	if let Some(peer_state_mutex) = per_peer_state.get(&counterparty_node_id) {
 		let mut peer_state_lock = peer_state_mutex.lock().unwrap();
@@ -13870,7 +13865,7 @@ where
 	}
 }
 
-log_debug!(logger, "Generating channel_reestablish events for {}", log_pubkey!(counterparty_node_id) );
+log_debug!(logger, "Generating channel_reestablish events" );
 
 let per_peer_state = self.per_peer_state.read().unwrap();
 if let Some(peer_state_mutex) = per_peer_state.get(&counterparty_node_id) {
@@ -16868,8 +16863,7 @@ where
 	Some(*payment_hash),
 );
 log_info!(logger,
-	"Failing HTLC with hash {} as it is missing in the ChannelMonitor but was present in the (stale) ChannelManager",
-	&payment_hash);
+	"Failing HTLC as it is missing in the ChannelMonitor but was present in the (stale) ChannelManager");
 failed_htlcs.push((
 	channel_htlc_source.clone(),
 	*payment_hash,
@@ -17523,18 +17517,20 @@ where
 // still have an entry for this HTLC in `forward_htlcs` or
 // `pending_intercepted_htlcs`, we were apparently not persisted after
 // the monitor was when forwarding the payment.
-decode_update_add_htlcs.retain(|src_outb_alias, update_add_htlcs| {
-	update_add_htlcs.retain(|update_add_htlc| {
-		let matches = *src_outb_alias == prev_hop_data.prev_outbound_scid_alias &&
-			update_add_htlc.htlc_id == prev_hop_data.htlc_id;
-		if matches {
-			log_info!(logger, "Removing pending to-decode HTLC with hash {} as it was forwarded to the closed channel",
-				&htlc.payment_hash);
-		}
-		!matches
-	});
-	!update_add_htlcs.is_empty()
-});
+decode_update_add_htlcs.retain(
+	|src_outb_alias, update_add_htlcs| {
+		update_add_htlcs.retain(|update_add_htlc| {
+			let matches = *src_outb_alias
+				== prev_hop_data.prev_outbound_scid_alias
+				&& update_add_htlc.htlc_id == prev_hop_data.htlc_id;
+			if matches {
+				log_info!(logger, "Removing pending to-decode HTLC as it was forwarded to the closed channel");
+			}
+			!matches
+		});
+		!update_add_htlcs.is_empty()
+	},
+);
 forward_htlcs.retain(|_, forwards| {
 	forwards.retain(|forward| {
 		if let HTLCForwardInfo::AddHTLC(htlc_info) = forward {