Skip to content

Commit 1e86521

Browse files
Rebuild manager forwarded htlcs maps from Channels
On `ChannelManager` deserialization, attempt to rebuild the `forward_htlcs`,
`decode_update_add_htlcs`, and `pending_intercepted_htlcs` maps from the
committed update_add HTLCs in the deserialized `Channel`s, rather than relying
on the maps serialized by the manager itself. This is a step towards removing
`ChannelManager` persistence. If the manager was last serialized on LDK 0.2 or
earlier (i.e. no rebuilt `decode_update_add_htlcs` entries are available), fall
back to the legacy serialized maps. Also deduplicates pending intercept events
on rebuild, makes `reload_node!` reuse the node's current config instead of the
test default, and adds reload tests covering HTLCs pending forward (normal and
intercepted) on the outbound edge across a manager reload.
1 parent 9a7c3f5 commit 1e86521

File tree

3 files changed

+126
-13
lines changed

3 files changed

+126
-13
lines changed

lightning/src/ln/channelmanager.rs

Lines changed: 36 additions & 11 deletions
Original file line numberDiff line numberDiff line change
@@ -11509,6 +11509,7 @@ This indicates a bug inside LDK. Please report this error at https://github.com/
1150911509

1151011510
if !new_intercept_events.is_empty() {
1151111511
let mut events = self.pending_events.lock().unwrap();
11512+
new_intercept_events.retain(|new_ev| !events.contains(new_ev));
1151211513
events.append(&mut new_intercept_events);
1151311514
}
1151411515
}
@@ -17153,7 +17154,11 @@ where
1715317154

1715417155
const MAX_ALLOC_SIZE: usize = 1024 * 64;
1715517156
let forward_htlcs_count: u64 = Readable::read(reader)?;
17156-
let mut forward_htlcs = hash_map_with_capacity(cmp::min(forward_htlcs_count as usize, 128));
17157+
// This map is read but may no longer be used because we'll attempt to rebuild `forward_htlcs`
17158+
// from the `Channel{Monitor}`s instead, as a step towards getting rid of `ChannelManager`
17159+
// persistence.
17160+
let mut forward_htlcs_legacy: HashMap<u64, Vec<HTLCForwardInfo>> =
17161+
hash_map_with_capacity(cmp::min(forward_htlcs_count as usize, 128));
1715717162
for _ in 0..forward_htlcs_count {
1715817163
let short_channel_id = Readable::read(reader)?;
1715917164
let pending_forwards_count: u64 = Readable::read(reader)?;
@@ -17164,7 +17169,7 @@ where
1716417169
for _ in 0..pending_forwards_count {
1716517170
pending_forwards.push(Readable::read(reader)?);
1716617171
}
17167-
forward_htlcs.insert(short_channel_id, pending_forwards);
17172+
forward_htlcs_legacy.insert(short_channel_id, pending_forwards);
1716817173
}
1716917174

1717017175
let claimable_htlcs_count: u64 = Readable::read(reader)?;
@@ -17252,12 +17257,18 @@ where
1725217257
};
1725317258
}
1725417259

17260+
// Some maps are read but may no longer be used because we attempt to rebuild pending HTLC
17261+
// forwards from the `Channel{Monitor}`s instead, as a step towards getting rid of
17262+
// `ChannelManager` persistence.
17263+
let mut pending_intercepted_htlcs_legacy: Option<HashMap<InterceptId, PendingAddHTLCInfo>> =
17264+
Some(new_hash_map());
17265+
let mut decode_update_add_htlcs_legacy: Option<HashMap<u64, Vec<msgs::UpdateAddHTLC>>> =
17266+
None;
17267+
1725517268
// pending_outbound_payments_no_retry is for compatibility with 0.0.101 clients.
1725617269
let mut pending_outbound_payments_no_retry: Option<HashMap<PaymentId, HashSet<[u8; 32]>>> =
1725717270
None;
1725817271
let mut pending_outbound_payments = None;
17259-
let mut pending_intercepted_htlcs: Option<HashMap<InterceptId, PendingAddHTLCInfo>> =
17260-
Some(new_hash_map());
1726117272
let mut received_network_pubkey: Option<PublicKey> = None;
1726217273
let mut fake_scid_rand_bytes: Option<[u8; 32]> = None;
1726317274
let mut probing_cookie_secret: Option<[u8; 32]> = None;
@@ -17275,14 +17286,12 @@ where
1727517286
let mut in_flight_monitor_updates: Option<
1727617287
HashMap<(PublicKey, ChannelId), Vec<ChannelMonitorUpdate>>,
1727717288
> = None;
17278-
let mut decode_update_add_htlcs_legacy: Option<HashMap<u64, Vec<msgs::UpdateAddHTLC>>> =
17279-
None;
1728017289
let mut inbound_payment_id_secret = None;
1728117290
let mut peer_storage_dir: Option<Vec<(PublicKey, Vec<u8>)>> = None;
1728217291
let mut async_receive_offer_cache: AsyncReceiveOfferCache = AsyncReceiveOfferCache::new();
1728317292
read_tlv_fields!(reader, {
1728417293
(1, pending_outbound_payments_no_retry, option),
17285-
(2, pending_intercepted_htlcs, option),
17294+
(2, pending_intercepted_htlcs_legacy, option),
1728617295
(3, pending_outbound_payments, option),
1728717296
(4, pending_claiming_payments, option),
1728817297
(5, received_network_pubkey, option),
@@ -17704,7 +17713,7 @@ where
1770417713
"HTLC was forwarded to the closed channel",
1770517714
&args.logger,
1770617715
);
17707-
forward_htlcs.retain(|_, forwards| {
17716+
forward_htlcs_legacy.retain(|_, forwards| {
1770817717
forwards.retain(|forward| {
1770917718
if let HTLCForwardInfo::AddHTLC(htlc_info) = forward {
1771017719
if pending_forward_matches_htlc(&htlc_info) {
@@ -17716,7 +17725,7 @@ where
1771617725
});
1771717726
!forwards.is_empty()
1771817727
});
17719-
pending_intercepted_htlcs.as_mut().unwrap().retain(|intercepted_id, htlc_info| {
17728+
pending_intercepted_htlcs_legacy.as_mut().unwrap().retain(|intercepted_id, htlc_info| {
1772017729
if pending_forward_matches_htlc(&htlc_info) {
1772117730
log_info!(logger, "Removing pending intercepted HTLC with hash {} as it was forwarded to the closed channel {}",
1772217731
&htlc.payment_hash, &monitor.channel_id());
@@ -18224,6 +18233,22 @@ where
1822418233
)
1822518234
.with_async_payments_offers_cache(async_receive_offer_cache);
1822618235

18236+
// If we are reading from a `ChannelManager` that was last serialized on LDK 0.2 or earlier, we
18237+
// won't have been able to rebuild `decode_update_add_htlcs` from `Channel`s and should use
18238+
// the legacy serialized maps instead.
18239+
// TODO: if we read an upgraded channel but there just happened to be no committed update_adds
18240+
// present, we'll use the old maps here. Maybe that's fine but we might want to add a flag in
18241+
// the `Channel` that indicates it is upgraded and will serialize committed update_adds.
18242+
let (forward_htlcs, decode_update_add_htlcs, pending_intercepted_htlcs) =
18243+
if decode_update_add_htlcs.is_empty() {
18244+
(
18245+
forward_htlcs_legacy,
18246+
decode_update_add_htlcs_legacy,
18247+
pending_intercepted_htlcs_legacy.unwrap(),
18248+
)
18249+
} else {
18250+
(new_hash_map(), decode_update_add_htlcs, new_hash_map())
18251+
};
1822718252
let channel_manager = ChannelManager {
1822818253
chain_hash,
1822918254
fee_estimator: bounded_fee_estimator,
@@ -18236,10 +18261,10 @@ where
1823618261

1823718262
inbound_payment_key: expanded_inbound_key,
1823818263
pending_outbound_payments: pending_outbounds,
18239-
pending_intercepted_htlcs: Mutex::new(pending_intercepted_htlcs.unwrap()),
18264+
pending_intercepted_htlcs: Mutex::new(pending_intercepted_htlcs),
1824018265

1824118266
forward_htlcs: Mutex::new(forward_htlcs),
18242-
decode_update_add_htlcs: Mutex::new(decode_update_add_htlcs_legacy),
18267+
decode_update_add_htlcs: Mutex::new(decode_update_add_htlcs),
1824318268
claimable_payments: Mutex::new(ClaimablePayments {
1824418269
claimable_payments,
1824518270
pending_claiming_payments: pending_claiming_payments.unwrap(),

lightning/src/ln/functional_test_utils.rs

Lines changed: 2 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1382,9 +1382,10 @@ macro_rules! reload_node {
13821382
$node.onion_messenger.set_async_payments_handler(&$new_channelmanager);
13831383
};
13841384
($node: expr, $chanman_encoded: expr, $monitors_encoded: expr, $persister: ident, $new_chain_monitor: ident, $new_channelmanager: ident) => {
1385+
let config = $node.node.get_current_config();
13851386
reload_node!(
13861387
$node,
1387-
test_default_channel_config(),
1388+
config,
13881389
$chanman_encoded,
13891390
$monitors_encoded,
13901391
$persister,

lightning/src/ln/reload_tests.rs

Lines changed: 88 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -20,6 +20,7 @@ use crate::chain::transaction::OutPoint;
2020
use crate::events::{ClosureReason, Event, HTLCHandlingFailureType};
2121
use crate::ln::channelmanager::{ChannelManager, ChannelManagerReadArgs, PaymentId, RecipientOnionFields, RAACommitmentOrder};
2222
use crate::ln::msgs;
23+
use crate::ln::outbound_payment::Retry;
2324
use crate::ln::types::ChannelId;
2425
use crate::ln::msgs::{BaseMessageHandler, ChannelMessageHandler, RoutingMessageHandler, ErrorAction, MessageSendEvent};
2526
use crate::util::test_channel_signer::TestChannelSigner;
@@ -508,7 +509,6 @@ fn test_manager_serialize_deserialize_inconsistent_monitor() {
508509

509510
#[cfg(feature = "std")]
510511
fn do_test_data_loss_protect(reconnect_panicing: bool, substantially_old: bool, not_stale: bool) {
511-
use crate::ln::channelmanager::Retry;
512512
use crate::types::string::UntrustedString;
513513
// When we get a data_loss_protect proving we're behind, we immediately panic as the
514514
// chain::Watch API requirements have been violated (e.g. the user restored from a backup). The
@@ -1173,6 +1173,93 @@ fn removed_payment_no_manager_persistence() {
11731173
expect_payment_failed!(nodes[0], payment_hash, false);
11741174
}
11751175

1176+
#[test]
1177+
fn manager_persisted_pre_htlc_forward_on_outbound_edge() {
1178+
do_manager_persisted_pre_htlc_forward_on_outbound_edge(false);
1179+
}
1180+
1181+
#[test]
1182+
fn manager_persisted_pre_intercept_forward_on_outbound_edge() {
1183+
do_manager_persisted_pre_htlc_forward_on_outbound_edge(true);
1184+
}
1185+
1186+
fn do_manager_persisted_pre_htlc_forward_on_outbound_edge(intercept_htlc: bool) {
1187+
let chanmon_cfgs = create_chanmon_cfgs(3);
1188+
let node_cfgs = create_node_cfgs(3, &chanmon_cfgs);
1189+
let persister;
1190+
let new_chain_monitor;
1191+
let mut intercept_forwards_config = test_default_channel_config();
1192+
intercept_forwards_config.accept_intercept_htlcs = true;
1193+
let node_chanmgrs = create_node_chanmgrs(3, &node_cfgs, &[None, Some(intercept_forwards_config), None]);
1194+
let nodes_1_deserialized;
1195+
let mut nodes = create_network(3, &node_cfgs, &node_chanmgrs);
1196+
1197+
let chan_id_1 = create_announced_chan_between_nodes(&nodes, 0, 1).2;
1198+
let chan_id_2 = create_announced_chan_between_nodes(&nodes, 1, 2).2;
1199+
1200+
let intercept_scid = nodes[1].node.get_intercept_scid();
1201+
1202+
// Lock in the HTLC from node_a <> node_b.
1203+
let amt_msat = 5000;
1204+
let (mut route, payment_hash, payment_preimage, payment_secret) = get_route_and_payment_hash!(nodes[0], nodes[2], amt_msat);
1205+
if intercept_htlc {
1206+
route.paths[0].hops[1].short_channel_id = intercept_scid;
1207+
}
1208+
nodes[0].node.send_payment_with_route(route, payment_hash, RecipientOnionFields::secret_only(payment_secret), PaymentId(payment_hash.0)).unwrap();
1209+
check_added_monitors(&nodes[0], 1);
1210+
let updates = get_htlc_update_msgs(&nodes[0], &nodes[1].node.get_our_node_id());
1211+
nodes[1].node.handle_update_add_htlc(nodes[0].node.get_our_node_id(), &updates.update_add_htlcs[0]);
1212+
do_commitment_signed_dance(&nodes[1], &nodes[0], &updates.commitment_signed, false, false);
1213+
1214+
// Decode the HTLC onion but don't forward it to the next hop, such that the HTLC ends up in
1215+
// `ChannelManager::forward_htlcs` or `ChannelManager::pending_intercepted_htlcs`.
1216+
nodes[1].node.process_pending_update_add_htlcs();
1217+
1218+
// Disconnect peers and reload the forwarding node_b.
1219+
nodes[0].node.peer_disconnected(nodes[1].node.get_our_node_id());
1220+
nodes[2].node.peer_disconnected(nodes[1].node.get_our_node_id());
1221+
1222+
let node_b_encoded = nodes[1].node.encode();
1223+
1224+
let chan_0_monitor_serialized = get_monitor!(nodes[1], chan_id_1).encode();
1225+
let chan_1_monitor_serialized = get_monitor!(nodes[1], chan_id_2).encode();
1226+
reload_node!(nodes[1], node_b_encoded, &[&chan_0_monitor_serialized, &chan_1_monitor_serialized], persister, new_chain_monitor, nodes_1_deserialized);
1227+
1228+
reconnect_nodes(ReconnectArgs::new(&nodes[1], &nodes[0]));
1229+
let mut args_b_c = ReconnectArgs::new(&nodes[1], &nodes[2]);
1230+
args_b_c.send_channel_ready = (true, true);
1231+
args_b_c.send_announcement_sigs = (true, true);
1232+
reconnect_nodes(args_b_c);
1233+
1234+
// Forward the HTLC and ensure we can claim it post-reload.
1235+
nodes[1].node.process_pending_htlc_forwards();
1236+
1237+
if intercept_htlc {
1238+
let events = nodes[1].node.get_and_clear_pending_events();
1239+
assert_eq!(events.len(), 1);
1240+
let (intercept_id, expected_outbound_amt_msat) = match events[0] {
1241+
Event::HTLCIntercepted { intercept_id, expected_outbound_amount_msat, .. } => {
1242+
(intercept_id, expected_outbound_amount_msat)
1243+
},
1244+
_ => panic!()
1245+
};
1246+
nodes[1].node.forward_intercepted_htlc(intercept_id, &chan_id_2,
1247+
nodes[2].node.get_our_node_id(), expected_outbound_amt_msat).unwrap();
1248+
nodes[1].node.process_pending_htlc_forwards();
1249+
}
1250+
check_added_monitors(&nodes[1], 1);
1251+
1252+
let updates = get_htlc_update_msgs(&nodes[1], &nodes[2].node.get_our_node_id());
1253+
nodes[2].node.handle_update_add_htlc(nodes[1].node.get_our_node_id(), &updates.update_add_htlcs[0]);
1254+
do_commitment_signed_dance(&nodes[2], &nodes[1], &updates.commitment_signed, false, false);
1255+
expect_and_process_pending_htlcs(&nodes[2], false);
1256+
1257+
expect_payment_claimable!(nodes[2], payment_hash, payment_secret, amt_msat, None, nodes[2].node.get_our_node_id());
1258+
let path: &[&[_]] = &[&[&nodes[1], &nodes[2]]];
1259+
do_claim_payment_along_route(ClaimAlongRouteArgs::new(&nodes[0], path, payment_preimage));
1260+
expect_payment_sent(&nodes[0], payment_preimage, None, true, true);
1261+
}
1262+
11761263
#[test]
11771264
fn test_reload_partial_funding_batch() {
11781265
let chanmon_cfgs = create_chanmon_cfgs(3);

0 commit comments

Comments (0)