@@ -2047,8 +2047,12 @@ macro_rules! handle_monitor_update_completion {
20472047 ));
20482048 if let Some(channel_state) = channel_state {
20492049 channel_state.2 = true;
2050+ } else {
2051+ debug_assert!(false, "Missing channel batch state for channel which completed initial monitor update");
20502052 }
20512053 batch_completed = batch_state.iter().all(|(_, _, completed)| *completed);
2054+ } else {
2055+ debug_assert!(false, "Missing batch state for channel which completed initial monitor update");
20522056 }
20532057
20542058 // When all channels in a batched funding transaction have become ready, it is not necessary
@@ -2707,7 +2711,8 @@ where
27072711 let mut funding_batch_states = self.funding_batch_states.lock().unwrap();
27082712 let affected_channels = funding_batch_states.remove(&txid).into_iter().flatten();
27092713 let per_peer_state = self.per_peer_state.read().unwrap();
2710- for (channel_id, counterparty_node_id, _state) in affected_channels {
2714+ let mut has_uncompleted_channel = None;
2715+ for (channel_id, counterparty_node_id, state) in affected_channels {
27112716 if let Some(peer_state_mutex) = per_peer_state.get(&counterparty_node_id) {
27122717 let mut peer_state = peer_state_mutex.lock().unwrap();
27132718 if let Some(mut chan) = peer_state.channel_by_id.remove(&channel_id) {
@@ -2716,7 +2721,12 @@ where
27162721 shutdown_results.push(chan.context_mut().force_shutdown(false));
27172722 }
27182723 }
2724+ has_uncompleted_channel = Some(has_uncompleted_channel.map_or(!state, |v| v || !state));
27192725 }
2726+ debug_assert!(
2727+ has_uncompleted_channel.unwrap_or(true),
2728+ "Closing a batch where all channels have completed initial monitor update",
2729+ );
27202730 }
27212731 for shutdown_result in shutdown_results.drain(..) {
27222732 self.finish_close_channel(shutdown_result);
@@ -3828,7 +3838,7 @@ where
38283838
38293839 /// Call this upon creation of a batch funding transaction for the given channels.
38303840 ///
3831- /// Return values are identical to ` funding_transaction_generated`, respective to
3841	/// Return values are identical to [`Self::funding_transaction_generated`], respective to
38323842 /// each individual channel and transaction output.
38333843 ///
38343844	/// Do NOT broadcast the funding transaction yourself. This batch funding transaction
@@ -3867,7 +3877,6 @@ where
38673877 }
38683878 }
38693879
3870-
38713880 let txid = funding_transaction.txid();
38723881 let is_batch_funding = temporary_channels.len() > 1;
38733882 let mut funding_batch_states = if is_batch_funding {
@@ -3876,8 +3885,15 @@ where
38763885 None
38773886 };
38783887 let mut funding_batch_state = funding_batch_states.as_mut().and_then(|states| {
3879- states.insert(txid, Vec::new());
3880- states.get_mut(&txid)
3888+ if states.contains_key(&txid) {
3889+ result = result.clone().and(Err(APIError::APIMisuseError {
3890+ err: "Batch funding transaction with the same txid already exists".to_owned()
3891+ }));
3892+ None
3893+ } else {
3894+ states.insert(txid, Vec::new());
3895+ states.get_mut(&txid)
3896+ }
38813897 });
38823898 for (channel_idx, &(temporary_channel_id, counterparty_node_id)) in temporary_channels.iter().enumerate() {
38833899 result = result.and_then(|_| self.funding_transaction_generated_intern(
@@ -7995,35 +8011,24 @@ where
79958011 peer_state.channel_by_id.retain(|_, phase| {
79968012 let context = match phase {
79978013 ChannelPhase::Funded(chan) => {
7998- if !chan.context.is_funding_broadcast() {
7999- update_maps_on_chan_removal!(self, &chan.context);
8000- self.issue_channel_close_events(&chan.context, ClosureReason::DisconnectedPeer);
8001- // It is possible to have persisted the monitor upon funding_signed
8002- // but not have broadcast the transaction, especially for batch funding.
8003- // The monitor should be moved to the correct state.
8004- failed_channels.push(chan.context.force_shutdown(false));
8005- return false;
8006- } else {
8007- chan.remove_uncommitted_htlcs_and_mark_paused(&self.logger);
8014+ if chan.remove_uncommitted_htlcs_and_mark_paused(&self.logger).is_ok() {
80088015 // We only retain funded channels that are not shutdown.
8009- if !chan.is_shutdown() {
8010- return true;
8011- }
8016+ return true;
80128017 }
8013- &chan.context
8018+ &mut chan.context
80148019 },
80158020 // Unfunded channels will always be removed.
80168021 ChannelPhase::UnfundedOutboundV1(chan) => {
8017- &chan.context
8022+ &mut chan.context
80188023 },
80198024 ChannelPhase::UnfundedInboundV1(chan) => {
8020- &chan.context
8025+ &mut chan.context
80218026 },
80228027 };
80238028 // Clean up for removal.
80248029 update_maps_on_chan_removal!(self, &context);
80258030 self.issue_channel_close_events(&context, ClosureReason::DisconnectedPeer);
8026- failed_channels.push((None, Vec::new(), None ));
8031+ failed_channels.push(context.force_shutdown(false ));
80278032 false
80288033 });
80298034 // Note that we don't bother generating any events for pre-accept channels -
@@ -9262,7 +9267,10 @@ where
92629267 log_error!(args.logger, " The ChannelMonitor for channel {} is at counterparty commitment transaction number {} but the ChannelManager is at counterparty commitment transaction number {}.",
92639268 &channel.context.channel_id(), monitor.get_cur_counterparty_commitment_number(), channel.get_cur_counterparty_commitment_transaction_number());
92649269 }
9265- let (monitor_update, mut new_failed_htlcs, _batch_funding_txid) = channel.context.force_shutdown(true);
9270+ let (monitor_update, mut new_failed_htlcs, batch_funding_txid) = channel.context.force_shutdown(true);
9271+ if batch_funding_txid.is_some() {
9272+ return Err(DecodeError::InvalidValue);
9273+ }
92669274 if let Some((counterparty_node_id, funding_txo, update)) = monitor_update {
92679275 close_background_events.push(BackgroundEvent::MonitorUpdateRegeneratedOnStartup {
92689276 counterparty_node_id, funding_txo, update
0 commit comments