@@ -2150,7 +2150,8 @@ unsafe impl<T: ?Sized, A: Allocator> DerefPure for Arc<T, A> {}
 #[unstable(feature = "receiver_trait", issue = "none")]
 impl<T: ?Sized> Receiver for Arc<T> {}
 
-impl<T: Clone, A: Allocator + Clone> Arc<T, A> {
+#[cfg(not(no_global_oom_handling))]
+impl<T: ?Sized + CloneToUninit, A: Allocator + Clone> Arc<T, A> {
     /// Makes a mutable reference into the given `Arc`.
     ///
     /// If there are other `Arc` pointers to the same allocation, then `make_mut` will
@@ -2201,10 +2202,11 @@ impl<T: Clone, A: Allocator + Clone> Arc<T, A> {
     /// assert!(76 == *data);
     /// assert!(weak.upgrade().is_none());
     /// ```
-    #[cfg(not(no_global_oom_handling))]
     #[inline]
     #[stable(feature = "arc_unique", since = "1.4.0")]
     pub fn make_mut(this: &mut Self) -> &mut T {
+        let size_of_val = mem::size_of_val::<T>(&**this);
+
         // Note that we hold both a strong reference and a weak reference.
         // Thus, releasing our strong reference only will not, by itself, cause
         // the memory to be deallocated.
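Note: with the bound relaxed from `T: Clone` to `T: ?Sized + CloneToUninit` above, `make_mut` also accepts unsized pointees such as `[T]` and `str`. A minimal usage sketch (assumes a toolchain that contains this change; the values and variable names are illustrative):

```rust
use std::sync::Arc;

fn main() {
    // `[i32]` is unsized, so the old `T: Clone` bound rejected this call;
    // the new `T: ?Sized + CloneToUninit` bound accepts it (via `i32: Clone`).
    let mut data: Arc<[i32]> = Arc::from(vec![1, 2, 3]);
    let other = Arc::clone(&data); // shared, so make_mut must clone

    Arc::make_mut(&mut data)[0] = 10;
    assert_eq!(&*data, &[10, 2, 3]);
    assert_eq!(&*other, &[1, 2, 3]); // the shared allocation is untouched

    // Same story for `str`.
    let mut s: Arc<str> = Arc::from("hello");
    Arc::make_mut(&mut s).make_ascii_uppercase();
    assert_eq!(&*s, "HELLO");
}
```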
@@ -2215,13 +2217,19 @@ impl<T: Clone, A: Allocator + Clone> Arc<T, A> {
         // deallocated.
         if this.inner().strong.compare_exchange(1, 0, Acquire, Relaxed).is_err() {
             // Another strong pointer exists, so we must clone.
-            // Pre-allocate memory to allow writing the cloned value directly.
-            let mut arc = Self::new_uninit_in(this.alloc.clone());
-            unsafe {
-                let data = Arc::get_mut_unchecked(&mut arc);
-                (**this).clone_to_uninit(data.as_mut_ptr());
-                *this = arc.assume_init();
-            }
+
+            let this_data_ref: &T = &**this;
+            // `in_progress` drops the allocation if we panic before finishing initializing it.
+            let mut in_progress: UniqueArcUninit<T, A> =
+                UniqueArcUninit::new(this_data_ref, this.alloc.clone());
+
+            let initialized_clone = unsafe {
+                // Clone. If the clone panics, `in_progress` will be dropped and clean up.
+                this_data_ref.clone_to_uninit(in_progress.data_ptr());
+                // Cast type of pointer, now that it is initialized.
+                in_progress.into_arc()
+            };
+            *this = initialized_clone;
         } else if this.inner().weak.load(Relaxed) != 1 {
             // Relaxed suffices in the above because this is fundamentally an
             // optimization: we are always racing with weak pointers being
@@ -2240,11 +2248,22 @@ impl<T: Clone, A: Allocator + Clone> Arc<T, A> {
             let _weak = Weak { ptr: this.ptr, alloc: this.alloc.clone() };
 
             // Can just steal the data, all that's left is Weaks
-            let mut arc = Self::new_uninit_in(this.alloc.clone());
+            //
+            // We don't need panic-protection like the above branch does, but we might as well
+            // use the same mechanism.
+            let mut in_progress: UniqueArcUninit<T, A> =
+                UniqueArcUninit::new(&**this, this.alloc.clone());
             unsafe {
-                let data = Arc::get_mut_unchecked(&mut arc);
-                data.as_mut_ptr().copy_from_nonoverlapping(&**this, 1);
-                ptr::write(this, arc.assume_init());
+                // Initialize `in_progress` with a move of **this.
+                // We have to express this in terms of bytes because `T: ?Sized`; there is no
+                // operation that just copies a value based on its `size_of_val()`.
+                ptr::copy_nonoverlapping(
+                    ptr::from_ref(&**this).cast::<u8>(),
+                    in_progress.data_ptr().cast::<u8>(),
+                    size_of_val,
+                );
+
+                ptr::write(this, in_progress.into_arc());
             }
         } else {
             // We were the sole reference of either kind; bump back up the
@@ -3809,6 +3828,68 @@ fn data_offset_align(align: usize) -> usize {
     layout.size() + layout.padding_needed_for(align)
 }
 
+/// A unique owning pointer to an [`ArcInner`] **that does not imply the contents are initialized,**
+/// but will deallocate it (without dropping the value) when dropped.
+///
+/// This is a helper for [`Arc::make_mut()`] to ensure correct cleanup on panic.
+#[cfg(not(no_global_oom_handling))]
+struct UniqueArcUninit<T: ?Sized, A: Allocator> {
+    ptr: NonNull<ArcInner<T>>,
+    layout_for_value: Layout,
+    alloc: Option<A>,
+}
+
+#[cfg(not(no_global_oom_handling))]
+impl<T: ?Sized, A: Allocator> UniqueArcUninit<T, A> {
+    /// Allocates an `ArcInner` with a layout suitable to contain `for_value` or a clone of it.
+    fn new(for_value: &T, alloc: A) -> UniqueArcUninit<T, A> {
+        let layout = Layout::for_value(for_value);
+        let ptr = unsafe {
+            Arc::allocate_for_layout(
+                layout,
+                |layout_for_arcinner| alloc.allocate(layout_for_arcinner),
+                |mem| mem.with_metadata_of(ptr::from_ref(for_value) as *const ArcInner<T>),
+            )
+        };
+        Self { ptr: NonNull::new(ptr).unwrap(), layout_for_value: layout, alloc: Some(alloc) }
+    }
+
+    /// Returns the pointer to be written into to initialize the [`Arc`].
+    fn data_ptr(&mut self) -> *mut T {
+        let offset = data_offset_align(self.layout_for_value.align());
+        unsafe { self.ptr.as_ptr().byte_add(offset) as *mut T }
+    }
+
+    /// Upgrade this into a normal [`Arc`].
+    ///
+    /// # Safety
+    ///
+    /// The data must have been initialized (by writing to [`Self::data_ptr()`]).
+    unsafe fn into_arc(mut self) -> Arc<T, A> {
+        let ptr = self.ptr;
+        let alloc = self.alloc.take().unwrap();
+        mem::forget(self);
+        // SAFETY: The pointer is valid as per `UniqueArcUninit::new`, and the caller is responsible
+        // for having initialized the data.
+        unsafe { Arc::from_ptr_in(ptr.as_ptr(), alloc) }
+    }
+}
+
+#[cfg(not(no_global_oom_handling))]
+impl<T: ?Sized, A: Allocator> Drop for UniqueArcUninit<T, A> {
+    fn drop(&mut self) {
+        // SAFETY:
+        // * new() produced a pointer safe to deallocate.
+        // * We own the pointer unless into_arc() was called, which forgets us.
+        unsafe {
+            self.alloc.take().unwrap().deallocate(
+                self.ptr.cast(),
+                arcinner_layout_for_value_layout(self.layout_for_value),
+            );
+        }
+    }
+}
+
 #[stable(feature = "arc_error", since = "1.52.0")]
 impl<T: core::error::Error + ?Sized> core::error::Error for Arc<T> {
     #[allow(deprecated, deprecated_in_future)]
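Note: `UniqueArcUninit` exists so that a panic inside the user's `clone` deallocates the freshly allocated, partially initialized `ArcInner` instead of leaking it, and leaves the caller's `Arc` untouched. A sketch of that observable property (the `Loud` type and the test flow are hypothetical, not part of this change):

```rust
use std::panic::{catch_unwind, AssertUnwindSafe};
use std::sync::Arc;

// Hypothetical type whose `Clone` always panics, to exercise the guard.
struct Loud(u32);

impl Clone for Loud {
    fn clone(&self) -> Self {
        panic!("clone failed");
    }
}

fn main() {
    let mut a = Arc::new(Loud(1));
    let b = Arc::clone(&a); // a second strong handle forces the clone path

    // `make_mut` must clone; the clone panics partway through.
    let result = catch_unwind(AssertUnwindSafe(|| {
        let _ = Arc::make_mut(&mut a);
    }));
    assert!(result.is_err());

    // The in-progress allocation was freed by the guard's `Drop`, and both
    // existing handles still see the original, intact value.
    assert_eq!(a.0, 1);
    assert_eq!(b.0, 1);
}
```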