@@ -23,7 +23,7 @@ use core::borrow;
 use core::fmt;
 use core::cmp::Ordering;
 use core::intrinsics::abort;
-use core::mem::{self, align_of_val, size_of_val, uninitialized};
+use core::mem::{self, align_of_val, size_of_val};
 use core::ops::Deref;
 use core::ops::CoerceUnsized;
 use core::ptr::{self, NonNull};
@@ -43,6 +43,9 @@ use vec::Vec;
 /// necessarily) at _exactly_ `MAX_REFCOUNT + 1` references.
 const MAX_REFCOUNT: usize = (isize::MAX) as usize;
 
+/// A sentinel value that is used for the pointer of `Weak::new()`.
+const WEAK_EMPTY: usize = 1;
+
 /// A thread-safe reference-counting pointer. 'Arc' stands for 'Atomically
 /// Reference Counted'.
 ///
@@ -235,6 +238,10 @@ impl<T: ?Sized + Unsize<U>, U: ?Sized> CoerceUnsized<Arc<U>> for Arc<T> {}
 /// [`None`]: ../../std/option/enum.Option.html#variant.None
 #[stable(feature = "arc_weak", since = "1.4.0")]
 pub struct Weak<T: ?Sized> {
+    // This is a `NonNull` to allow optimizing the size of this type in enums,
+    // but it is actually not truly "non-null". A `Weak::new()` will set this
+    // to a sentinel value, instead of needing to allocate some space in the
+    // heap.
     ptr: NonNull<ArcInner<T>>,
 }
 
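The comment above refers to the niche optimization: because `NonNull` promises a non-zero bit pattern, the compiler can encode `None` as null, so `Option<Weak<T>>` costs no extra space. A quick check of this observable behavior (not part of the patch itself):

```rust
use std::mem::size_of;
use std::sync::Weak;

fn main() {
    // `None` uses the (impossible) null bit pattern, so wrapping
    // `Weak` in an `Option` adds no bytes.
    assert_eq!(size_of::<Weak<u32>>(), size_of::<Option<Weak<u32>>>());
}
```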
@@ -1011,8 +1018,8 @@ impl Arc<Any + Send + Sync> {
 }
 
 impl<T> Weak<T> {
-    /// Constructs a new `Weak<T>`, allocating memory for `T` without initializing
-    /// it. Calling [`upgrade`] on the return value always gives [`None`].
+    /// Constructs a new `Weak<T>`, without allocating any memory.
+    /// Calling [`upgrade`] on the return value always gives [`None`].
     ///
     /// [`upgrade`]: struct.Weak.html#method.upgrade
     /// [`None`]: ../../std/option/enum.Option.html#variant.None
@@ -1029,11 +1036,7 @@ impl<T> Weak<T> {
     pub fn new() -> Weak<T> {
         unsafe {
             Weak {
-                ptr: Box::into_raw_non_null(box ArcInner {
-                    strong: atomic::AtomicUsize::new(0),
-                    weak: atomic::AtomicUsize::new(1),
-                    data: uninitialized(),
-                }),
+                ptr: NonNull::new_unchecked(WEAK_EMPTY as *mut _),
             }
         }
     }
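With the sentinel in place, `Weak::new()` is allocation-free; the old version had to allocate a whole `ArcInner` with an uninitialized `data` field just to have something to point at. The observable contract is unchanged, as this small usage example shows:

```rust
use std::sync::Weak;

fn main() {
    // After this change, no heap allocation happens here; the pointer
    // is just the WEAK_EMPTY sentinel.
    let empty: Weak<Vec<u8>> = Weak::new();

    // Upgrading an empty Weak always yields None.
    assert!(empty.upgrade().is_none());
}
```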
@@ -1070,7 +1073,11 @@ impl<T: ?Sized> Weak<T> {
     pub fn upgrade(&self) -> Option<Arc<T>> {
         // We use a CAS loop to increment the strong count instead of a
         // fetch_add because once the count hits 0 it must never be above 0.
-        let inner = self.inner();
+        let inner = if self.ptr.as_ptr() as *const u8 as usize == WEAK_EMPTY {
+            return None;
+        } else {
+            unsafe { self.ptr.as_ref() }
+        };
 
         // Relaxed load because any write of 0 that we can observe
         // leaves the field in a permanently zero state (so a
@@ -1092,17 +1099,15 @@ impl<T: ?Sized> Weak<T> {
 
             // Relaxed is valid for the same reason it is on Arc's Clone impl
             match inner.strong.compare_exchange_weak(n, n + 1, Relaxed, Relaxed) {
-                Ok(_) => return Some(Arc { ptr: self.ptr, phantom: PhantomData }),
+                Ok(_) => return Some(Arc {
+                    // null checked above
+                    ptr: self.ptr,
+                    phantom: PhantomData,
+                }),
                 Err(old) => n = old,
             }
         }
     }
-
-    #[inline]
-    fn inner(&self) -> &ArcInner<T> {
-        // See comments above for why this is "safe"
-        unsafe { self.ptr.as_ref() }
-    }
 }
 
 #[stable(feature = "arc_weak", since = "1.4.0")]
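For reference, this is the behavior the CAS loop guarantees, seen from the caller's side (a sketch of observable behavior, not of the internals):

```rust
use std::sync::Arc;

fn main() {
    let strong = Arc::new(5);
    let weak = Arc::downgrade(&strong);

    // While a strong reference exists, `upgrade` increments the strong
    // count and hands back a fresh `Arc`.
    assert_eq!(*weak.upgrade().unwrap(), 5);

    // Once the strong count has hit zero it stays there, so `upgrade`
    // returns `None` forever after.
    drop(strong);
    assert!(weak.upgrade().is_none());
}
```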
@@ -1120,11 +1125,16 @@ impl<T: ?Sized> Clone for Weak<T> {
     /// ```
     #[inline]
     fn clone(&self) -> Weak<T> {
+        let inner = if self.ptr.as_ptr() as *const u8 as usize == WEAK_EMPTY {
+            return Weak { ptr: self.ptr };
+        } else {
+            unsafe { self.ptr.as_ref() }
+        };
         // See comments in Arc::clone() for why this is relaxed. This can use a
         // fetch_add (ignoring the lock) because the weak count is only locked
         // when there are *no other* weak pointers in existence. (So we can't be
         // running this code in that case).
-        let old_size = self.inner().weak.fetch_add(1, Relaxed);
+        let old_size = inner.weak.fetch_add(1, Relaxed);
 
         // See comments in Arc::clone() for why we do this (for mem::forget).
         if old_size > MAX_REFCOUNT {
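Cloning an empty `Weak` now just copies the sentinel; only real weak references touch the `fetch_add`. Both paths from the caller's perspective:

```rust
use std::sync::{Arc, Weak};

fn main() {
    // Empty path: no ArcInner exists, so there is no weak count to bump.
    let a: Weak<String> = Weak::new();
    let b = a.clone();
    assert!(b.upgrade().is_none());

    // Real path: the clone goes through inner.weak.fetch_add(1, Relaxed).
    let strong = Arc::new(String::from("hi"));
    let w1 = Arc::downgrade(&strong);
    let _w2 = w1.clone();
    assert_eq!(Arc::weak_count(&strong), 2);
}
```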
@@ -1139,8 +1149,8 @@ impl<T: ?Sized> Clone for Weak<T> {
 
 #[stable(feature = "downgraded_weak", since = "1.10.0")]
 impl<T> Default for Weak<T> {
-    /// Constructs a new `Weak<T>`, allocating memory for `T` without initializing
-    /// it. Calling [`upgrade`] on the return value always gives [`None`].
+    /// Constructs a new `Weak<T>`, without allocating memory.
+    /// Calling [`upgrade`] on the return value always gives [`None`].
     ///
     /// [`upgrade`]: struct.Weak.html#method.upgrade
     /// [`None`]: ../../std/option/enum.Option.html#variant.None
@@ -1193,7 +1203,13 @@ impl<T: ?Sized> Drop for Weak<T> {
         // weak count can only be locked if there was precisely one weak ref,
         // meaning that drop could only subsequently run ON that remaining weak
         // ref, which can only happen after the lock is released.
-        if self.inner().weak.fetch_sub(1, Release) == 1 {
+        let inner = if self.ptr.as_ptr() as *const u8 as usize == WEAK_EMPTY {
+            return;
+        } else {
+            unsafe { self.ptr.as_ref() }
+        };
+
+        if inner.weak.fetch_sub(1, Release) == 1 {
             atomic::fence(Acquire);
             unsafe {
                 Global.dealloc(self.ptr.cast(), Layout::for_value(self.ptr.as_ref()))
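Dropping an empty `Weak` now takes the early `return` and never reaches the `fetch_sub` or the `dealloc`; dropping a real one still decrements the weak count and frees the `ArcInner` once both counts are exhausted. A caller-side sketch:

```rust
use std::sync::{Arc, Weak};

fn main() {
    // Early-return path: nothing to decrement, nothing to deallocate.
    let empty: Weak<i32> = Weak::new();
    drop(empty);

    // Normal path: the weak count drops back to zero, and the ArcInner
    // allocation is freed once no strong or weak references remain.
    let strong = Arc::new(1);
    let weak = Arc::downgrade(&strong);
    assert_eq!(Arc::weak_count(&strong), 1);
    drop(weak);
    assert_eq!(Arc::weak_count(&strong), 0);
}
```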