diff --git a/library/alloc/src/rc.rs b/library/alloc/src/rc.rs
index 03e33a1ff2bc3..c4e5e44fec0b2 100644
--- a/library/alloc/src/rc.rs
+++ b/library/alloc/src/rc.rs
@@ -341,12 +341,12 @@ impl<T: ?Sized> Rc<T> {
         unsafe { self.ptr.as_ref() }
     }
 
-    fn from_inner(ptr: NonNull<RcBox<T>>) -> Self {
+    unsafe fn from_inner(ptr: NonNull<RcBox<T>>) -> Self {
         Self { ptr, phantom: PhantomData }
     }
 
     unsafe fn from_ptr(ptr: *mut RcBox<T>) -> Self {
-        Self::from_inner(unsafe { NonNull::new_unchecked(ptr) })
+        unsafe { Self::from_inner(NonNull::new_unchecked(ptr)) }
     }
 }
 
@@ -367,9 +367,11 @@ impl<T> Rc<T> {
         // pointers, which ensures that the weak destructor never frees
         // the allocation while the strong destructor is running, even
         // if the weak pointer is stored inside the strong one.
-        Self::from_inner(
-            Box::leak(box RcBox { strong: Cell::new(1), weak: Cell::new(1), value }).into(),
-        )
+        unsafe {
+            Self::from_inner(
+                Box::leak(box RcBox { strong: Cell::new(1), weak: Cell::new(1), value }).into(),
+            )
+        }
     }
 
     /// Constructs a new `Rc<T>` using a weak reference to itself. Attempting
@@ -420,16 +422,16 @@ impl<T> Rc<T> {
         // otherwise.
         let data = data_fn(&weak);
 
-        unsafe {
+        let strong = unsafe {
             let inner = init_ptr.as_ptr();
             ptr::write(ptr::addr_of_mut!((*inner).value), data);
 
             let prev_value = (*inner).strong.get();
             debug_assert_eq!(prev_value, 0, "No prior strong references should exist");
             (*inner).strong.set(1);
-        }
 
-        let strong = Rc::from_inner(init_ptr);
+            Rc::from_inner(init_ptr)
+        };
 
         // Strong references should collectively own a shared weak reference,
         // so don't run the destructor for our old weak reference.
@@ -521,10 +523,12 @@ impl<T> Rc<T> {
         // pointers, which ensures that the weak destructor never frees
         // the allocation while the strong destructor is running, even
         // if the weak pointer is stored inside the strong one.
-        Ok(Self::from_inner(
-            Box::leak(Box::try_new(RcBox { strong: Cell::new(1), weak: Cell::new(1), value })?)
-                .into(),
-        ))
+        unsafe {
+            Ok(Self::from_inner(
+                Box::leak(Box::try_new(RcBox { strong: Cell::new(1), weak: Cell::new(1), value })?)
+                    .into(),
+            ))
+        }
     }
 
     /// Constructs a new `Rc` with uninitialized contents, returning an error if the allocation fails
@@ -746,7 +750,7 @@ impl<T> Rc<mem::MaybeUninit<T>> {
     #[unstable(feature = "new_uninit", issue = "63291")]
     #[inline]
     pub unsafe fn assume_init(self) -> Rc<T> {
-        Rc::from_inner(mem::ManuallyDrop::new(self).ptr.cast())
+        unsafe { Rc::from_inner(mem::ManuallyDrop::new(self).ptr.cast()) }
     }
 }
 
@@ -1214,9 +1218,11 @@ impl Rc<dyn Any> {
     /// ```
     pub fn downcast<T: Any>(self) -> Result<Rc<T>, Rc<dyn Any>> {
         if (*self).is::<T>() {
-            let ptr = self.ptr.cast::<RcBox<T>>();
-            forget(self);
-            Ok(Rc::from_inner(ptr))
+            unsafe {
+                let ptr = self.ptr.cast::<RcBox<T>>();
+                forget(self);
+                Ok(Rc::from_inner(ptr))
+            }
         } else {
             Err(self)
         }
@@ -1489,8 +1495,10 @@ impl<T: ?Sized> Clone for Rc<T> {
     /// ```
     #[inline]
     fn clone(&self) -> Rc<T> {
-        self.inner().inc_strong();
-        Self::from_inner(self.ptr)
+        unsafe {
+            self.inner().inc_strong();
+            Self::from_inner(self.ptr)
+        }
     }
 }
 
@@ -2245,11 +2253,14 @@ impl<T: ?Sized> Weak<T> {
     #[stable(feature = "rc_weak", since = "1.4.0")]
     pub fn upgrade(&self) -> Option<Rc<T>> {
         let inner = self.inner()?;
+
         if inner.strong() == 0 {
             None
         } else {
-            inner.inc_strong();
-            Some(Rc::from_inner(self.ptr))
+            unsafe {
+                inner.inc_strong();
+                Some(Rc::from_inner(self.ptr))
+            }
         }
     }
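The safety contract that the private `from_inner` now carries — the pointer must refer to a live `RcBox` whose strong count already accounts for the handle being created — is the same contract the public `Rc::into_raw`/`Rc::from_raw` pair exposes. A minimal sketch against the stable API (not part of this patch) of what that bookkeeping looks like from the outside:

use std::rc::Rc;

fn main() {
    // One handle, strong count 1; `into_raw` forgets the handle but
    // leaves the count at 1, to be reclaimed later by `from_raw`.
    let rc = Rc::new(String::from("hello"));
    let raw = Rc::into_raw(rc);

    // SAFETY: `raw` came from `Rc::into_raw`, and the count of 1 pays
    // for exactly this one reconstructed handle.
    let rc = unsafe { Rc::from_raw(raw) };
    assert_eq!(Rc::strong_count(&rc), 1);

    // Creating *two* handles from the same raw pointer requires bumping
    // the count first, mirroring what `Rc::clone` does via `inc_strong`.
    let raw = Rc::into_raw(rc);
    unsafe {
        Rc::increment_strong_count(raw); // count: 1 -> 2
        let a = Rc::from_raw(raw);
        let b = Rc::from_raw(raw);
        assert_eq!(Rc::strong_count(&a), 2);
        drop(a); // count: 2 -> 1
        drop(b); // count: 1 -> 0, allocation freed exactly once
    }
}

Skipping the `increment_strong_count` call in the second half is exactly the kind of miscounted `from_inner`-style construction that marking the function `unsafe` is meant to flag.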
diff --git a/library/alloc/src/sync.rs b/library/alloc/src/sync.rs
index b738337a2ddd1..733a898b285ac 100644
--- a/library/alloc/src/sync.rs
+++ b/library/alloc/src/sync.rs
@@ -252,7 +252,7 @@ impl<T: ?Sized + Unsize<U>, U: ?Sized> CoerceUnsized<Arc<U>> for Arc<T> {}
 impl<T: ?Sized + Unsize<U>, U: ?Sized> DispatchFromDyn<Arc<U>> for Arc<T> {}
 
 impl<T: ?Sized> Arc<T> {
-    fn from_inner(ptr: NonNull<ArcInner<T>>) -> Self {
+    unsafe fn from_inner(ptr: NonNull<ArcInner<T>>) -> Self {
         Self { ptr, phantom: PhantomData }
     }
 
@@ -348,7 +348,7 @@ impl<T> Arc<T> {
             weak: atomic::AtomicUsize::new(1),
             data,
         };
-        Self::from_inner(Box::leak(x).into())
+        unsafe { Self::from_inner(Box::leak(x).into()) }
     }
 
     /// Constructs a new `Arc<T>` using a weak reference to itself. Attempting
@@ -397,7 +397,7 @@ impl<T> Arc<T> {
 
         // Now we can properly initialize the inner value and turn our weak
         // reference into a strong reference.
-        unsafe {
+        let strong = unsafe {
             let inner = init_ptr.as_ptr();
             ptr::write(ptr::addr_of_mut!((*inner).data), data);
 
@@ -415,9 +415,9 @@ impl<T> Arc<T> {
             // possible with safe code alone.
             let prev_value = (*inner).strong.fetch_add(1, Release);
             debug_assert_eq!(prev_value, 0, "No prior strong references should exist");
-        }
 
-        let strong = Arc::from_inner(init_ptr);
+            Arc::from_inner(init_ptr)
+        };
 
         // Strong references should collectively own a shared weak reference,
        // so don't run the destructor for our old weak reference.
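The `new_cyclic` hunks in both files only move `from_inner(init_ptr)` into the same `unsafe` block that performs the deferred initialization and sets the strong count to 1; observable behavior is unchanged. For context, a short usage sketch of `Arc::new_cyclic` on a recent toolchain — the `Node` type is illustrative, not from this patch:

use std::sync::{Arc, Weak};

// A node holding a weak handle to itself; the handle must be weak,
// or the node would keep itself alive forever.
struct Node {
    value: u32,
    me: Weak<Node>,
}

fn main() {
    // The closure runs while the allocation's strong count is still 0,
    // which is why `from_inner` may only be called once the count has
    // been set — the invariant the reshuffled `unsafe` block encodes.
    let node = Arc::new_cyclic(|weak| Node { value: 7, me: weak.clone() });

    assert_eq!(node.value, 7);
    // The weak self-reference upgrades to the very same allocation.
    let again = node.me.upgrade().expect("a strong handle exists");
    assert!(Arc::ptr_eq(&node, &again));
}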
@@ -529,7 +529,7 @@ impl<T> Arc<T> {
             weak: atomic::AtomicUsize::new(1),
             data,
         })?;
-        Ok(Self::from_inner(Box::leak(x).into()))
+        unsafe { Ok(Self::from_inner(Box::leak(x).into())) }
     }
 
     /// Constructs a new `Arc` with uninitialized contents, returning an error
@@ -743,7 +743,7 @@ impl<T> Arc<mem::MaybeUninit<T>> {
     #[must_use = "`self` will be dropped if the result is not used"]
     #[inline]
     pub unsafe fn assume_init(self) -> Arc<T> {
-        Arc::from_inner(mem::ManuallyDrop::new(self).ptr.cast())
+        unsafe { Arc::from_inner(mem::ManuallyDrop::new(self).ptr.cast()) }
     }
 }
 
@@ -1341,7 +1341,7 @@ impl<T: ?Sized> Clone for Arc<T> {
             abort();
         }
 
-        Self::from_inner(self.ptr)
+        unsafe { Self::from_inner(self.ptr) }
     }
 }
 
@@ -1668,9 +1668,11 @@ impl Arc<dyn Any + Send + Sync> {
         T: Any + Send + Sync + 'static,
     {
         if (*self).is::<T>() {
-            let ptr = self.ptr.cast::<ArcInner<T>>();
-            mem::forget(self);
-            Ok(Arc::from_inner(ptr))
+            unsafe {
+                let ptr = self.ptr.cast::<ArcInner<T>>();
+                mem::forget(self);
+                Ok(Arc::from_inner(ptr))
+            }
         } else {
             Err(self)
         }
@@ -1899,7 +1901,7 @@ impl<T: ?Sized> Weak<T> {
         // value can be initialized after `Weak` references have already been created. In that case, we
         // expect to observe the fully initialized value.
         match inner.strong.compare_exchange_weak(n, n + 1, Acquire, Relaxed) {
-            Ok(_) => return Some(Arc::from_inner(self.ptr)), // null checked above
+            Ok(_) => return Some(unsafe { Arc::from_inner(self.ptr) }), // null checked above
             Err(old) => n = old,
         }
     }
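The `Weak::upgrade` change likewise only marks the already-justified `from_inner` call as `unsafe`; the semantics of the CAS loop are untouched. A quick sketch of the observable contract, using only the stable API (values are illustrative):

use std::sync::Arc;

fn main() {
    let strong = Arc::new(42);
    let weak = Arc::downgrade(&strong);

    // The strong count is nonzero, so the CAS loop above succeeds and an
    // Arc is (unsafely, but justifiably) rebuilt from the same pointer.
    assert_eq!(weak.upgrade().as_deref(), Some(&42));
    assert_eq!(Arc::strong_count(&strong), 1); // the upgraded Arc was dropped

    drop(strong);

    // With the strong count at zero, the CAS never fires: upgrade fails.
    assert!(weak.upgrade().is_none());
}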