Rollup merge of #89741 - sdroege:arc-rc-from-inner-unsafe, r=Mark-Simulacrum

Mark `Arc::from_inner` / `Rc::from_inner` as unsafe

While it is an internal function, it makes it easy to create an invalid `Arc`
or `Rc` that holds a dangling pointer.

Fixes #89740
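
For illustration, here is a minimal, self-contained sketch of the pattern this change enforces, using hypothetical `MyRc`/`Inner` types rather than the actual `alloc` internals: the raw constructor is `unsafe`, and each internal caller states why its pointer is valid.

```rust
use std::cell::Cell;
use std::ptr::NonNull;

struct Inner<T> {
    strong: Cell<usize>,
    value: T,
}

struct MyRc<T> {
    ptr: NonNull<Inner<T>>,
}

impl<T> MyRc<T> {
    // Unsafe: the caller must pass a pointer to a live, initialized `Inner<T>`
    // whose reference count already accounts for this new handle. A safe
    // version of this constructor would let safe code build a dangling handle.
    unsafe fn from_inner(ptr: NonNull<Inner<T>>) -> Self {
        MyRc { ptr }
    }

    fn new(value: T) -> Self {
        let leaked = Box::leak(Box::new(Inner { strong: Cell::new(1), value }));
        // SAFETY: the pointer comes from a freshly leaked, initialized box.
        unsafe { Self::from_inner(NonNull::from(leaked)) }
    }

    fn get(&self) -> &T {
        // SAFETY: `ptr` stays valid for as long as this `MyRc` exists.
        unsafe { &self.ptr.as_ref().value }
    }
}

fn main() {
    let rc = MyRc::new(42);
    println!("{}", rc.get());
}
```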
matthiaskrgr authored Nov 20, 2021
2 parents 93542a8 + 2e2c38e commit 09d9c09
Showing 2 changed files with 45 additions and 32 deletions.
library/alloc/src/rc.rs: 31 additions & 20 deletions
@@ -341,12 +341,12 @@ impl<T: ?Sized> Rc<T> {
         unsafe { self.ptr.as_ref() }
     }

-    fn from_inner(ptr: NonNull<RcBox<T>>) -> Self {
+    unsafe fn from_inner(ptr: NonNull<RcBox<T>>) -> Self {
         Self { ptr, phantom: PhantomData }
     }

     unsafe fn from_ptr(ptr: *mut RcBox<T>) -> Self {
-        Self::from_inner(unsafe { NonNull::new_unchecked(ptr) })
+        unsafe { Self::from_inner(NonNull::new_unchecked(ptr)) }
     }
 }

@@ -367,9 +367,11 @@ impl<T> Rc<T> {
         // pointers, which ensures that the weak destructor never frees
         // the allocation while the strong destructor is running, even
         // if the weak pointer is stored inside the strong one.
-        Self::from_inner(
-            Box::leak(box RcBox { strong: Cell::new(1), weak: Cell::new(1), value }).into(),
-        )
+        unsafe {
+            Self::from_inner(
+                Box::leak(box RcBox { strong: Cell::new(1), weak: Cell::new(1), value }).into(),
+            )
+        }
     }

     /// Constructs a new `Rc<T>` using a weak reference to itself. Attempting
@@ -420,16 +422,16 @@ impl<T> Rc<T> {
         // otherwise.
         let data = data_fn(&weak);

-        unsafe {
+        let strong = unsafe {
             let inner = init_ptr.as_ptr();
             ptr::write(ptr::addr_of_mut!((*inner).value), data);

             let prev_value = (*inner).strong.get();
             debug_assert_eq!(prev_value, 0, "No prior strong references should exist");
             (*inner).strong.set(1);
-        }

-        let strong = Rc::from_inner(init_ptr);
+            Rc::from_inner(init_ptr)
+        };

         // Strong references should collectively own a shared weak reference,
         // so don't run the destructor for our old weak reference.
@@ -521,10 +523,12 @@ impl<T> Rc<T> {
         // pointers, which ensures that the weak destructor never frees
         // the allocation while the strong destructor is running, even
         // if the weak pointer is stored inside the strong one.
-        Ok(Self::from_inner(
-            Box::leak(Box::try_new(RcBox { strong: Cell::new(1), weak: Cell::new(1), value })?)
-                .into(),
-        ))
+        unsafe {
+            Ok(Self::from_inner(
+                Box::leak(Box::try_new(RcBox { strong: Cell::new(1), weak: Cell::new(1), value })?)
+                    .into(),
+            ))
+        }
     }

     /// Constructs a new `Rc` with uninitialized contents, returning an error if the allocation fails
@@ -746,7 +750,7 @@ impl<T> Rc<mem::MaybeUninit<T>> {
     #[unstable(feature = "new_uninit", issue = "63291")]
     #[inline]
     pub unsafe fn assume_init(self) -> Rc<T> {
-        Rc::from_inner(mem::ManuallyDrop::new(self).ptr.cast())
+        unsafe { Rc::from_inner(mem::ManuallyDrop::new(self).ptr.cast()) }
     }
 }

@@ -1214,9 +1218,11 @@ impl Rc<dyn Any> {
     /// ```
     pub fn downcast<T: Any>(self) -> Result<Rc<T>, Rc<dyn Any>> {
         if (*self).is::<T>() {
-            let ptr = self.ptr.cast::<RcBox<T>>();
-            forget(self);
-            Ok(Rc::from_inner(ptr))
+            unsafe {
+                let ptr = self.ptr.cast::<RcBox<T>>();
+                forget(self);
+                Ok(Rc::from_inner(ptr))
+            }
         } else {
             Err(self)
         }
@@ -1489,8 +1495,10 @@ impl<T: ?Sized> Clone for Rc<T> {
     /// ```
     #[inline]
     fn clone(&self) -> Rc<T> {
-        self.inner().inc_strong();
-        Self::from_inner(self.ptr)
+        unsafe {
+            self.inner().inc_strong();
+            Self::from_inner(self.ptr)
+        }
     }
 }

@@ -2245,11 +2253,14 @@ impl<T: ?Sized> Weak<T> {
     #[stable(feature = "rc_weak", since = "1.4.0")]
     pub fn upgrade(&self) -> Option<Rc<T>> {
         let inner = self.inner()?;
+
         if inner.strong() == 0 {
             None
         } else {
-            inner.inc_strong();
-            Some(Rc::from_inner(self.ptr))
+            unsafe {
+                inner.inc_strong();
+                Some(Rc::from_inner(self.ptr))
+            }
         }
     }

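The `new_cyclic` hunk above does more than wrap a call in `unsafe`: the strong pointer is now the value of the `unsafe` block itself, so `from_inner` runs inside the block that justifies it. A simplified, hypothetical sketch of that block-as-expression pattern (demo types, not the actual library code):

```rust
use std::cell::Cell;
use std::ptr::NonNull;

struct DemoBox<T> {
    strong: Cell<usize>,
    value: T,
}

fn main() {
    // Start from an allocation whose value is not meaningfully initialized yet.
    let init_ptr: NonNull<DemoBox<i32>> =
        NonNull::from(Box::leak(Box::new(DemoBox { strong: Cell::new(0), value: 0 })));

    // The whole `unsafe` block is an expression: its final line (no semicolon)
    // becomes the value bound to `strong`, mirroring
    // `let strong = unsafe { ...; Rc::from_inner(init_ptr) };` in the diff.
    let strong: NonNull<DemoBox<i32>> = unsafe {
        let inner = init_ptr.as_ptr();
        (*inner).value = 42;
        (*inner).strong.set(1);
        init_ptr
    };

    // SAFETY: `strong` points at the box we just leaked and initialized.
    let demo = unsafe { strong.as_ref() };
    println!("value = {}, strong = {}", demo.value, demo.strong.get());
}
```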
library/alloc/src/sync.rs: 14 additions & 12 deletions
@@ -252,7 +252,7 @@ impl<T: ?Sized + Unsize<U>, U: ?Sized> CoerceUnsized<Arc<U>> for Arc<T> {}
 impl<T: ?Sized + Unsize<U>, U: ?Sized> DispatchFromDyn<Arc<U>> for Arc<T> {}

 impl<T: ?Sized> Arc<T> {
-    fn from_inner(ptr: NonNull<ArcInner<T>>) -> Self {
+    unsafe fn from_inner(ptr: NonNull<ArcInner<T>>) -> Self {
         Self { ptr, phantom: PhantomData }
     }

@@ -348,7 +348,7 @@ impl<T> Arc<T> {
             weak: atomic::AtomicUsize::new(1),
             data,
         };
-        Self::from_inner(Box::leak(x).into())
+        unsafe { Self::from_inner(Box::leak(x).into()) }
     }

     /// Constructs a new `Arc<T>` using a weak reference to itself. Attempting
@@ -397,7 +397,7 @@ impl<T> Arc<T> {

         // Now we can properly initialize the inner value and turn our weak
         // reference into a strong reference.
-        unsafe {
+        let strong = unsafe {
             let inner = init_ptr.as_ptr();
             ptr::write(ptr::addr_of_mut!((*inner).data), data);

@@ -415,9 +415,9 @@ impl<T> Arc<T> {
             // possible with safe code alone.
             let prev_value = (*inner).strong.fetch_add(1, Release);
             debug_assert_eq!(prev_value, 0, "No prior strong references should exist");
-        }

-        let strong = Arc::from_inner(init_ptr);
+            Arc::from_inner(init_ptr)
+        };

         // Strong references should collectively own a shared weak reference,
         // so don't run the destructor for our old weak reference.
@@ -529,7 +529,7 @@ impl<T> Arc<T> {
             weak: atomic::AtomicUsize::new(1),
             data,
         })?;
-        Ok(Self::from_inner(Box::leak(x).into()))
+        unsafe { Ok(Self::from_inner(Box::leak(x).into())) }
     }

     /// Constructs a new `Arc` with uninitialized contents, returning an error
@@ -743,7 +743,7 @@ impl<T> Arc<mem::MaybeUninit<T>> {
     #[must_use = "`self` will be dropped if the result is not used"]
     #[inline]
     pub unsafe fn assume_init(self) -> Arc<T> {
-        Arc::from_inner(mem::ManuallyDrop::new(self).ptr.cast())
+        unsafe { Arc::from_inner(mem::ManuallyDrop::new(self).ptr.cast()) }
     }
 }

@@ -1341,7 +1341,7 @@ impl<T: ?Sized> Clone for Arc<T> {
             abort();
         }

-        Self::from_inner(self.ptr)
+        unsafe { Self::from_inner(self.ptr) }
     }
 }

@@ -1668,9 +1668,11 @@ impl Arc<dyn Any + Send + Sync> {
         T: Any + Send + Sync + 'static,
     {
         if (*self).is::<T>() {
-            let ptr = self.ptr.cast::<ArcInner<T>>();
-            mem::forget(self);
-            Ok(Arc::from_inner(ptr))
+            unsafe {
+                let ptr = self.ptr.cast::<ArcInner<T>>();
+                mem::forget(self);
+                Ok(Arc::from_inner(ptr))
+            }
         } else {
             Err(self)
         }
@@ -1899,7 +1901,7 @@ impl<T: ?Sized> Weak<T> {
             // value can be initialized after `Weak` references have already been created. In that case, we
             // expect to observe the fully initialized value.
             match inner.strong.compare_exchange_weak(n, n + 1, Acquire, Relaxed) {
-                Ok(_) => return Some(Arc::from_inner(self.ptr)), // null checked above
+                Ok(_) => return Some(unsafe { Arc::from_inner(self.ptr) }), // null checked above
                 Err(old) => n = old,
             }
         }
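A note on why bodies of functions that are already `unsafe fn` (such as `from_ptr` and `assume_init`) still gain explicit `unsafe { }` blocks: this matches the `unsafe_op_in_unsafe_fn` lint, under which unsafe operations inside an `unsafe fn` must be wrapped in an explicit block, and which `library/alloc` presumably denies. A small standalone sketch of the lint's effect (not taken from the PR):

```rust
// With this lint denied, unsafe operations inside an `unsafe fn` still
// require an explicit `unsafe { }` block, matching the style in the diff.
#![deny(unsafe_op_in_unsafe_fn)]

/// # Safety
/// `p` must be valid for a read of one `u8`.
unsafe fn read_first(p: *const u8) -> u8 {
    // Without this inner block, the raw-pointer deref would trip the lint above.
    unsafe { *p }
}

fn main() {
    let data = [1u8, 2, 3];
    // SAFETY: `data.as_ptr()` is valid for reads.
    let first = unsafe { read_first(data.as_ptr()) };
    println!("{}", first);
}
```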
