From d72ace16d3340f0052f79e887e61a50d2a01476c Mon Sep 17 00:00:00 2001
From: Kevin Reid <kpreid@switchb.org>
Date: Sun, 24 Sep 2023 08:55:19 -0700
Subject: [PATCH 1/3] Add `core::clone::CloneToUninit`.

This trait allows cloning DSTs, but is unsafe to implement and use
because it writes to possibly-uninitialized memory, which must be of
the correct size, and because it must fully initialize that memory.
It is only implemented for `T: Clone` and `[T] where T: Clone`, but
additional implementations could be provided for specific `dyn Trait`
or custom-DST types.
---
 library/core/src/clone.rs   | 186 ++++++++++++++++++++++++++++++++++++
 library/core/tests/clone.rs |  65 +++++++++++++
 library/core/tests/lib.rs   |   2 +
 3 files changed, 253 insertions(+)

diff --git a/library/core/src/clone.rs b/library/core/src/clone.rs
index d448c5338fc46..d7ce65f6c53a9 100644
--- a/library/core/src/clone.rs
+++ b/library/core/src/clone.rs
@@ -36,6 +36,9 @@
 
 #![stable(feature = "rust1", since = "1.0.0")]
 
+use crate::mem::{self, MaybeUninit};
+use crate::ptr;
+
 /// A common trait for the ability to explicitly duplicate an object.
 ///
 /// Differs from [`Copy`] in that [`Copy`] is implicit and an inexpensive bit-wise copy, while
@@ -204,6 +207,189 @@ pub struct AssertParamIsCopy<T: Copy + ?Sized> {
     _field: crate::marker::PhantomData<T>,
 }
 
+/// A generalization of [`Clone`] to dynamically-sized types stored in arbitrary containers.
+///
+/// This trait is implemented for all types implementing [`Clone`], and also [slices](slice) of all
+/// such types. You may also implement this trait to enable cloning trait objects and custom DSTs
+/// (structures containing dynamically-sized fields).
+///
+/// # Safety
+///
+/// Implementations must ensure that when `.clone_to_uninit(dst)` returns normally rather than
+/// panicking, it always leaves `*dst` initialized as a valid value of type `Self`.
+///
+/// # See also
+///
+/// * [`Clone::clone_from`] is a safe function which may be used instead when `Self` is [`Sized`]
+///   and the destination is already initialized; it may be able to reuse allocations owned by
+///   the destination.
+/// * [`ToOwned`], which allocates a new destination container.
+///
+/// [`ToOwned`]: ../../std/borrow/trait.ToOwned.html
+#[unstable(feature = "clone_to_uninit", issue = "126799")]
+pub unsafe trait CloneToUninit {
+    /// Performs copy-assignment from `self` to `dst`.
+    ///
+    /// This is analogous to `std::ptr::write(dst, self.clone())`,
+    /// except that `self` may be a dynamically-sized type ([`!Sized`](Sized)).
+    ///
+    /// Before this function is called, `dst` may point to uninitialized memory.
+    /// After this function is called, `dst` will point to initialized memory; it will be
+    /// sound to create a `&Self` reference from the pointer.
+    ///
+    /// # Safety
+    ///
+    /// Behavior is undefined if any of the following conditions are violated:
+    ///
+    /// * `dst` must be [valid] for writes.
+    /// * `dst` must be properly aligned.
+    /// * `dst` must have the same [pointer metadata] (slice length or `dyn` vtable) as `self`.
+    ///
+    /// [valid]: ptr#safety
+    /// [pointer metadata]: crate::ptr::metadata()
+    ///
+    /// # Panics
+    ///
+    /// This function may panic. (For example, it might panic if memory allocation for a clone
+    /// of a value owned by `self` fails.)
+    /// If the call panics, then `*dst` should be treated as uninitialized memory; it must not be
+    /// read or dropped, because even if it was previously valid, it may have been partially
+    /// overwritten.
+    ///
+    /// The caller may also need to take care to deallocate the allocation pointed to by `dst`,
+    /// if applicable, to avoid a memory leak, and may need to take other precautions to ensure
+    /// soundness in the presence of unwinding.
+    ///
+    /// Implementors should avoid leaking values by, upon unwinding, dropping all component values
+    /// that might have already been created. (For example, if a `[Foo]` of length 3 is being
+    /// cloned, and the second of the three calls to `Foo::clone()` unwinds, then the first `Foo`
+    /// cloned should be dropped.)
+    unsafe fn clone_to_uninit(&self, dst: *mut Self);
+}
+
+#[unstable(feature = "clone_to_uninit", issue = "126799")]
+unsafe impl<T: Clone> CloneToUninit for T {
+    default unsafe fn clone_to_uninit(&self, dst: *mut Self) {
+        // SAFETY: The safety conditions of clone_to_uninit() are a superset of those of
+        // ptr::write().
+        unsafe {
+            // We hope the optimizer will figure out to create the cloned value in-place,
+            // skipping ever storing it on the stack and the copy to the destination.
+            ptr::write(dst, self.clone());
+        }
+    }
+}
+
+// Specialized implementation for types that are [`Copy`], not just [`Clone`],
+// and can therefore be copied bitwise.
+#[unstable(feature = "clone_to_uninit", issue = "126799")]
+unsafe impl<T: Copy> CloneToUninit for T {
+    unsafe fn clone_to_uninit(&self, dst: *mut Self) {
+        // SAFETY: The safety conditions of clone_to_uninit() are a superset of those of
+        // ptr::copy_nonoverlapping().
+        unsafe {
+            ptr::copy_nonoverlapping(self, dst, 1);
+        }
+    }
+}
+
+#[unstable(feature = "clone_to_uninit", issue = "126799")]
+unsafe impl<T: Clone> CloneToUninit for [T] {
+    #[cfg_attr(debug_assertions, track_caller)]
+    default unsafe fn clone_to_uninit(&self, dst: *mut Self) {
+        let len = self.len();
+        // This is the most likely mistake to make, so check it as a debug assertion.
+        debug_assert_eq!(
+            len,
+            dst.len(),
+            "clone_to_uninit() source and destination must have equal lengths",
+        );
+
+        // SAFETY: The produced `&mut` is valid because:
+        // * The caller is obligated to provide a pointer which is valid for writes.
+        // * All bytes pointed to are in MaybeUninit, so we don't care about the memory's
+        //   initialization status.
+        let uninit_ref = unsafe { &mut *(dst as *mut [MaybeUninit<T>]) };
+
+        // Copy the elements
+        let mut initializing = InitializingSlice::from_fully_uninit(uninit_ref);
+        for element_ref in self.iter() {
+            // If the clone() panics, `initializing` will take care of the cleanup.
+            initializing.push(element_ref.clone());
+        }
+        // If we reach here, then the entire slice is initialized, and we've satisfied our
+        // responsibilities to the caller. Disarm the cleanup guard by forgetting it.
+        mem::forget(initializing);
+    }
+}
+
+#[unstable(feature = "clone_to_uninit", issue = "126799")]
+unsafe impl<T: Copy> CloneToUninit for [T] {
+    #[cfg_attr(debug_assertions, track_caller)]
+    unsafe fn clone_to_uninit(&self, dst: *mut Self) {
+        let len = self.len();
+        // This is the most likely mistake to make, so check it as a debug assertion.
+        debug_assert_eq!(
+            len,
+            dst.len(),
+            "clone_to_uninit() source and destination must have equal lengths",
+        );
+
+        // SAFETY: The safety conditions of clone_to_uninit() are a superset of those of
+        // ptr::copy_nonoverlapping().
+        unsafe {
+            ptr::copy_nonoverlapping(self.as_ptr(), dst.as_mut_ptr(), len);
+        }
+    }
+}
+
+/// Ownership of a collection of values stored in a non-owned `[MaybeUninit<T>]`, some of which
+/// are not yet initialized. This is sort of like a `Vec` that doesn't own its allocation.
+/// Its responsibility is to provide cleanup on unwind by dropping the values that *are*
+/// initialized, unless disarmed by forgetting.
+///
+/// This is a helper for `impl CloneToUninit for [T]`.
+struct InitializingSlice<'a, T> {
+    data: &'a mut [MaybeUninit<T>],
+    /// Number of elements of `*self.data` that are initialized.
+    initialized_len: usize,
+}
+
+impl<'a, T> InitializingSlice<'a, T> {
+    #[inline]
+    fn from_fully_uninit(data: &'a mut [MaybeUninit<T>]) -> Self {
+        Self { data, initialized_len: 0 }
+    }
+
+    /// Push a value onto the end of the initialized part of the slice.
+    ///
+    /// # Panics
+    ///
+    /// Panics if the slice is already fully initialized.
+    #[inline]
+    fn push(&mut self, value: T) {
+        MaybeUninit::write(&mut self.data[self.initialized_len], value);
+        self.initialized_len += 1;
+    }
+}
+
+impl<'a, T> Drop for InitializingSlice<'a, T> {
+    #[cold] // will only be invoked on unwind
+    fn drop(&mut self) {
+        let initialized_slice = ptr::slice_from_raw_parts_mut(
+            MaybeUninit::slice_as_mut_ptr(self.data),
+            self.initialized_len,
+        );
+        // SAFETY:
+        // * the pointer is valid because it was made from a mutable reference
+        // * `initialized_len` counts the initialized elements as an invariant of this type,
+        //   so each of the pointed-to elements is initialized and may be dropped.
+        unsafe {
+            ptr::drop_in_place::<[T]>(initialized_slice);
+        }
+    }
+}
+
 /// Implementations of `Clone` for primitive types.
 ///
 /// Implementations that cannot be described in Rust
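
The new docs above say the trait may also be implemented for custom DSTs. As a
concrete illustration of what such an impl could look like (this sketch is not
part of the patch; the `Named` type and its byte-copy strategy are invented for
exposition, and it relies only on the `clone_to_uninit` feature added here):

    #![feature(clone_to_uninit)]
    use core::clone::CloneToUninit;
    use core::mem::size_of_val;
    use core::ptr;

    // A custom DST: a fixed header plus a dynamically sized tail.
    #[repr(C)]
    struct Named {
        id: u32,
        name: str,
    }

    // SAFETY: on success, every byte of `*dst` is initialized. Both fields are
    // plain bytes, so copying `size_of_val(self)` bytes produces a valid
    // `Named`, given the caller's guarantee that `dst` has the same pointer
    // metadata (and therefore the same size) as `self`.
    unsafe impl CloneToUninit for Named {
        unsafe fn clone_to_uninit(&self, dst: *mut Self) {
            unsafe {
                ptr::copy_nonoverlapping::<u8>(
                    ptr::from_ref(self).cast(),
                    dst.cast(),
                    size_of_val(self),
                );
            }
        }
    }

A type whose tail holds non-`Copy` values would instead need an unwind guard
along the lines of the patch's `InitializingSlice`.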
diff --git a/library/core/tests/clone.rs b/library/core/tests/clone.rs
index 64193e1155890..dd43bdc9f570a 100644
--- a/library/core/tests/clone.rs
+++ b/library/core/tests/clone.rs
@@ -1,3 +1,7 @@
+use core::clone::CloneToUninit;
+use core::mem::MaybeUninit;
+use core::sync::atomic::{AtomicUsize, Ordering::Relaxed};
+
 #[test]
 #[allow(suspicious_double_ref_op)]
 fn test_borrowed_clone() {
@@ -14,3 +18,64 @@ fn test_clone_from() {
     b.clone_from(&a);
     assert_eq!(*b, 5);
 }
+
+#[test]
+fn test_clone_to_uninit_slice_success() {
+    // Using `String`s to exercise allocation and Drop of the individual elements;
+    // if something is aliased or double-freed, at least Miri will catch that.
+    let a: [String; 3] = ["a", "b", "c"].map(String::from);
+
+    let mut storage: MaybeUninit<[String; 3]> = MaybeUninit::uninit();
+    let b: [String; 3] = unsafe {
+        a[..].clone_to_uninit(storage.as_mut_ptr() as *mut [String]);
+        storage.assume_init()
+    };
+
+    assert_eq!(a, b);
+}
+
+#[test]
+#[cfg(panic = "unwind")]
+fn test_clone_to_uninit_slice_drops_on_panic() {
+    /// A static counter is OK to use as long as _this one test_ isn't run several times in
+    /// multiple threads.
+    static COUNTER: AtomicUsize = AtomicUsize::new(0);
+    /// Counts how many instances are live, and panics if a fifth one is created
+    struct CountsDropsAndPanics {}
+    impl CountsDropsAndPanics {
+        fn new() -> Self {
+            COUNTER.fetch_add(1, Relaxed);
+            Self {}
+        }
+    }
+    impl Clone for CountsDropsAndPanics {
+        fn clone(&self) -> Self {
+            if COUNTER.load(Relaxed) == 4 { panic!("intentional panic") } else { Self::new() }
+        }
+    }
+    impl Drop for CountsDropsAndPanics {
+        fn drop(&mut self) {
+            COUNTER.fetch_sub(1, Relaxed);
+        }
+    }
+
+    let a: [CountsDropsAndPanics; 3] = core::array::from_fn(|_| CountsDropsAndPanics::new());
+    assert_eq!(COUNTER.load(Relaxed), 3);
+
+    let panic_payload = std::panic::catch_unwind(|| {
+        let mut storage: MaybeUninit<[CountsDropsAndPanics; 3]> = MaybeUninit::uninit();
+        // This should panic halfway through
+        unsafe {
+            a[..].clone_to_uninit(storage.as_mut_ptr() as *mut [CountsDropsAndPanics]);
+        }
+    })
+    .unwrap_err();
+    assert_eq!(panic_payload.downcast().unwrap(), Box::new("intentional panic"));
+
+    // Check for lack of leak, which is what this test is looking for
+    assert_eq!(COUNTER.load(Relaxed), 3, "leaked during clone!");
+
+    // Might as well exercise the rest of the drops
+    drop(a);
+    assert_eq!(COUNTER.load(Relaxed), 0);
+}
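
The panic test above checks the implementor-side cleanup obligation. On the
caller side, the same guarantees make patterns like the following sound (a
sketch, not part of the patch; `extend_by_clone` is a made-up helper):

    #![feature(clone_to_uninit)]
    use core::clone::CloneToUninit;
    use core::ptr;

    /// Clone `src` into the spare capacity of `dst`.
    fn extend_by_clone<T: Clone>(dst: &mut Vec<T>, src: &[T]) {
        dst.reserve(src.len());
        let old_len = dst.len();
        unsafe {
            let spare: *mut [T] =
                ptr::slice_from_raw_parts_mut(dst.as_mut_ptr().add(old_len), src.len());
            // If a clone panics, the slice impl drops the elements it already
            // cloned, and `dst` still has its old length, so nothing leaks and
            // no uninitialized element is ever observed.
            src.clone_to_uninit(spare);
            dst.set_len(old_len + src.len());
        }
    }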
diff --git a/library/core/tests/lib.rs b/library/core/tests/lib.rs
index f632883b563d3..454642e33f0ec 100644
--- a/library/core/tests/lib.rs
+++ b/library/core/tests/lib.rs
@@ -8,6 +8,7 @@
 #![feature(async_iterator)]
 #![feature(bigint_helper_methods)]
 #![feature(cell_update)]
+#![feature(clone_to_uninit)]
 #![feature(const_align_offset)]
 #![feature(const_align_of_val_raw)]
 #![feature(const_black_box)]
@@ -54,6 +55,7 @@
 #![feature(slice_split_once)]
 #![feature(split_as_slice)]
 #![feature(maybe_uninit_fill)]
+#![feature(maybe_uninit_slice)]
 #![feature(maybe_uninit_uninit_array)]
 #![feature(maybe_uninit_write_slice)]
 #![feature(maybe_uninit_uninit_array_transpose)]
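
Before moving on to the `alloc` changes: the point of the new trait is that a
container can clone unsized contents into memory it has just allocated. A
minimal hand-rolled version of what `Box<[T]>` cloning can now be built on
(illustrative only; `clone_into_new_box` is invented, and as written it leaks
the buffer if a clone panics, which real container code must handle):

    #![feature(clone_to_uninit)]
    use core::clone::CloneToUninit;
    use std::alloc::{alloc, handle_alloc_error, Layout};
    use std::ptr;

    fn clone_into_new_box(src: &[String]) -> Box<[String]> {
        if src.is_empty() {
            return Box::from([]);
        }
        let layout = Layout::array::<String>(src.len()).unwrap();
        unsafe {
            let mem = alloc(layout) as *mut String;
            if mem.is_null() {
                handle_alloc_error(layout);
            }
            // Same length metadata as `src`, as the safety contract requires.
            let dst: *mut [String] = ptr::slice_from_raw_parts_mut(mem, src.len());
            src.clone_to_uninit(dst);
            // Now fully initialized; hand ownership to Box.
            Box::from_raw(dst)
        }
    }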
From 6788b793c3def91238665328da01e5dba009d55c Mon Sep 17 00:00:00 2001
From: Kevin Reid <kpreid@switchb.org>
Date: Tue, 11 Jun 2024 10:46:27 -0700
Subject: [PATCH 2/3] Replace `WriteCloneIntoRaw` with `CloneToUninit`.

---
 library/alloc/src/alloc.rs | 26 --------------------------
 library/alloc/src/boxed.rs |  6 ++++--
 library/alloc/src/lib.rs   |  1 +
 library/alloc/src/rc.rs    |  6 +++---
 library/alloc/src/sync.rs  |  6 +++---
 5 files changed, 11 insertions(+), 34 deletions(-)

diff --git a/library/alloc/src/alloc.rs b/library/alloc/src/alloc.rs
index 6677534eafc6e..1833a7f477f00 100644
--- a/library/alloc/src/alloc.rs
+++ b/library/alloc/src/alloc.rs
@@ -424,29 +424,3 @@ pub mod __alloc_error_handler {
         }
     }
 }
-
-#[cfg(not(no_global_oom_handling))]
-/// Specialize clones into pre-allocated, uninitialized memory.
-/// Used by `Box::clone` and `Rc`/`Arc::make_mut`.
-pub(crate) trait WriteCloneIntoRaw: Sized {
-    unsafe fn write_clone_into_raw(&self, target: *mut Self);
-}
-
-#[cfg(not(no_global_oom_handling))]
-impl<T: Clone> WriteCloneIntoRaw for T {
-    #[inline]
-    default unsafe fn write_clone_into_raw(&self, target: *mut Self) {
-        // Having allocated *first* may allow the optimizer to create
-        // the cloned value in-place, skipping the local and move.
-        unsafe { target.write(self.clone()) };
-    }
-}
-
-#[cfg(not(no_global_oom_handling))]
-impl<T: Copy> WriteCloneIntoRaw for T {
-    #[inline]
-    unsafe fn write_clone_into_raw(&self, target: *mut Self) {
-        // We can always copy in-place, without ever involving a local value.
-        unsafe { target.copy_from_nonoverlapping(self, 1) };
-    }
-}
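
The removed helper and its replacement are used identically; for reference,
here is a self-contained model of the allocate-first pattern that `Box::clone`
uses below (a sketch of the pattern only, not the actual `boxed.rs` code, and
it assumes a recent toolchain with the `clone_to_uninit` feature):

    #![feature(clone_to_uninit)]
    use core::clone::CloneToUninit;
    use core::mem::MaybeUninit;

    fn clone_via_preallocation(src: &String) -> Box<String> {
        // Allocate *first*, so the optimizer has a chance to construct the
        // clone directly in the new allocation.
        let mut boxed: Box<MaybeUninit<String>> = Box::new_uninit();
        unsafe {
            src.clone_to_uninit(boxed.as_mut_ptr());
            boxed.assume_init()
        }
    }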
diff --git a/library/alloc/src/boxed.rs b/library/alloc/src/boxed.rs
index 01a954ed75beb..1ec095a46f704 100644
--- a/library/alloc/src/boxed.rs
+++ b/library/alloc/src/boxed.rs
@@ -188,6 +188,8 @@
 use core::any::Any;
 use core::async_iter::AsyncIterator;
 use core::borrow;
+#[cfg(not(no_global_oom_handling))]
+use core::clone::CloneToUninit;
 use core::cmp::Ordering;
 use core::error::Error;
 use core::fmt;
@@ -207,7 +209,7 @@ use core::slice;
 use core::task::{Context, Poll};
 
 #[cfg(not(no_global_oom_handling))]
-use crate::alloc::{handle_alloc_error, WriteCloneIntoRaw};
+use crate::alloc::handle_alloc_error;
 use crate::alloc::{AllocError, Allocator, Global, Layout};
 #[cfg(not(no_global_oom_handling))]
 use crate::borrow::Cow;
@@ -1346,7 +1348,7 @@ impl<T: Clone, A: Allocator + Clone> Clone for Box<T, A> {
         // Pre-allocate memory to allow writing the cloned value directly.
         let mut boxed = Self::new_uninit_in(self.1.clone());
         unsafe {
-            (**self).write_clone_into_raw(boxed.as_mut_ptr());
+            (**self).clone_to_uninit(boxed.as_mut_ptr());
             boxed.assume_init()
         }
     }

diff --git a/library/alloc/src/lib.rs b/library/alloc/src/lib.rs
index 022a14b931a3c..0f759d64f669d 100644
--- a/library/alloc/src/lib.rs
+++ b/library/alloc/src/lib.rs
@@ -103,6 +103,7 @@
 #![feature(assert_matches)]
 #![feature(async_fn_traits)]
 #![feature(async_iterator)]
+#![feature(clone_to_uninit)]
 #![feature(coerce_unsized)]
 #![feature(const_align_of_val)]
 #![feature(const_box)]

diff --git a/library/alloc/src/rc.rs b/library/alloc/src/rc.rs
index 2b7ab2f6e2590..f3a036989035a 100644
--- a/library/alloc/src/rc.rs
+++ b/library/alloc/src/rc.rs
@@ -249,6 +249,8 @@ use std::boxed::Box;
 use core::any::Any;
 use core::borrow;
 use core::cell::Cell;
+#[cfg(not(no_global_oom_handling))]
+use core::clone::CloneToUninit;
 use core::cmp::Ordering;
 use core::fmt;
 use core::hash::{Hash, Hasher};
@@ -268,8 +270,6 @@ use core::slice::from_raw_parts_mut;
 
 #[cfg(not(no_global_oom_handling))]
 use crate::alloc::handle_alloc_error;
-#[cfg(not(no_global_oom_handling))]
-use crate::alloc::WriteCloneIntoRaw;
 use crate::alloc::{AllocError, Allocator, Global, Layout};
 use crate::borrow::{Cow, ToOwned};
 #[cfg(not(no_global_oom_handling))]
@@ -1810,7 +1810,7 @@ impl<T: Clone, A: Allocator + Clone> Rc<T, A> {
             let mut rc = Self::new_uninit_in(this.alloc.clone());
             unsafe {
                 let data = Rc::get_mut_unchecked(&mut rc);
-                (**this).write_clone_into_raw(data.as_mut_ptr());
+                (**this).clone_to_uninit(data.as_mut_ptr());
                 *this = rc.assume_init();
             }
         } else if Rc::weak_count(this) != 0 {
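
The `Rc::make_mut` call site changed above sits in the clone branch; the other
branch, reached when only weak references remain, moves the value instead. Its
observable contract, runnable on stable (matching the doc-test fragments
visible in the context lines):

    use std::rc::Rc;

    fn main() {
        let mut data = Rc::new(75);
        let weak = Rc::downgrade(&data);

        *Rc::make_mut(&mut data) += 1; // sole strong ref, but a Weak exists:
                                       // the value moves to a new allocation
        assert_eq!(*data, 76);
        assert!(weak.upgrade().is_none()); // old allocation is dissociated
    }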
diff --git a/library/alloc/src/sync.rs b/library/alloc/src/sync.rs
index f9d884e0ea551..6570fab42be1c 100644
--- a/library/alloc/src/sync.rs
+++ b/library/alloc/src/sync.rs
@@ -10,6 +10,8 @@
 
 use core::any::Any;
 use core::borrow;
+#[cfg(not(no_global_oom_handling))]
+use core::clone::CloneToUninit;
 use core::cmp::Ordering;
 use core::fmt;
 use core::hash::{Hash, Hasher};
@@ -30,8 +32,6 @@ use core::sync::atomic::Ordering::{Acquire, Relaxed, Release};
 
 #[cfg(not(no_global_oom_handling))]
 use crate::alloc::handle_alloc_error;
-#[cfg(not(no_global_oom_handling))]
-use crate::alloc::WriteCloneIntoRaw;
 use crate::alloc::{AllocError, Allocator, Global, Layout};
 use crate::borrow::{Cow, ToOwned};
 use crate::boxed::Box;
@@ -2219,7 +2219,7 @@ impl<T: Clone, A: Allocator + Clone> Arc<T, A> {
             let mut arc = Self::new_uninit_in(this.alloc.clone());
             unsafe {
                 let data = Arc::get_mut_unchecked(&mut arc);
-                (**this).write_clone_into_raw(data.as_mut_ptr());
+                (**this).clone_to_uninit(data.as_mut_ptr());
                 *this = arc.assume_init();
             }
         } else if this.inner().weak.load(Relaxed) != 1 {
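
Patch 2 deliberately preserves behavior: `make_mut` still clones only when it
must. The semantics it keeps, runnable on stable:

    use std::rc::Rc;

    fn main() {
        let mut a = Rc::new(String::from("hello"));
        let b = Rc::clone(&a);

        Rc::make_mut(&mut a).push_str(" world"); // `b` exists: clones the String
        assert_eq!(*a, "hello world");
        assert_eq!(*b, "hello");

        drop(b);
        Rc::make_mut(&mut a).push('!'); // unique again: mutates in place
        assert_eq!(*a, "hello world!");
    }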
From f5909278e2925031bd2af17c2ce666f5d7399587 Mon Sep 17 00:00:00 2001
From: Kevin Reid <kpreid@switchb.org>
Date: Sun, 24 Sep 2023 10:57:54 -0700
Subject: [PATCH 3/3] Generalize `{Rc,Arc}::make_mut()` to unsized types.

This requires introducing a new internal type `UniqueRcUninit` (and
`UniqueArcUninit`), which can own an `RcBox<T>` without requiring it to
be initialized, sized, or a slice. This is similar to `UniqueRc`, but
`UniqueRc` doesn't support the allocator parameter, and there is no
`UniqueArc`.
---
 library/alloc/src/rc.rs       | 112 +++++++++++++++++++++++++++------
 library/alloc/src/rc/tests.rs |  18 ++++++
 library/alloc/src/sync.rs     | 107 ++++++++++++++++++++++++-------
 library/alloc/tests/arc.rs    |  18 ++++++
 4 files changed, 229 insertions(+), 26 deletions(-)

diff --git a/library/alloc/src/rc.rs b/library/alloc/src/rc.rs
index f3a036989035a..3745ecb48c18e 100644
--- a/library/alloc/src/rc.rs
+++ b/library/alloc/src/rc.rs
@@ -1749,7 +1749,8 @@ impl<T: ?Sized, A: Allocator> Rc<T, A> {
     }
 }
 
-impl<T: Clone, A: Allocator + Clone> Rc<T, A> {
+#[cfg(not(no_global_oom_handling))]
+impl<T: ?Sized + CloneToUninit, A: Allocator + Clone> Rc<T, A> {
     /// Makes a mutable reference into the given `Rc`.
     ///
     /// If there are other `Rc` pointers to the same allocation, then `make_mut` will
@@ -1800,31 +1801,52 @@ impl<T: ?Sized + CloneToUninit, A: Allocator + Clone> Rc<T, A> {
     /// assert!(76 == *data);
     /// assert!(weak.upgrade().is_none());
     /// ```
-    #[cfg(not(no_global_oom_handling))]
     #[inline]
     #[stable(feature = "rc_unique", since = "1.4.0")]
     pub fn make_mut(this: &mut Self) -> &mut T {
+        let size_of_val = size_of_val::<T>(&**this);
+
         if Rc::strong_count(this) != 1 {
             // Gotta clone the data, there are other Rcs.
-            // Pre-allocate memory to allow writing the cloned value directly.
-            let mut rc = Self::new_uninit_in(this.alloc.clone());
-            unsafe {
-                let data = Rc::get_mut_unchecked(&mut rc);
-                (**this).clone_to_uninit(data.as_mut_ptr());
-                *this = rc.assume_init();
-            }
+
+            let this_data_ref: &T = &**this;
+            // `in_progress` drops the allocation if we panic before finishing initializing it.
+            let mut in_progress: UniqueRcUninit<T, A> =
+                UniqueRcUninit::new(this_data_ref, this.alloc.clone());
+
+            // Initialize with clone of this.
+            let initialized_clone = unsafe {
+                // Clone. If the clone panics, `in_progress` will be dropped and clean up.
+                this_data_ref.clone_to_uninit(in_progress.data_ptr());
+                // Cast type of pointer, now that it is initialized.
+                in_progress.into_rc()
+            };
+
+            // Replace `this` with newly constructed Rc.
+            *this = initialized_clone;
         } else if Rc::weak_count(this) != 0 {
             // Can just steal the data, all that's left is Weaks
-            let mut rc = Self::new_uninit_in(this.alloc.clone());
+
+            // We don't need panic-protection like the above branch does, but we might as well
+            // use the same mechanism.
+            let mut in_progress: UniqueRcUninit<T, A> =
+                UniqueRcUninit::new(&**this, this.alloc.clone());
             unsafe {
-                let data = Rc::get_mut_unchecked(&mut rc);
-                data.as_mut_ptr().copy_from_nonoverlapping(&**this, 1);
+                // Initialize `in_progress` with move of **this.
+                // We have to express this in terms of bytes because `T: ?Sized`; there is no
+                // operation that just copies a value based on its `size_of_val()`.
+                ptr::copy_nonoverlapping(
+                    ptr::from_ref(&**this).cast::<u8>(),
+                    in_progress.data_ptr().cast::<u8>(),
+                    size_of_val,
+                );
 
                 this.inner().dec_strong();
                 // Remove implicit strong-weak ref (no need to craft a fake
                 // Weak here -- we know other Weaks can clean up for us)
                 this.inner().dec_weak();
-                ptr::write(this, rc.assume_init());
+                // Replace `this` with newly constructed Rc that has the moved data.
+                ptr::write(this, in_progress.into_rc());
             }
         }
         // This unsafety is ok because we're guaranteed that the pointer
@@ -3686,3 +3708,67 @@ unsafe impl<#[may_dangle] T: ?Sized, A: Allocator> Drop for UniqueRc<T, A> {
         }
     }
 }
+
+/// A unique owning pointer to a [`RcBox`] **that does not imply the contents are initialized,**
+/// but will deallocate it (without dropping the value) when dropped.
+///
+/// This is a helper for [`Rc::make_mut()`] to ensure correct cleanup on panic.
+/// It is nearly a duplicate of `UniqueRc<MaybeUninit<T>, A>` except that it allows `T: !Sized`,
+/// which `MaybeUninit` does not.
+#[cfg(not(no_global_oom_handling))]
+struct UniqueRcUninit<T: ?Sized, A: Allocator> {
+    ptr: NonNull<RcBox<T>>,
+    layout_for_value: Layout,
+    alloc: Option<A>,
+}
+
+#[cfg(not(no_global_oom_handling))]
+impl<T: ?Sized, A: Allocator> UniqueRcUninit<T, A> {
+    /// Allocate a RcBox with layout suitable to contain `for_value` or a clone of it.
+    fn new(for_value: &T, alloc: A) -> UniqueRcUninit<T, A> {
+        let layout = Layout::for_value(for_value);
+        let ptr = unsafe {
+            Rc::allocate_for_layout(
+                layout,
+                |layout_for_rcbox| alloc.allocate(layout_for_rcbox),
+                |mem| mem.with_metadata_of(ptr::from_ref(for_value) as *const RcBox<T>),
+            )
+        };
+        Self { ptr: NonNull::new(ptr).unwrap(), layout_for_value: layout, alloc: Some(alloc) }
+    }
+
+    /// Returns the pointer to be written into to initialize the [`Rc`].
+    fn data_ptr(&mut self) -> *mut T {
+        let offset = data_offset_align(self.layout_for_value.align());
+        unsafe { self.ptr.as_ptr().byte_add(offset) as *mut T }
+    }
+
+    /// Upgrade this into a normal [`Rc`].
+    ///
+    /// # Safety
+    ///
+    /// The data must have been initialized (by writing to [`Self::data_ptr()`]).
+    unsafe fn into_rc(mut self) -> Rc<T, A> {
+        let ptr = self.ptr;
+        let alloc = self.alloc.take().unwrap();
+        mem::forget(self);
+        // SAFETY: The pointer is valid as per `UniqueRcUninit::new`, and the caller is responsible
+        // for having initialized the data.
+        unsafe { Rc::from_ptr_in(ptr.as_ptr(), alloc) }
+    }
+}
+
+#[cfg(not(no_global_oom_handling))]
+impl<T: ?Sized, A: Allocator> Drop for UniqueRcUninit<T, A> {
+    fn drop(&mut self) {
+        // SAFETY:
+        // * new() produced a pointer safe to deallocate.
+        // * We own the pointer unless into_rc() was called, which forgets us.
+        unsafe {
+            self.alloc
+                .take()
+                .unwrap()
+                .deallocate(self.ptr.cast(), rcbox_layout_for_value_layout(self.layout_for_value));
+        }
+    }
+}
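
`UniqueRcUninit` exists purely for panic-safety: if the clone panics, its
`Drop` frees the half-built allocation instead of leaking it. That property
could be probed with a test along these lines (hypothetical test, not part of
the patch; it needs a toolchain containing this series, and the leak is only
observable under Miri or a leak checker):

    use std::panic::{catch_unwind, AssertUnwindSafe};
    use std::rc::Rc;

    struct PanicsOnClone;

    impl Clone for PanicsOnClone {
        fn clone(&self) -> Self {
            panic!("intentional panic");
        }
    }

    fn main() {
        let mut data: Rc<[PanicsOnClone]> = Rc::new([PanicsOnClone]);
        let other = Rc::clone(&data); // force make_mut onto its clone path

        let result = catch_unwind(AssertUnwindSafe(|| {
            let _ = Rc::make_mut(&mut data);
        }));
        assert!(result.is_err());
        // `data` and `other` still share the original, untouched allocation;
        // the partially initialized new allocation was already freed.
        drop((data, other));
    }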
diff --git a/library/alloc/src/rc/tests.rs b/library/alloc/src/rc/tests.rs
index 0f09be7721fa9..5e2e4beb94a2b 100644
--- a/library/alloc/src/rc/tests.rs
+++ b/library/alloc/src/rc/tests.rs
@@ -316,6 +316,24 @@ fn test_cowrc_clone_weak() {
     assert!(cow1_weak.upgrade().is_none());
 }
 
+/// This is similar to the doc-test for `Rc::make_mut()`, but on an unsized type (slice).
+#[test]
+fn test_cowrc_unsized() {
+    use std::rc::Rc;
+
+    let mut data: Rc<[i32]> = Rc::new([10, 20, 30]);
+
+    Rc::make_mut(&mut data)[0] += 1; // Won't clone anything
+    let mut other_data = Rc::clone(&data); // Won't clone inner data
+    Rc::make_mut(&mut data)[1] += 1; // Clones inner data
+    Rc::make_mut(&mut data)[2] += 1; // Won't clone anything
+    Rc::make_mut(&mut other_data)[0] *= 10; // Won't clone anything
+
+    // Now `data` and `other_data` point to different allocations.
+    assert_eq!(*data, [11, 21, 31]);
+    assert_eq!(*other_data, [110, 20, 30]);
+}
+
 #[test]
 fn test_show() {
     let foo = Rc::new(75);
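
The new test uses `Copy` elements, which exercise the memcpy specialization of
`CloneToUninit`; the element-by-element `T: Clone` path can be exercised the
same way with non-`Copy` elements (a sketch of an additional test, not in the
patch):

    use std::rc::Rc;

    #[test]
    fn make_mut_unsized_non_copy() {
        let mut data: Rc<[String]> = Rc::new(["a".to_owned(), "b".to_owned()]);
        let other = Rc::clone(&data);

        Rc::make_mut(&mut data)[0].push('!'); // clones both Strings
        assert_eq!(&*data, &["a!".to_owned(), "b".to_owned()]);
        assert_eq!(&*other, &["a".to_owned(), "b".to_owned()]);
    }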
diff --git a/library/alloc/src/sync.rs b/library/alloc/src/sync.rs
index 6570fab42be1c..90672164cb932 100644
--- a/library/alloc/src/sync.rs
+++ b/library/alloc/src/sync.rs
@@ -2150,7 +2150,8 @@ unsafe impl<T: ?Sized, A: Allocator> DerefPure for Arc<T, A> {}
 #[unstable(feature = "receiver_trait", issue = "none")]
 impl<T: ?Sized> Receiver for Arc<T> {}
 
-impl<T: Clone, A: Allocator + Clone> Arc<T, A> {
+#[cfg(not(no_global_oom_handling))]
+impl<T: ?Sized + CloneToUninit, A: Allocator + Clone> Arc<T, A> {
     /// Makes a mutable reference into the given `Arc`.
     ///
     /// If there are other `Arc` pointers to the same allocation, then `make_mut` will
@@ -2201,10 +2202,11 @@ impl<T: ?Sized + CloneToUninit, A: Allocator + Clone> Arc<T, A> {
     /// assert!(76 == *data);
     /// assert!(weak.upgrade().is_none());
     /// ```
-    #[cfg(not(no_global_oom_handling))]
     #[inline]
     #[stable(feature = "arc_unique", since = "1.4.0")]
     pub fn make_mut(this: &mut Self) -> &mut T {
+        let size_of_val = mem::size_of_val::<T>(&**this);
+
         // Note that we hold both a strong reference and a weak reference.
         // Thus, releasing our strong reference only will not, by itself, cause
         // the memory to be deallocated.
@@ -2215,13 +2217,19 @@ impl<T: ?Sized + CloneToUninit, A: Allocator + Clone> Arc<T, A> {
         // deallocated.
         if this.inner().strong.compare_exchange(1, 0, Acquire, Relaxed).is_err() {
             // Another strong pointer exists, so we must clone.
-            // Pre-allocate memory to allow writing the cloned value directly.
-            let mut arc = Self::new_uninit_in(this.alloc.clone());
-            unsafe {
-                let data = Arc::get_mut_unchecked(&mut arc);
-                (**this).clone_to_uninit(data.as_mut_ptr());
-                *this = arc.assume_init();
-            }
+
+            let this_data_ref: &T = &**this;
+            // `in_progress` drops the allocation if we panic before finishing initializing it.
+            let mut in_progress: UniqueArcUninit<T, A> =
+                UniqueArcUninit::new(this_data_ref, this.alloc.clone());
+
+            let initialized_clone = unsafe {
+                // Clone. If the clone panics, `in_progress` will be dropped and clean up.
+                this_data_ref.clone_to_uninit(in_progress.data_ptr());
+                // Cast type of pointer, now that it is initialized.
+                in_progress.into_arc()
+            };
+
+            *this = initialized_clone;
         } else if this.inner().weak.load(Relaxed) != 1 {
             // Relaxed suffices in the above because this is fundamentally an
             // optimization: we are always racing with weak pointers being
@@ -2240,11 +2248,22 @@ impl<T: ?Sized + CloneToUninit, A: Allocator + Clone> Arc<T, A> {
             let _weak = Weak { ptr: this.ptr, alloc: this.alloc.clone() };
 
             // Can just steal the data, all that's left is Weaks
-            let mut arc = Self::new_uninit_in(this.alloc.clone());
+            //
+            // We don't need panic-protection like the above branch does, but we might as well
+            // use the same mechanism.
+            let mut in_progress: UniqueArcUninit<T, A> =
+                UniqueArcUninit::new(&**this, this.alloc.clone());
             unsafe {
-                let data = Arc::get_mut_unchecked(&mut arc);
-                data.as_mut_ptr().copy_from_nonoverlapping(&**this, 1);
-                ptr::write(this, arc.assume_init());
+                // Initialize `in_progress` with move of **this.
+                // We have to express this in terms of bytes because `T: ?Sized`; there is no
+                // operation that just copies a value based on its `size_of_val()`.
+                ptr::copy_nonoverlapping(
+                    ptr::from_ref(&**this).cast::<u8>(),
+                    in_progress.data_ptr().cast::<u8>(),
+                    size_of_val,
+                );
+
+                ptr::write(this, in_progress.into_arc());
             }
         } else {
             // We were the sole reference of either kind; bump back up the
@@ -3809,6 +3828,68 @@ fn data_offset_align(align: usize) -> usize {
     layout.size() + layout.padding_needed_for(align)
 }
 
+/// A unique owning pointer to an [`ArcInner`] **that does not imply the contents are
+/// initialized,** but will deallocate it (without dropping the value) when dropped.
+///
+/// This is a helper for [`Arc::make_mut()`] to ensure correct cleanup on panic.
+#[cfg(not(no_global_oom_handling))]
+struct UniqueArcUninit<T: ?Sized, A: Allocator> {
+    ptr: NonNull<ArcInner<T>>,
+    layout_for_value: Layout,
+    alloc: Option<A>,
+}
+
+#[cfg(not(no_global_oom_handling))]
+impl<T: ?Sized, A: Allocator> UniqueArcUninit<T, A> {
+    /// Allocate an ArcInner with layout suitable to contain `for_value` or a clone of it.
+    fn new(for_value: &T, alloc: A) -> UniqueArcUninit<T, A> {
+        let layout = Layout::for_value(for_value);
+        let ptr = unsafe {
+            Arc::allocate_for_layout(
+                layout,
+                |layout_for_arcinner| alloc.allocate(layout_for_arcinner),
+                |mem| mem.with_metadata_of(ptr::from_ref(for_value) as *const ArcInner<T>),
+            )
+        };
+        Self { ptr: NonNull::new(ptr).unwrap(), layout_for_value: layout, alloc: Some(alloc) }
+    }
+
+    /// Returns the pointer to be written into to initialize the [`Arc`].
+    fn data_ptr(&mut self) -> *mut T {
+        let offset = data_offset_align(self.layout_for_value.align());
+        unsafe { self.ptr.as_ptr().byte_add(offset) as *mut T }
+    }
+
+    /// Upgrade this into a normal [`Arc`].
+    ///
+    /// # Safety
+    ///
+    /// The data must have been initialized (by writing to [`Self::data_ptr()`]).
+    unsafe fn into_arc(mut self) -> Arc<T, A> {
+        let ptr = self.ptr;
+        let alloc = self.alloc.take().unwrap();
+        mem::forget(self);
+        // SAFETY: The pointer is valid as per `UniqueArcUninit::new`, and the caller is
+        // responsible for having initialized the data.
+        unsafe { Arc::from_ptr_in(ptr.as_ptr(), alloc) }
+    }
+}
+
+#[cfg(not(no_global_oom_handling))]
+impl<T: ?Sized, A: Allocator> Drop for UniqueArcUninit<T, A> {
+    fn drop(&mut self) {
+        // SAFETY:
+        // * new() produced a pointer safe to deallocate.
+        // * We own the pointer unless into_arc() was called, which forgets us.
+        unsafe {
+            self.alloc.take().unwrap().deallocate(
+                self.ptr.cast(),
+                arcinner_layout_for_value_layout(self.layout_for_value),
+            );
+        }
+    }
+}
+
 #[stable(feature = "arc_error", since = "1.52.0")]
 impl<T: core::error::Error + ?Sized> core::error::Error for Arc<T> {
     #[allow(deprecated, deprecated_in_future)]

diff --git a/library/alloc/tests/arc.rs b/library/alloc/tests/arc.rs
index d564a30b10394..c37a80dca95c8 100644
--- a/library/alloc/tests/arc.rs
+++ b/library/alloc/tests/arc.rs
@@ -209,3 +209,21 @@ fn weak_may_dangle() {
     // `val` dropped here while still borrowed
     // borrow might be used here, when `val` is dropped and runs the `Drop` code for type `std::sync::Weak`
 }
+
+/// This is similar to the doc-test for `Arc::make_mut()`, but on an unsized type (slice).
+#[test]
+fn make_mut_unsized() {
+    use alloc::sync::Arc;
+
+    let mut data: Arc<[i32]> = Arc::new([10, 20, 30]);
+
+    Arc::make_mut(&mut data)[0] += 1; // Won't clone anything
+    let mut other_data = Arc::clone(&data); // Won't clone inner data
+    Arc::make_mut(&mut data)[1] += 1; // Clones inner data
+    Arc::make_mut(&mut data)[2] += 1; // Won't clone anything
+    Arc::make_mut(&mut other_data)[0] *= 10; // Won't clone anything
+
+    // Now `data` and `other_data` point to different allocations.
+    assert_eq!(*data, [11, 21, 31]);
+    assert_eq!(*other_data, [110, 20, 30]);
+}
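
Taken together, the series makes copy-on-write of unsized payloads work end to
end. A closing demonstration (safe code, but it requires a toolchain that
includes these patches, since `Arc<[i32]>::make_mut` is exactly what patch 3
adds):

    use std::sync::Arc;
    use std::thread;

    fn main() {
        let data: Arc<[i32]> = Arc::new([10, 20, 30]);

        let reader = {
            let data = Arc::clone(&data);
            thread::spawn(move || data.iter().sum::<i32>())
        };

        let mut cow = Arc::clone(&data);
        Arc::make_mut(&mut cow)[0] += 1; // other strong refs exist: clones the slice

        assert_eq!(cow[0], 11);
        assert_eq!(data[0], 10); // the shared original is untouched
        assert_eq!(reader.join().unwrap(), 60);
    }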