From 414be074f82f16f77eca8927b5468179b4060ee8 Mon Sep 17 00:00:00 2001
From: The 8472
Date: Sat, 29 Jul 2023 16:08:24 +0200
Subject: [PATCH 1/2] typo fix

---
 library/alloc/src/vec/into_iter.rs | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/library/alloc/src/vec/into_iter.rs b/library/alloc/src/vec/into_iter.rs
index b03e04b7c706f..3a5511fe08b78 100644
--- a/library/alloc/src/vec/into_iter.rs
+++ b/library/alloc/src/vec/into_iter.rs
@@ -136,7 +136,7 @@ impl<T, A: Allocator> IntoIter<T, A> {
 
     /// Forgets to Drop the remaining elements while still allowing the backing allocation to be freed.
     pub(crate) fn forget_remaining_elements(&mut self) {
-        // For th ZST case, it is crucial that we mutate `end` here, not `ptr`.
+        // For the ZST case, it is crucial that we mutate `end` here, not `ptr`.
         // `ptr` must stay aligned, while `end` may be unaligned.
         self.end = self.ptr;
     }

From b05a2347c6a98d6b24ae022315fdc1ef5e511de2 Mon Sep 17 00:00:00 2001
From: The 8472
Date: Sat, 29 Jul 2023 16:25:53 +0200
Subject: [PATCH 2/2] mark vec::IntoIter pointers as `!nonnull`

---
 library/alloc/src/lib.rs                  |   2 +
 library/alloc/src/vec/in_place_collect.rs |   2 +-
 library/alloc/src/vec/into_iter.rs        | 106 ++++++++++++++--------
 library/alloc/src/vec/mod.rs              |  10 +-
 library/alloc/src/vec/spec_from_iter.rs   |   4 +-
 tests/codegen/vec-iter.rs                 |  46 ++++++++++
 6 files changed, 121 insertions(+), 49 deletions(-)
 create mode 100644 tests/codegen/vec-iter.rs

diff --git a/library/alloc/src/lib.rs b/library/alloc/src/lib.rs
index 0af3ac38ee534..71919bc89e136 100644
--- a/library/alloc/src/lib.rs
+++ b/library/alloc/src/lib.rs
@@ -139,6 +139,7 @@
 #![feature(maybe_uninit_slice)]
 #![feature(maybe_uninit_uninit_array)]
 #![feature(maybe_uninit_uninit_array_transpose)]
+#![feature(non_null_convenience)]
 #![feature(pattern)]
 #![feature(ptr_internals)]
 #![feature(ptr_metadata)]
@@ -180,6 +181,7 @@
 #![feature(const_ptr_write)]
 #![feature(const_trait_impl)]
 #![feature(const_try)]
+#![feature(decl_macro)]
 #![feature(dropck_eyepatch)]
 #![feature(exclusive_range_pattern)]
 #![feature(fundamental)]
diff --git a/library/alloc/src/vec/in_place_collect.rs b/library/alloc/src/vec/in_place_collect.rs
index a6cbed092c0ac..0a2280545dac3 100644
--- a/library/alloc/src/vec/in_place_collect.rs
+++ b/library/alloc/src/vec/in_place_collect.rs
@@ -257,7 +257,7 @@ where
         // then the source pointer will stay in its initial position and we can't use it as reference
         if src.ptr != src_ptr {
             debug_assert!(
-                unsafe { dst_buf.add(len) as *const _ } <= src.ptr,
+                unsafe { dst_buf.add(len) as *const _ } <= src.ptr.as_ptr(),
                 "InPlaceIterable contract violation, write pointer advanced beyond read pointer"
             );
         }
diff --git a/library/alloc/src/vec/into_iter.rs b/library/alloc/src/vec/into_iter.rs
index 3a5511fe08b78..654ce09afcd30 100644
--- a/library/alloc/src/vec/into_iter.rs
+++ b/library/alloc/src/vec/into_iter.rs
@@ -18,6 +18,17 @@ use core::ops::Deref;
 use core::ptr::{self, NonNull};
 use core::slice::{self};
 
+macro non_null {
+    (mut $place:expr, $t:ident) => {{
+        #![allow(unused_unsafe)] // we're sometimes used within an unsafe block
+        unsafe { &mut *(ptr::addr_of_mut!($place) as *mut NonNull<$t>) }
+    }},
+    ($place:expr, $t:ident) => {{
+        #![allow(unused_unsafe)] // we're sometimes used within an unsafe block
+        unsafe { *(ptr::addr_of!($place) as *const NonNull<$t>) }
+    }},
+}
+
 /// An iterator that moves out of a vector.
 ///
 /// This `struct` is created by the `into_iter` method on [`Vec`](super::Vec)
@@ -41,10 +52,12 @@ pub struct IntoIter<
     // the drop impl reconstructs a RawVec from buf, cap and alloc
     // to avoid dropping the allocator twice we need to wrap it into ManuallyDrop
     pub(super) alloc: ManuallyDrop<A>,
-    pub(super) ptr: *const T,
-    pub(super) end: *const T, // If T is a ZST, this is actually ptr+len. This encoding is picked so that
-                              // ptr == end is a quick test for the Iterator being empty, that works
-                              // for both ZST and non-ZST.
+    pub(super) ptr: NonNull<T>,
+    /// If T is a ZST, this is actually ptr+len. This encoding is picked so that
+    /// ptr == end is a quick test for the Iterator being empty, that works
+    /// for both ZST and non-ZST.
+    /// For non-ZSTs the pointer is treated as `NonNull<T>`
+    pub(super) end: *const T,
 }
 
 #[stable(feature = "vec_intoiter_debug", since = "1.13.0")]
@@ -68,7 +81,7 @@ impl<T, A: Allocator> IntoIter<T, A> {
     /// ```
     #[stable(feature = "vec_into_iter_as_slice", since = "1.15.0")]
     pub fn as_slice(&self) -> &[T] {
-        unsafe { slice::from_raw_parts(self.ptr, self.len()) }
+        unsafe { slice::from_raw_parts(self.ptr.as_ptr(), self.len()) }
     }
 
     /// Returns the remaining items of this iterator as a mutable slice.
@@ -97,7 +110,7 @@ impl<T, A: Allocator> IntoIter<T, A> {
     }
 
     fn as_raw_mut_slice(&mut self) -> *mut [T] {
-        ptr::slice_from_raw_parts_mut(self.ptr as *mut T, self.len())
+        ptr::slice_from_raw_parts_mut(self.ptr.as_ptr(), self.len())
     }
 
     /// Drops remaining elements and relinquishes the backing allocation.
@@ -124,7 +137,7 @@ impl<T, A: Allocator> IntoIter<T, A> {
         // this creates less assembly
         self.cap = 0;
         self.buf = unsafe { NonNull::new_unchecked(RawVec::NEW.ptr()) };
-        self.ptr = self.buf.as_ptr();
+        self.ptr = self.buf;
         self.end = self.buf.as_ptr();
 
         // Dropping the remaining elements can panic, so this needs to be
@@ -138,7 +151,7 @@ impl<T, A: Allocator> IntoIter<T, A> {
     pub(crate) fn forget_remaining_elements(&mut self) {
         // For the ZST case, it is crucial that we mutate `end` here, not `ptr`.
         // `ptr` must stay aligned, while `end` may be unaligned.
-        self.end = self.ptr;
+        self.end = self.ptr.as_ptr();
     }
 
     #[cfg(not(no_global_oom_handling))]
@@ -160,7 +173,7 @@ impl<T, A: Allocator> IntoIter<T, A> {
                 // say that they're all at the beginning of the "allocation".
                 0..this.len()
             } else {
-                this.ptr.sub_ptr(buf)..this.end.sub_ptr(buf)
+                this.ptr.sub_ptr(this.buf)..this.end.sub_ptr(buf)
             };
             let cap = this.cap;
             let alloc = ManuallyDrop::take(&mut this.alloc);
@@ -187,29 +200,35 @@ impl<T, A: Allocator> Iterator for IntoIter<T, A> {
 
     #[inline]
     fn next(&mut self) -> Option<T> {
-        if self.ptr == self.end {
-            None
-        } else if T::IS_ZST {
-            // `ptr` has to stay where it is to remain aligned, so we reduce the length by 1 by
-            // reducing the `end`.
-            self.end = self.end.wrapping_byte_sub(1);
-
-            // Make up a value of this ZST.
-            Some(unsafe { mem::zeroed() })
+        if T::IS_ZST {
+            if self.ptr.as_ptr() == self.end as *mut _ {
+                None
+            } else {
+                // `ptr` has to stay where it is to remain aligned, so we reduce the length by 1 by
+                // reducing the `end`.
+                self.end = self.end.wrapping_byte_sub(1);
+
+                // Make up a value of this ZST.
+                Some(unsafe { mem::zeroed() })
+            }
         } else {
-            let old = self.ptr;
-            self.ptr = unsafe { self.ptr.add(1) };
+            if self.ptr == non_null!(self.end, T) {
+                None
+            } else {
+                let old = self.ptr;
+                self.ptr = unsafe { old.add(1) };
 
-            Some(unsafe { ptr::read(old) })
+                Some(unsafe { ptr::read(old.as_ptr()) })
+            }
         }
     }
 
     #[inline]
     fn size_hint(&self) -> (usize, Option<usize>) {
         let exact = if T::IS_ZST {
-            self.end.addr().wrapping_sub(self.ptr.addr())
+            self.end.addr().wrapping_sub(self.ptr.as_ptr().addr())
         } else {
-            unsafe { self.end.sub_ptr(self.ptr) }
+            unsafe { non_null!(self.end, T).sub_ptr(self.ptr) }
         };
         (exact, Some(exact))
     }
@@ -217,7 +236,7 @@ impl<T, A: Allocator> Iterator for IntoIter<T, A> {
     #[inline]
     fn advance_by(&mut self, n: usize) -> Result<(), NonZeroUsize> {
         let step_size = self.len().min(n);
-        let to_drop = ptr::slice_from_raw_parts_mut(self.ptr as *mut T, step_size);
+        let to_drop = ptr::slice_from_raw_parts_mut(self.ptr.as_ptr(), step_size);
         if T::IS_ZST {
             // See `next` for why we sub `end` here.
             self.end = self.end.wrapping_byte_sub(step_size);
@@ -259,7 +278,7 @@ impl<T, A: Allocator> Iterator for IntoIter<T, A> {
             // Safety: `len` indicates that this many elements are available and we just checked that
             // it fits into the array.
             unsafe {
-                ptr::copy_nonoverlapping(self.ptr, raw_ary.as_mut_ptr() as *mut T, len);
+                ptr::copy_nonoverlapping(self.ptr.as_ptr(), raw_ary.as_mut_ptr() as *mut T, len);
                 self.forget_remaining_elements();
                 return Err(array::IntoIter::new_unchecked(raw_ary, 0..len));
             }
@@ -268,7 +287,7 @@ impl<T, A: Allocator> Iterator for IntoIter<T, A> {
         // Safety: `len` is larger than the array size. Copy a fixed amount here to fully initialize
         // the array.
         return unsafe {
-            ptr::copy_nonoverlapping(self.ptr, raw_ary.as_mut_ptr() as *mut T, N);
+            ptr::copy_nonoverlapping(self.ptr.as_ptr(), raw_ary.as_mut_ptr() as *mut T, N);
             self.ptr = self.ptr.add(N);
             Ok(raw_ary.transpose().assume_init())
         };
@@ -286,7 +305,7 @@ impl<T, A: Allocator> Iterator for IntoIter<T, A> {
         // Also note the implementation of `Self: TrustedRandomAccess` requires
         // that `T: Copy` so reading elements from the buffer doesn't invalidate
         // them for `Drop`.
-        unsafe { if T::IS_ZST { mem::zeroed() } else { ptr::read(self.ptr.add(i)) } }
+        unsafe { if T::IS_ZST { mem::zeroed() } else { self.ptr.add(i).read() } }
     }
 }
 
 #[stable(feature = "rust1", since = "1.0.0")]
 impl<T, A: Allocator> DoubleEndedIterator for IntoIter<T, A> {
     #[inline]
     fn next_back(&mut self) -> Option<T> {
-        if self.end == self.ptr {
-            None
-        } else if T::IS_ZST {
-            // See above for why 'ptr.offset' isn't used
-            self.end = self.end.wrapping_byte_sub(1);
-
-            // Make up a value of this ZST.
-            Some(unsafe { mem::zeroed() })
+        if T::IS_ZST {
+            if self.end as *mut _ == self.ptr.as_ptr() {
+                None
+            } else {
+                // See above for why 'ptr.offset' isn't used
+                self.end = self.end.wrapping_byte_sub(1);
+
+                // Make up a value of this ZST.
+                Some(unsafe { mem::zeroed() })
+            }
         } else {
-            self.end = unsafe { self.end.sub(1) };
+            if non_null!(self.end, T) == self.ptr {
+                None
+            } else {
+                let new_end = unsafe { non_null!(self.end, T).sub(1) };
+                *non_null!(mut self.end, T) = new_end;
 
-            Some(unsafe { ptr::read(self.end) })
+                Some(unsafe { ptr::read(new_end.as_ptr()) })
+            }
         }
     }
 
@@ -331,7 +357,11 @@ impl<T, A: Allocator> DoubleEndedIterator for IntoIter<T, A> {
 #[stable(feature = "rust1", since = "1.0.0")]
 impl<T, A: Allocator> ExactSizeIterator for IntoIter<T, A> {
     fn is_empty(&self) -> bool {
-        self.ptr == self.end
+        if T::IS_ZST {
+            self.ptr.as_ptr() == self.end as *mut _
+        } else {
+            self.ptr == non_null!(self.end, T)
+        }
     }
 }
 
diff --git a/library/alloc/src/vec/mod.rs b/library/alloc/src/vec/mod.rs
index fca85c6123b3f..e73e42d013094 100644
--- a/library/alloc/src/vec/mod.rs
+++ b/library/alloc/src/vec/mod.rs
@@ -2825,14 +2825,8 @@ impl<T, A: Allocator> IntoIterator for Vec<T, A> {
                 begin.add(me.len()) as *const T
             };
             let cap = me.buf.capacity();
-            IntoIter {
-                buf: NonNull::new_unchecked(begin),
-                phantom: PhantomData,
-                cap,
-                alloc,
-                ptr: begin,
-                end,
-            }
+            let buf = NonNull::new_unchecked(begin);
+            IntoIter { buf, phantom: PhantomData, cap, alloc, ptr: buf, end }
         }
     }
 }
diff --git a/library/alloc/src/vec/spec_from_iter.rs b/library/alloc/src/vec/spec_from_iter.rs
index efa6868473e49..e976552cf2b92 100644
--- a/library/alloc/src/vec/spec_from_iter.rs
+++ b/library/alloc/src/vec/spec_from_iter.rs
@@ -44,12 +44,12 @@ impl<T> SpecFromIter<T, IntoIter<T>> for Vec<T> {
         // than creating it through the generic FromIterator implementation would. That limitation
         // is not strictly necessary as Vec's allocation behavior is intentionally unspecified.
         // But it is a conservative choice.
-        let has_advanced = iterator.buf.as_ptr() as *const _ != iterator.ptr;
+        let has_advanced = iterator.buf != iterator.ptr;
         if !has_advanced || iterator.len() >= iterator.cap / 2 {
             unsafe {
                 let it = ManuallyDrop::new(iterator);
                 if has_advanced {
-                    ptr::copy(it.ptr, it.buf.as_ptr(), it.len());
+                    ptr::copy(it.ptr.as_ptr(), it.buf.as_ptr(), it.len());
                 }
                 return Vec::from_raw_parts(it.buf.as_ptr(), it.len(), it.cap);
             }
diff --git a/tests/codegen/vec-iter.rs b/tests/codegen/vec-iter.rs
new file mode 100644
index 0000000000000..567a999e4e2e2
--- /dev/null
+++ b/tests/codegen/vec-iter.rs
@@ -0,0 +1,46 @@
+// ignore-debug: the debug assertions get in the way
+// compile-flags: -O
+#![crate_type = "lib"]
+#![feature(exact_size_is_empty)]
+
+use std::vec;
+
+// CHECK-LABEL: @vec_iter_len_nonnull
+#[no_mangle]
+pub fn vec_iter_len_nonnull(it: &vec::IntoIter<u8>) -> usize {
+    // CHECK: %subtrahend.i.i = load ptr
+    // CHECK-SAME: !nonnull
+    // CHECK-SAME: !noundef
+    // CHECK: %self2.i.i = load ptr
+    // CHECK-SAME: !nonnull
+    // CHECK-SAME: !noundef
+    // CHECK: sub nuw
+    // CHECK: ret
+    it.len()
+}
+
+// CHECK-LABEL: @vec_iter_is_empty_nonnull
+#[no_mangle]
+pub fn vec_iter_is_empty_nonnull(it: &vec::IntoIter<u8>) -> bool {
+    // CHECK: %other.i = load ptr
+    // CHECK-SAME: !nonnull
+    // CHECK-SAME: !noundef
+    // CHECK: %self2.i = load ptr
+    // CHECK-SAME: !nonnull
+    // CHECK-SAME: !noundef
+    // CHECK: ret
+    it.is_empty()
+}
+
+// CHECK-LABEL: @vec_iter_next
+#[no_mangle]
+pub fn vec_iter_next(it: &mut vec::IntoIter<u8>) -> Option<u8> {
+    // CHECK: %other.i = load ptr
+    // CHECK-SAME: !nonnull
+    // CHECK-SAME: !noundef
+    // CHECK: %self2.i = load ptr
+    // CHECK-SAME: !nonnull
+    // CHECK-SAME: !noundef
+    // CHECK: ret
+    it.next()
+}
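
Not part of the patch, just a reviewer-style aside: a minimal, standalone sketch (stable Rust, illustrative only) of the property the change relies on. `NonNull<T>` is a raw pointer with a non-null invariant, which gives the compiler a niche (so `Option<NonNull<T>>` stays pointer-sized) and allows loads of such a field to carry non-null information; the new codegen test above checks exactly that via the `!nonnull` metadata on the loads. `end` stays `*const T` because the ZST encoding stores `ptr + len` in it, and the `non_null!` macro reinterprets it as `NonNull<T>` only on the non-ZST paths.

use std::mem::size_of;
use std::ptr::NonNull;

fn main() {
    // The non-null invariant creates a niche: Option<NonNull<u8>> is still pointer-sized,
    // and the optimizer may assume a loaded NonNull value is never null.
    assert_eq!(size_of::<Option<NonNull<u8>>>(), size_of::<*mut u8>());

    // Reading through a NonNull behaves like reading through the raw pointer it wraps;
    // the only difference is the guarantee that the pointer cannot be null.
    let mut x = 5u32;
    let p: NonNull<u32> = NonNull::new(&mut x as *mut u32).expect("a reference is never null");
    unsafe { assert_eq!(*p.as_ptr(), 5) };
}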