Auto merge of #2323 - RalfJung:box-is-special, r=RalfJung
handle Box with allocators

This is the Miri side of rust-lang/rust#98847.

Thanks `@DrMeepster` for doing most of the work of getting this test case to pass in Miri. :)
bors committed Jul 5, 2022
2 parents 1a60a7d + 5530e8f commit 5e2266d
Showing 2 changed files with 114 additions and 9 deletions.
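
For context, allocator-aware boxes are created with the nightly-only `Box::new_in` API from `allocator_api`: the allocator becomes the box's second type parameter and, when it is not zero-sized, is stored inside the `Box` value next to the data pointer. That stored allocator is what the retagging code below now has to handle. A minimal sketch of the construct (not part of this commit):

#![feature(allocator_api)]

use std::alloc::Global;

fn main() {
    // With a non-zero-sized allocator (here a reference), the Box carries the
    // allocator alongside its data pointer instead of being a bare pointer.
    let b: Box<i32, &Global> = Box::new_in(7, &Global);
    assert_eq!(*b, 7);
}
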
32 changes: 23 additions & 9 deletions src/stacked_borrows.rs
@@ -976,27 +976,30 @@ pub trait EvalContextExt<'mir, 'tcx: 'mir>: crate::MiriEvalContextExt<'mir, 'tcx
                 // Raw pointers need to be enabled.
                 ty::RawPtr(tym) if kind == RetagKind::Raw =>
                     Some((RefKind::Raw { mutable: tym.mutbl == Mutability::Mut }, false)),
-                // Boxes do not get a protector: protectors reflect that references outlive the call
-                // they were passed in to; that's just not the case for boxes.
-                ty::Adt(..) if ty.is_box() => Some((RefKind::Unique { two_phase: false }, false)),
+                // Boxes are handled separately due to that allocator situation.
                 _ => None,
             }
         }

         // We need a visitor to visit all references. However, that requires
-        // a `MPlaceTy` (or `OpTy), so we have a fast path for reference types that
+        // a `MPlaceTy` (or `OpTy`), so we have a fast path for reference types that
         // avoids allocating.

-        if let Some((mutbl, protector)) = qualify(place.layout.ty, kind) {
+        if let Some((ref_kind, protector)) = qualify(place.layout.ty, kind) {
             // Fast path.
             let val = this.read_immediate(&this.place_to_op(place)?)?;
-            let val = this.retag_reference(&val, mutbl, protector)?;
+            let val = this.retag_reference(&val, ref_kind, protector)?;
             this.write_immediate(*val, place)?;
             return Ok(());
         }

         // If we don't want to recurse, we are already done.
-        if !this.machine.stacked_borrows.as_mut().unwrap().get_mut().retag_fields {
+        // EXCEPT if this is a `Box`, then we have to recurse because allocators.
+        // (Yes this means we technically also recursively retag the allocator itself even if field
+        // retagging is not enabled. *shrug*)
+        if !this.machine.stacked_borrows.as_mut().unwrap().get_mut().retag_fields
+            && !place.layout.ty.ty_adt_def().is_some_and(|adt| adt.is_box())
+        {
             return Ok(());
         }

@@ -1034,10 +1037,21 @@ pub trait EvalContextExt<'mir, 'tcx: 'mir>: crate::MiriEvalContextExt<'mir, 'tcx
             self.ecx
         }

+        fn visit_box(&mut self, place: &MPlaceTy<'tcx, Tag>) -> InterpResult<'tcx> {
+            // Boxes do not get a protector: protectors reflect that references outlive the call
+            // they were passed in to; that's just not the case for boxes.
+            let (ref_kind, protector) = (RefKind::Unique { two_phase: false }, false);
+
+            let val = self.ecx.read_immediate(&place.into())?;
+            let val = self.ecx.retag_reference(&val, ref_kind, protector)?;
+            self.ecx.write_immediate(*val, &place.into())?;
+            Ok(())
+        }
+
         fn visit_value(&mut self, place: &MPlaceTy<'tcx, Tag>) -> InterpResult<'tcx> {
-            if let Some((mutbl, protector)) = qualify(place.layout.ty, self.kind) {
+            if let Some((ref_kind, protector)) = qualify(place.layout.ty, self.kind) {
                 let val = self.ecx.read_immediate(&place.into())?;
-                let val = self.ecx.retag_reference(&val, mutbl, protector)?;
+                let val = self.ecx.retag_reference(&val, ref_kind, protector)?;
                 self.ecx.write_immediate(*val, &place.into())?;
             } else if matches!(place.layout.ty.kind(), ty::RawPtr(..)) {
                 // Wide raw pointers *do* have fields and their types are strange.
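
Background for the protector comment in the new `visit_box` above: a Stacked Borrows protector asserts that a pointer passed into a call stays valid for the entire call. That is the contract for reference arguments, but not for boxes, which the callee owns and may deallocate before returning. A small illustration (not from this diff):

fn consume(b: Box<i32>) {
    // The callee owns the Box and may drop it here, freeing the allocation
    // before the call returns...
    drop(b);
}

fn main() {
    // ...so retagging the Box argument with a protector would wrongly require
    // its allocation to outlive the call.
    consume(Box::new(5));
}
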
91 changes: 91 additions & 0 deletions tests/pass/box-custom-alloc.rs
@@ -0,0 +1,91 @@
#![allow(incomplete_features)] // for trait upcasting
#![feature(allocator_api, trait_upcasting)]

use std::alloc::{AllocError, Allocator};
use std::alloc::Layout;
use std::cell::Cell;
use std::mem::MaybeUninit;
use std::ptr::{self, NonNull};

struct OnceAlloc<'a> {
    space: Cell<&'a mut [MaybeUninit<u8>]>,
}

unsafe impl<'shared, 'a: 'shared> Allocator for &'shared OnceAlloc<'a> {
    fn allocate(&self, layout: Layout) -> Result<NonNull<[u8]>, AllocError> {
        let space = self.space.replace(&mut []);

        let (ptr, len) = (space.as_mut_ptr(), space.len());

        if ptr.align_offset(layout.align()) != 0 || len < layout.size() {
            return Err(AllocError);
        }

        let slice_ptr = ptr::slice_from_raw_parts_mut(ptr as *mut u8, len);
        unsafe { Ok(NonNull::new_unchecked(slice_ptr)) }
    }

    unsafe fn deallocate(&self, _ptr: NonNull<u8>, _layout: Layout) {}
}

trait MyTrait {
    fn hello(&self) -> u8;
}

impl MyTrait for [u8; 1] {
    fn hello(&self) -> u8 {
        self[0]
    }
}

trait TheTrait: MyTrait {}

impl TheTrait for [u8; 1] {}

/// `Box<T, G>` is a `ScalarPair` where the 2nd component is the allocator.
fn test1() {
    let mut space = vec![MaybeUninit::new(0); 1];
    let once_alloc = OnceAlloc {
        space: Cell::new(&mut space[..]),
    };

    let boxed = Box::new_in([42u8; 1], &once_alloc);
    let _val = *boxed;
    let with_dyn: Box<dyn TheTrait, &OnceAlloc> = boxed;
    assert_eq!(42, with_dyn.hello());
    let with_dyn: Box<dyn MyTrait, &OnceAlloc> = with_dyn; // upcast
    assert_eq!(42, with_dyn.hello());
}

// Make the allocator itself so big that the Box is not even a ScalarPair any more.
struct OnceAllocRef<'s, 'a>(&'s OnceAlloc<'a>, u64);

unsafe impl<'shared, 'a: 'shared> Allocator for OnceAllocRef<'shared, 'a> {
    fn allocate(&self, layout: Layout) -> Result<NonNull<[u8]>, AllocError> {
        self.0.allocate(layout)
    }

    unsafe fn deallocate(&self, ptr: NonNull<u8>, layout: Layout) {
        self.0.deallocate(ptr, layout)
    }
}

/// `Box<T, G>` is an `Aggregate`.
fn test2() {
    let mut space = vec![MaybeUninit::new(0); 1];
    let once_alloc = OnceAlloc {
        space: Cell::new(&mut space[..]),
    };

    let boxed = Box::new_in([42u8; 1], OnceAllocRef(&once_alloc, 0));
    let _val = *boxed;
    let with_dyn: Box<dyn TheTrait, OnceAllocRef> = boxed;
    assert_eq!(42, with_dyn.hello());
    let with_dyn: Box<dyn MyTrait, OnceAllocRef> = with_dyn; // upcast
    assert_eq!(42, with_dyn.hello());
}

fn main() {
    test1();
    test2();
}
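
The `ScalarPair` / `Aggregate` wording in the doc comments above refers to how rustc lays out `Box<T, A>` once the allocator `A` is stored inline. The size checks below only sketch the shapes involved (the ABI classification itself is compiler-internal); they assume a 64-bit target, and `FatAlloc` is an illustrative stand-in for `OnceAllocRef`, not part of the test:

#![feature(allocator_api)]

use std::alloc::{AllocError, Allocator, Global, Layout};
use std::mem::size_of;
use std::ptr::NonNull;

// A two-word allocator, analogous to OnceAllocRef: a reference plus a u64.
struct FatAlloc(&'static Global, u64);

unsafe impl Allocator for FatAlloc {
    fn allocate(&self, layout: Layout) -> Result<NonNull<[u8]>, AllocError> {
        self.0.allocate(layout)
    }

    unsafe fn deallocate(&self, ptr: NonNull<u8>, layout: Layout) {
        self.0.deallocate(ptr, layout)
    }
}

fn main() {
    let word = size_of::<usize>();
    // Zero-sized Global allocator: the Box is a single data pointer.
    assert_eq!(size_of::<Box<u8>>(), word);
    // One-word allocator (test1's shape): data pointer + allocator, the ScalarPair case.
    assert_eq!(size_of::<Box<u8, &Global>>(), 2 * word);
    // Two-word allocator (test2's shape): too big for a ScalarPair, so an Aggregate.
    assert_eq!(size_of::<Box<u8, FatAlloc>>(), 3 * word);
}
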
