Skip to content

Commit

Permalink
Use new allocator API, and otherwise update for new Rust
Browse files Browse the repository at this point in the history
- New allocator API
- Remove the "allocator" feature, which should be unnecessary due to how
the new allocator API works
- NonZero no longer implements Deref (rust-lang/rust#41064)
- NonZero::new() returns an Option; use NonZero::new_unchecked()
- Thread locals are no longer 'static (rust-lang/rust#43746)
- Changes to feature flags
- Use unsafe to access extern static (rust-lang/rust#36247)
  • Loading branch information
ids1024 committed Nov 8, 2017
1 parent 56a91d8 commit cd8d627
Show file tree
Hide file tree
Showing 10 changed files with 75 additions and 94 deletions.
3 changes: 1 addition & 2 deletions Cargo.toml
Original file line number Diff line number Diff line change
Expand Up @@ -29,10 +29,9 @@ debug-assertions = false
codegen-units = 1

[features]
default = ["allocator", "tls"]
default = ["tls"]
# ---
alloc_id = []
allocator = []
debugger = []
log = ["write", "alloc_id"]
no_log_lock = ["log"]
Expand Down
6 changes: 3 additions & 3 deletions shim/src/thread_destructor.rs
Original file line number Diff line number Diff line change
Expand Up @@ -22,10 +22,10 @@ pub mod arch {
/// A thread destructor.
type Dtor = unsafe extern fn(dtor: unsafe extern fn(*mut u8), arg: *mut u8, dso_handle: *mut u8) -> i32;

// Make sure the symbols exist.
assert!(!__cxa_thread_atexit_impl.is_null());

unsafe {
// Make sure the symbols exist.
assert!(!__cxa_thread_atexit_impl.is_null());

mem::transmute::<*const u8, Dtor>(__cxa_thread_atexit_impl)
(dtor, t, &__dso_handle as *const _ as *mut _)
};
Expand Down
6 changes: 3 additions & 3 deletions src/allocator.rs
Original file line number Diff line number Diff line change
Expand Up @@ -300,7 +300,7 @@ impl Allocator for LocalAllocator {
pub fn alloc(size: usize, align: usize) -> *mut u8 {
log!(CALL, "Allocating buffer of size {} (align {}).", size, align);

get_allocator!(|alloc| *Pointer::from(alloc.alloc(size, align)))
get_allocator!(|alloc| Pointer::from(alloc.alloc(size, align)).get())
}

/// Free a buffer.
Expand Down Expand Up @@ -353,11 +353,11 @@ pub unsafe fn realloc(ptr: *mut u8, old_size: usize, size: usize, align: usize)
log!(CALL, "Reallocating buffer of size {} to new size {}.", old_size, size);

get_allocator!(|alloc| {
*Pointer::from(alloc.realloc(
Pointer::from(alloc.realloc(
Block::from_raw_parts(Pointer::new(ptr), old_size),
size,
align
))
)).get()
})
}

Expand Down
18 changes: 9 additions & 9 deletions src/block.rs
Original file line number Diff line number Diff line change
Expand Up @@ -110,7 +110,7 @@ impl Block {
/// Is this block aligned to `align`?
#[inline]
pub fn aligned_to(&self, align: usize) -> bool {
*self.ptr as usize % align == 0
self.ptr.get() as usize % align == 0
}

/// memcpy the block to another pointer.
Expand All @@ -129,7 +129,7 @@ impl Block {
// LAST AUDIT: 2016-08-21 (Ticki).

// From the invariants of `Block`, this copy is well-defined.
ptr::copy_nonoverlapping(*self.ptr, *block.ptr, self.size);
ptr::copy_nonoverlapping(self.ptr.get(), block.ptr.get(), self.size);
}
}

Expand All @@ -145,7 +145,7 @@ impl Block {

// Since the memory of the block is inaccessible (read-wise), zeroing it is fully
// safe.
intrinsics::volatile_set_memory(*self.ptr, 0, self.size);
intrinsics::volatile_set_memory(self.ptr.get(), 0, self.size);
}
}
}
Expand All @@ -162,7 +162,7 @@ impl Block {
#[inline]
pub fn left_to(&self, to: &Block) -> bool {
// This won't overflow due to the end being bounded by the address space.
self.size + *self.ptr as usize == *to.ptr as usize
self.size + self.ptr.get() as usize == to.ptr.get() as usize
}

/// Split the block at some position.
Expand Down Expand Up @@ -207,7 +207,7 @@ impl Block {

// Calculate the aligner, which defines the smallest size required as precursor to align
// the block to `align`.
let aligner = (align - *self.ptr as usize % align) % align;
let aligner = (align - self.ptr.get() as usize % align) % align;
// ^^^^^^^^
// To avoid wasting space on the case where the block is already aligned, we calculate it
// modulo `align`.
Expand Down Expand Up @@ -275,30 +275,30 @@ impl From<Block> for Pointer<u8> {
impl PartialOrd for Block {
#[inline]
fn partial_cmp(&self, other: &Block) -> Option<cmp::Ordering> {
self.ptr.partial_cmp(&other.ptr)
self.ptr.get().partial_cmp(&other.ptr.get())
}
}

/// Compare the blocks address.
impl Ord for Block {
#[inline]
fn cmp(&self, other: &Block) -> cmp::Ordering {
self.ptr.cmp(&other.ptr)
self.ptr.get().cmp(&other.ptr.get())
}
}

impl cmp::PartialEq for Block {
#[inline]
fn eq(&self, other: &Block) -> bool {
*self.ptr == *other.ptr
self.ptr.get() == other.ptr.get()
}
}

impl cmp::Eq for Block {}

impl fmt::Debug for Block {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "0x{:x}[{}]", *self.ptr as usize, self.size)
write!(f, "0x{:x}[{}]", self.ptr.get() as usize, self.size)
}
}

Expand Down
4 changes: 2 additions & 2 deletions src/brk.rs
Original file line number Diff line number Diff line change
Expand Up @@ -46,7 +46,7 @@ impl BrkLock {
let expected_brk = self.current_brk().offset(size);

// Break it to me, babe!
let old_brk = Pointer::new(syscalls::brk(*expected_brk as *const u8) as *mut u8);
let old_brk = Pointer::new(syscalls::brk(expected_brk.get() as *const u8) as *mut u8);

/// AAAARGH WAY TOO MUCH LOGGING
///
Expand Down Expand Up @@ -180,7 +180,7 @@ pub fn lock() -> BrkLock {
///
/// On failure the maximum pointer (`!0 as *mut u8`) is returned.
pub unsafe extern fn sbrk(size: isize) -> *mut u8 {
*lock().sbrk(size).unwrap_or_else(|()| Pointer::new(!0 as *mut u8))
lock().sbrk(size).unwrap_or_else(|()| Pointer::new(!0 as *mut u8)).get()
}

/// Get the current program break.
Expand Down
48 changes: 43 additions & 5 deletions src/lib.rs
Original file line number Diff line number Diff line change
Expand Up @@ -9,29 +9,28 @@
//! relatively strong condition, which means that you are forced to rewrite primitives and make
//! sure no allocation ever happens.

#![cfg_attr(feature = "allocator", allocator)]
#![cfg_attr(feature = "clippy", feature(plugin))]
#![cfg_attr(feature = "clippy", plugin(clippy))]

#![no_std]

#![feature(allocator, const_fn, core_intrinsics, stmt_expr_attributes, drop_types_in_const,
#![feature(alloc, allocator_api, const_fn, core_intrinsics, stmt_expr_attributes, drop_types_in_const,
nonzero, optin_builtin_traits, type_ascription, thread_local, linkage,
try_from)]
try_from, const_unsafe_cell_new, const_atomic_bool_new, const_nonzero_new,
const_atomic_ptr_new)]
#![warn(missing_docs, cast_precision_loss, cast_sign_loss, cast_possible_wrap,
cast_possible_truncation, filter_map, if_not_else, items_after_statements,
invalid_upcast_comparisons, mutex_integer, nonminimal_bool, shadow_same, shadow_unrelated,
single_match_else, string_add, string_add_assign, wrong_pub_self_convention)]

extern crate alloc;
extern crate ralloc_shim as shim;

#[macro_use]
mod log;
#[macro_use]
#[cfg(feature = "tls")]
mod tls;
#[cfg(feature = "allocator")]
mod symbols;

#[macro_use]
mod unborrow;
Expand All @@ -49,8 +48,47 @@ mod ptr;
mod sync;
mod vec;

use alloc::heap::{Alloc, AllocErr, Layout, CannotReallocInPlace};

pub use allocator::{alloc, free, realloc, realloc_inplace};
pub use brk::sbrk;
pub use fail::set_oom_handler;
#[cfg(feature = "tls")]
pub use fail::set_thread_oom_handler;

/// Zero-sized handle exposing ralloc through the (2017 nightly) `Alloc` trait.
///
/// The commit removes the old `#![allocator]` attribute/feature; instead, this
/// type adapts the free functions in the `allocator` module to the new
/// allocator API (`alloc::heap::Alloc`).
pub struct Allocator;

/// Implemented for `&'a Allocator` (not `Allocator` itself) so a shared,
/// zero-sized handle can satisfy the trait's `&mut self` methods.
unsafe impl<'a> Alloc for &'a Allocator {
/// Allocate a buffer with the size and alignment described by `layout`.
// NOTE(review): this always returns `Ok`; presumably allocation failure is
// routed through ralloc's OOM handler inside `allocator::alloc` rather than
// surfaced as `AllocErr` — confirm against the `fail` module.
unsafe fn alloc(&mut self, layout: Layout) -> Result<*mut u8, AllocErr> {
Ok(allocator::alloc(layout.size(), layout.align()))
}

/// Free a buffer previously returned by `alloc`/`realloc`.
// Only the size is needed; the alignment in `layout` is ignored by ralloc's
// `free`.
unsafe fn dealloc(&mut self, ptr: *mut u8, layout: Layout) {
allocator::free(ptr, layout.size());
}

/// Reallocate `ptr` from `layout` to `new_layout`, possibly moving it.
// Like `alloc`, this never reports `AllocErr` here.
unsafe fn realloc(&mut self, ptr: *mut u8, layout: Layout, new_layout: Layout) -> Result<*mut u8, AllocErr> {
Ok(allocator::realloc(ptr, layout.size(), new_layout.size(), new_layout.align()))
}

/// Try to grow the allocation in place, without moving it.
// `allocator::realloc_inplace` handles both growing and shrinking, which is
// why this body is identical to `shrink_in_place` below.
unsafe fn grow_in_place(&mut self, ptr: *mut u8, layout: Layout, new_layout: Layout) -> Result<(), CannotReallocInPlace> {
if allocator::realloc_inplace(ptr, layout.size(), new_layout.size()).is_ok() {
Ok(())
} else {
Err(CannotReallocInPlace)
}
}

/// Try to shrink the allocation in place, without moving it.
// Same delegation as `grow_in_place`; the direction of the size change is
// irrelevant to `realloc_inplace`.
unsafe fn shrink_in_place(&mut self, ptr: *mut u8, layout: Layout, new_layout: Layout) -> Result<(), CannotReallocInPlace> {
if allocator::realloc_inplace(ptr, layout.size(), new_layout.size()).is_ok() {
Ok(())
} else {
Err(CannotReallocInPlace)
}
}

/// Report the usable size range for `layout`: exactly the requested size,
/// since ralloc hands back blocks of the exact size asked for.
fn usable_size(&self, layout: &Layout) -> (usize, usize) {
// Yay! It matches exactly.
(layout.size(), layout.size())
}
}
21 changes: 8 additions & 13 deletions src/ptr.rs
Original file line number Diff line number Diff line change
Expand Up @@ -30,7 +30,7 @@ impl<T> Pointer<T> {
debug_assert!(!ptr.is_null(), "Null pointer!");

Pointer {
ptr: NonZero::new(ptr),
ptr: NonZero::new_unchecked(ptr),
_phantom: marker::PhantomData,
}
}
Expand All @@ -45,7 +45,7 @@ impl<T> Pointer<T> {
// LAST AUDIT: 2016-08-21 (Ticki).

// 0x1 is non-zero.
NonZero::new(0x1 as *mut T)
NonZero::new_unchecked(0x1 as *mut T)
},
_phantom: marker::PhantomData,
}
Expand All @@ -61,7 +61,7 @@ impl<T> Pointer<T> {
// LAST AUDIT: 2016-08-21 (Ticki).

// Casting the pointer will preserve its nullable state.
NonZero::new(*self as *mut U)
NonZero::new_unchecked(self.get() as *mut U)
},
_phantom: marker::PhantomData,
}
Expand All @@ -76,7 +76,11 @@ impl<T> Pointer<T> {
/// This is unsafe, due to OOB offsets being undefined behavior.
#[inline]
pub unsafe fn offset(self, diff: isize) -> Pointer<T> {
Pointer::new(self.ptr.offset(diff))
Pointer::new(self.ptr.get().offset(diff))
}

/// Return the raw (non-null) pointer wrapped by this `Pointer`.
///
/// This accessor replaces the removed `Deref<Target = *mut T>` impl: as the
/// commit message notes, `NonZero` no longer implements `Deref`
/// (rust-lang/rust#41064), so former `*ptr` call sites now use `.get()`.
pub fn get(&self) -> *mut T {
self.ptr.get()
}
}

Expand All @@ -89,15 +93,6 @@ impl<T> Default for Pointer<T> {
unsafe impl<T: Send> Send for Pointer<T> {}
unsafe impl<T: Sync> Sync for Pointer<T> {}

impl<T> ops::Deref for Pointer<T> {
type Target = *mut T;

#[inline]
fn deref(&self) -> &*mut T {
&self.ptr
}
}

#[cfg(test)]
mod test {
use super::*;
Expand Down
51 changes: 0 additions & 51 deletions src/symbols.rs

This file was deleted.

4 changes: 2 additions & 2 deletions src/tls.rs
Original file line number Diff line number Diff line change
Expand Up @@ -29,7 +29,7 @@ impl<T: 'static> Key<T> {
/// Having a reference newtype would be unsound, due to the ability to leak a reference to
/// another thread.
#[inline]
pub fn with<F, R>(&'static self, f: F) -> R
pub fn with<F, R>(&self, f: F) -> R
where F: FnOnce(&T) -> R {
// Logging.
log!(INTERNAL, "Accessing TLS variable.");
Expand All @@ -42,7 +42,7 @@ impl<T: 'static> Key<T> {
/// Note that this has to be registered for every thread, it is needed for.
// TODO: Make this automatic on `Drop`.
#[inline]
pub fn register_thread_destructor(&'static self, dtor: extern fn(&T)) {
pub fn register_thread_destructor(&self, dtor: extern fn(&T)) {
// Logging.
log!(INTERNAL, "Registering thread destructor.");

Expand Down
8 changes: 4 additions & 4 deletions src/vec.rs
Original file line number Diff line number Diff line change
Expand Up @@ -68,7 +68,7 @@ impl<T: Leak> Vec<T> {

// Due to the invariants of `Block`, this copy is safe (the pointer is valid and
// unaliased).
ptr::copy_nonoverlapping(*old.ptr, *self.ptr, old.len);
ptr::copy_nonoverlapping(old.ptr.get(), self.ptr.get(), old.len);
}

Block::from(old)
Expand All @@ -95,7 +95,7 @@ impl<T: Leak> Vec<T> {

// By the invariants of this type (the size is bounded by the address space), this
// conversion isn't overflowing.
ptr::write((*self.ptr).offset(self.len as isize), elem);
ptr::write((self.ptr.get()).offset(self.len as isize), elem);
}

// Increment the length.
Expand Down Expand Up @@ -193,7 +193,7 @@ impl<T: Leak> ops::Deref for Vec<T> {
// LAST AUDIT: 2016-08-21 (Ticki).

// The invariants maintains safety.
slice::from_raw_parts(*self.ptr as *const T, self.len)
slice::from_raw_parts(self.ptr.get() as *const T, self.len)
}
}
}
Expand All @@ -205,7 +205,7 @@ impl<T: Leak> ops::DerefMut for Vec<T> {
// LAST AUDIT: 2016-08-21 (Ticki).

// The invariants maintains safety.
slice::from_raw_parts_mut(*self.ptr as *mut T, self.len)
slice::from_raw_parts_mut(self.ptr.get() as *mut T, self.len)
}
}
}
Expand Down

0 comments on commit cd8d627

Please sign in to comment.