Merge #1217
1217: Polymorphic host functions based on dynamic trampoline generation. r=losfair a=losfair

This PR implements polymorphic host functions by dynamically generating the "glue" code that translates arguments from the platform calling convention into an argument array that `runtime-core` passes to the host callback.
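For a sense of the resulting API, here is a minimal sketch of registering such a host function, mirroring the `callback_closure_dynamic` test added in this PR (see `lib/runtime-core-tests/tests/imports.rs` below); the import name `host_add_one` and the `main` wrapper are illustrative, not part of the change:

```rust
use std::sync::Arc;
use wasmer_runtime_core::{
    imports,
    typed_func::DynamicFunc,
    types::{FuncSig, Type, Value},
};

fn main() {
    // The signature is a runtime value rather than a Rust type parameter,
    // so one closure shape can serve any WebAssembly signature.
    let import_object = imports! {
        "env" => {
            "host_add_one" => DynamicFunc::new(
                Arc::new(FuncSig::new(vec![Type::I32], vec![Type::I32])),
                |_ctx, params| -> Vec<Value> {
                    match params[0] {
                        Value::I32(x) => vec![Value::I32(x + 1)],
                        _ => unreachable!(),
                    }
                },
            ),
        },
    };
    let _ = import_object; // pass to `Module::instantiate` as usual
}
```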

TODO:

- ~~Multiple return values.~~ Deferred to a future multivalue PR.
- [x] Dynamic signatures for polymorphic functions.
- [x] Use a proper executable memory allocator.

Co-authored-by: losfair <zhy20000919@hotmail.com>
Co-authored-by: Ivan Enderlin <ivan.enderlin@hoa-project.net>
Co-authored-by: Heyang Zhou <zhy20000919@hotmail.com>
3 people authored Mar 3, 2020
2 parents 115a657 + f499dea commit 22d2031
Showing 6 changed files with 427 additions and 13 deletions.
1 change: 1 addition & 0 deletions CHANGELOG.md
@@ -2,6 +2,7 @@

## **[Unreleased]**

- [#1217](https://github.com/wasmerio/wasmer/pull/1217) Polymorphic host functions based on dynamic trampoline generation.
- [#1252](https://github.com/wasmerio/wasmer/pull/1252) Allow `/` in wasi `--mapdir` wasm path.
- [#1212](https://github.com/wasmerio/wasmer/pull/1212) Add support for GDB JIT debugging:
- Add `--generate-debug-info` and `-g` flags to `wasmer run` to generate debug information during compilation. The debug info is passed via the GDB JIT interface to a debugger to allow source-level debugging of Wasm files. Currently only available on the clif backend.
27 changes: 25 additions & 2 deletions lib/runtime-core-tests/tests/imports.rs
@@ -1,6 +1,13 @@
use std::sync::Arc;
use wasmer_runtime_core::{
compile_with, error::RuntimeError, imports, memory::Memory, typed_func::Func,
types::MemoryDescriptor, units::Pages, vm, Instance,
compile_with,
error::RuntimeError,
imports,
memory::Memory,
typed_func::{DynamicFunc, Func},
types::{FuncSig, MemoryDescriptor, Type, Value},
units::Pages,
vm, Instance,
};
use wasmer_runtime_core_tests::{get_compiler, wat2wasm};

@@ -68,6 +75,7 @@ fn imported_functions_forms(test: &dyn Fn(&Instance)) {
(import "env" "memory" (memory 1 1))
(import "env" "callback_fn" (func $callback_fn (type $type)))
(import "env" "callback_closure" (func $callback_closure (type $type)))
(import "env" "callback_closure_dynamic" (func $callback_closure_dynamic (type $type)))
(import "env" "callback_closure_with_env" (func $callback_closure_with_env (type $type)))
(import "env" "callback_fn_with_vmctx" (func $callback_fn_with_vmctx (type $type)))
(import "env" "callback_closure_with_vmctx" (func $callback_closure_with_vmctx (type $type)))
@@ -86,6 +94,10 @@ fn imported_functions_forms(test: &dyn Fn(&Instance)) {
get_local 0
call $callback_closure)
(func (export "function_closure_dynamic") (type $type)
get_local 0
call $callback_closure_dynamic)
(func (export "function_closure_with_env") (type $type)
get_local 0
call $callback_closure_with_env)
@@ -142,6 +154,16 @@ fn imported_functions_forms(test: &dyn Fn(&Instance)) {
Ok(n + 1)
}),

"callback_closure_dynamic" => DynamicFunc::new(
Arc::new(FuncSig::new(vec![Type::I32], vec![Type::I32])),
|_, params| -> Vec<Value> {
match params[0] {
Value::I32(x) => vec![Value::I32(x + 1)],
_ => unreachable!()
}
}
),

// Closure with a captured environment (a single variable + an instance of `Memory`).
"callback_closure_with_env" => Func::new(move |n: i32| -> Result<i32, ()> {
let shift_ = shift + memory.view::<i32>()[0].get();
@@ -236,6 +258,7 @@ macro_rules! test {

test!(test_fn, function_fn, Ok(2));
test!(test_closure, function_closure, Ok(2));
test!(test_closure_dynamic, function_closure_dynamic, Ok(2));
test!(
test_closure_with_env,
function_closure_with_env,
28 changes: 22 additions & 6 deletions lib/runtime-core/src/loader.rs
@@ -1,7 +1,9 @@
//! The loader module functions are used to load an instance.
use crate::{backend::RunnableModule, module::ModuleInfo, types::Type, types::Value, vm::Ctx};
#[cfg(unix)]
use libc::{mmap, mprotect, munmap, MAP_ANON, MAP_PRIVATE, PROT_EXEC, PROT_READ, PROT_WRITE};
use libc::{
mmap, mprotect, munmap, MAP_ANON, MAP_NORESERVE, MAP_PRIVATE, PROT_EXEC, PROT_READ, PROT_WRITE,
};
use std::{
fmt::Debug,
ops::{Deref, DerefMut},
@@ -138,12 +140,12 @@ impl CodeMemory {
unimplemented!("CodeMemory::new");
}

/// Makes this code memory executable.
/// Makes this code memory executable and not writable.
pub fn make_executable(&self) {
unimplemented!("CodeMemory::make_executable");
}

/// Makes this code memory writable.
/// Makes this code memory writable and not executable.
pub fn make_writable(&self) {
unimplemented!("CodeMemory::make_writable");
}
@@ -169,7 +171,7 @@ impl CodeMemory {
std::ptr::null_mut(),
size,
PROT_READ | PROT_WRITE,
MAP_PRIVATE | MAP_ANON,
MAP_PRIVATE | MAP_ANON | MAP_NORESERVE,
-1,
0,
)
@@ -183,19 +185,33 @@ impl CodeMemory {
}
}

/// Makes this code memory executable.
/// Makes this code memory executable and not writable.
pub fn make_executable(&self) {
if unsafe { mprotect(self.ptr as _, self.size, PROT_READ | PROT_EXEC) } != 0 {
panic!("cannot set code memory to executable");
}
}

/// Makes this code memory writable.
/// Makes this code memory writable and not executable.
pub fn make_writable(&self) {
if unsafe { mprotect(self.ptr as _, self.size, PROT_READ | PROT_WRITE) } != 0 {
panic!("cannot set code memory to writable");
}
}

/// Makes this code memory both writable and executable.
///
/// Avoid using this if a combination of `make_executable` and `make_writable` can be used instead.
pub fn make_writable_executable(&self) {
if unsafe { mprotect(self.ptr as _, self.size, PROT_READ | PROT_WRITE | PROT_EXEC) } != 0 {
panic!("cannot set code memory to writable and executable");
}
}

/// Returns the backing pointer of this code memory.
pub fn get_backing_ptr(&self) -> *mut u8 {
self.ptr
}
}

#[cfg(unix)]
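For orientation, a rough sketch of the write-then-execute cycle these protection helpers manage. The `CodeMemory` calls (`new`, `get_backing_ptr`, `make_executable`, `make_writable`) are the ones shown above; the 4 KiB size, the single-`ret` payload, and the final `transmute` are x86-64-specific illustration only:

```rust
use wasmer_runtime_core::loader::CodeMemory;

fn main() {
    // One `ret` instruction (x86-64) — a purely illustrative payload.
    let code: &[u8] = &[0xC3];

    let mem = CodeMemory::new(4096); // mapped read+write (and now MAP_NORESERVE)
    unsafe {
        // Copy the machine code in while the pages are still writable.
        std::ptr::copy_nonoverlapping(code.as_ptr(), mem.get_backing_ptr(), code.len());
    }
    mem.make_executable(); // then drop write permission and gain execute

    let f: extern "C" fn() = unsafe { std::mem::transmute(mem.get_backing_ptr()) };
    f();

    // To patch the code afterwards, drop execute permission again first:
    // mem.make_writable();
}
```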
195 changes: 195 additions & 0 deletions lib/runtime-core/src/trampoline_x64.rs
@@ -8,7 +8,10 @@

use crate::loader::CodeMemory;
use crate::vm::Ctx;
use std::collections::BTreeMap;
use std::fmt;
use std::ptr::NonNull;
use std::sync::Mutex;
use std::{mem, slice};

lazy_static! {
@@ -29,6 +32,96 @@ lazy_static! {
mem::transmute(ptr)
}
};

static ref TRAMPOLINES: TrampBuffer = TrampBuffer::new(64 * 1048576);
}

/// The global trampoline buffer.
struct TrampBuffer {
/// A fixed-(virtual)-size executable+writable buffer for storing trampolines.
buffer: CodeMemory,

/// Allocation state.
alloc: Mutex<AllocState>,
}

/// The allocation state of a `TrampBuffer`.
struct AllocState {
/// Records all allocated blocks in `buffer`.
///
/// Maps the start address of each block to its end address.
blocks: BTreeMap<usize, usize>,
}

impl TrampBuffer {
/// Creates a trampoline buffer with a given (virtual) size.
fn new(size: usize) -> TrampBuffer {
let mem = CodeMemory::new(size);
mem.make_writable_executable();
TrampBuffer {
buffer: mem,
alloc: Mutex::new(AllocState {
blocks: BTreeMap::new(),
}),
}
}

/// Removes a previously-`insert`ed trampoline.
///
/// For safety, refer to the public interface `TrampolineBufferBuilder::remove_global`.
unsafe fn remove(&self, start: NonNull<u8>) {
let start = start.as_ptr() as usize - self.buffer.get_backing_ptr() as usize;
let mut alloc = self.alloc.lock().unwrap();
alloc
.blocks
.remove(&start)
.expect("TrampBuffer::remove(): Attempting to remove a non-existent allocation.");
}

/// Allocates a region of executable memory and copies `buf` into it.
///
/// Returns `None` if no memory is available.
fn insert(&self, buf: &[u8]) -> Option<NonNull<u8>> {
// First, assume an available start position...
let mut assumed_start: usize = 0;

let mut alloc = self.alloc.lock().unwrap();
let mut found = false;

// Then, try invalidating that assumption...
for (&start, &end) in &alloc.blocks {
if start - assumed_start < buf.len() {
// Unavailable. Move to next free block.
assumed_start = end;
} else {
// This free block can be used.
found = true;
break;
}
}

if !found {
// No previous free blocks were found. Try allocating at the end.
if self.buffer.len() - assumed_start < buf.len() {
// No more free space. Cannot allocate.
return None;
}
}

// Now we know `assumed_start` is valid.
let start = assumed_start;
alloc.blocks.insert(start, start + buf.len());

// We have unique ownership of `self.buffer[start..start + buf.len()]`.
let slice = unsafe {
std::slice::from_raw_parts_mut(
self.buffer.get_backing_ptr().offset(start as _),
buf.len(),
)
};
slice.copy_from_slice(buf);
Some(NonNull::new(slice.as_mut_ptr()).unwrap())
}
}
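To make the first-fit search in `insert` concrete, here is a standalone restatement of the same scan over the `(start => end)` block map; the helper name `first_fit`, the capacities, and the sample layout are illustrative only:

```rust
use std::collections::BTreeMap;

/// First-fit scan mirroring `TrampBuffer::insert`: returns the start offset for a
/// new block of `len` bytes, or `None` if no gap in a buffer of `capacity` bytes
/// is large enough.
fn first_fit(blocks: &BTreeMap<usize, usize>, capacity: usize, len: usize) -> Option<usize> {
    let mut assumed_start = 0usize;
    for (&start, &end) in blocks {
        if start - assumed_start < len {
            // The gap before this block is too small; try right after it.
            assumed_start = end;
        } else {
            // The gap [assumed_start, start) can hold the new block.
            return Some(assumed_start);
        }
    }
    // No gap between existing blocks fits; fall back to the tail of the buffer.
    if capacity - assumed_start >= len {
        Some(assumed_start)
    } else {
        None
    }
}

fn main() {
    let blocks: BTreeMap<usize, usize> =
        [(0, 64), (64, 96), (256, 320)].iter().copied().collect();
    assert_eq!(first_fit(&blocks, 4096, 100), Some(96)); // fits in the 96..256 gap
    assert_eq!(first_fit(&blocks, 320, 200), None); // no gap or tail is large enough
}
```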

/// An opaque type for pointers to a callable memory location.
@@ -219,6 +312,27 @@ impl TrampolineBufferBuilder {
idx
}

/// Inserts this trampoline into the global trampoline buffer.
pub fn insert_global(self) -> Option<NonNull<u8>> {
TRAMPOLINES.insert(&self.code)
}

/// Removes the trampoline pointed to by `ptr` from the global trampoline buffer. Panics if `ptr`
/// does not point to any trampoline.
///
/// # Safety
///
/// Calling this function invalidates the trampoline `ptr` points to and recycles its memory. You
/// should ensure that `ptr` isn't used after calling `remove_global`.
pub unsafe fn remove_global(ptr: NonNull<u8>) {
TRAMPOLINES.remove(ptr);
}

/// Gets the current (non-executable) code in this builder.
pub fn code(&self) -> &[u8] {
&self.code
}

/// Consumes the builder and builds the trampoline buffer.
pub fn build(self) -> TrampolineBuffer {
get_context(); // ensure lazy initialization is completed
@@ -292,4 +406,85 @@ mod tests {
};
assert_eq!(ret, 136);
}

#[test]
fn test_many_global_trampolines() {
unsafe extern "C" fn inner(n: *const CallContext, args: *const u64) -> u64 {
let n = n as usize;
let mut result: u64 = 0;
for i in 0..n {
result += *args.offset(i as _);
}
result
}

// Use the smallest possible buffer size (page size) to check memory releasing logic.
let buffer = TrampBuffer::new(4096);

// Validate the previous trampoline instead of the current one to ensure that no overwrite happened.
let mut prev: Option<(NonNull<u8>, u64)> = None;

for i in 0..5000usize {
let mut builder = TrampolineBufferBuilder::new();
let n = i % 8;
builder.add_callinfo_trampoline(inner, n as _, n as _);
let ptr = buffer
.insert(builder.code())
.expect("cannot insert new code into global buffer");

if let Some((ptr, expected)) = prev.take() {
use std::mem::transmute;

// Test different argument counts.
unsafe {
match expected {
0 => {
let f = transmute::<_, extern "C" fn() -> u64>(ptr);
assert_eq!(f(), 0);
}
1 => {
let f = transmute::<_, extern "C" fn(u64) -> u64>(ptr);
assert_eq!(f(1), 1);
}
3 => {
let f = transmute::<_, extern "C" fn(u64, u64) -> u64>(ptr);
assert_eq!(f(1, 2), 3);
}
6 => {
let f = transmute::<_, extern "C" fn(u64, u64, u64) -> u64>(ptr);
assert_eq!(f(1, 2, 3), 6);
}
10 => {
let f = transmute::<_, extern "C" fn(u64, u64, u64, u64) -> u64>(ptr);
assert_eq!(f(1, 2, 3, 4), 10);
}
15 => {
let f =
transmute::<_, extern "C" fn(u64, u64, u64, u64, u64) -> u64>(ptr);
assert_eq!(f(1, 2, 3, 4, 5), 15);
}
21 => {
let f = transmute::<
_,
extern "C" fn(u64, u64, u64, u64, u64, u64) -> u64,
>(ptr);
assert_eq!(f(1, 2, 3, 4, 5, 6), 21);
}
28 => {
let f = transmute::<
_,
extern "C" fn(u64, u64, u64, u64, u64, u64, u64) -> u64,
>(ptr);
assert_eq!(f(1, 2, 3, 4, 5, 6, 7), 28);
}
_ => unreachable!(),
}
buffer.remove(ptr);
}
}

let expected = (0..=n as u64).sum();
prev = Some((ptr, expected))
}
}
}
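As a usage note for the new `insert_global`/`remove_global` pair — the trampoline construction follows the test above, while the `sum2` callback, the null context pointer, and the import path for `CallContext` are illustrative assumptions:

```rust
use std::ptr::NonNull;
use wasmer_runtime_core::trampoline_x64::{CallContext, TrampolineBufferBuilder};

// Host callback reached through the generated trampoline; the trampoline has
// already spilled the call's arguments into a `u64` array.
unsafe extern "C" fn sum2(_ctx: *const CallContext, args: *const u64) -> u64 {
    *args.offset(0) + *args.offset(1)
}

fn main() {
    let mut builder = TrampolineBufferBuilder::new();
    // This callback never touches its context, so a null context pointer is fine.
    builder.add_callinfo_trampoline(sum2, std::ptr::null(), 2);

    // Publish into the process-wide buffer instead of building a local one.
    let ptr: NonNull<u8> = builder
        .insert_global()
        .expect("global trampoline buffer is full");

    let f = unsafe { std::mem::transmute::<NonNull<u8>, extern "C" fn(u64, u64) -> u64>(ptr) };
    assert_eq!(f(1, 2), 3);

    // Recycle the slot only once no caller can still reach `ptr`.
    unsafe { TrampolineBufferBuilder::remove_global(ptr) };
}
```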