From ca89bd5d75d1694aa43987d0e6691aa0db127bab Mon Sep 17 00:00:00 2001 From: Ivan Enderlin Date: Thu, 19 Nov 2020 11:01:56 +0100 Subject: [PATCH 01/27] =?UTF-8?q?feat(api)=20Rename=20`Extern::from=5Fexpo?= =?UTF-8?q?rt`=20to=20`=E2=80=A6::from=5Fvm=5Fexport`.?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit I hope this little change will clarify a little bit that the `Export` passed to `Extern::from_vm_export` is not a `wasmer::Export` but a `wasmer_vm::Export`. --- lib/api/src/externals/function.rs | 2 +- lib/api/src/externals/global.rs | 2 +- lib/api/src/externals/memory.rs | 2 +- lib/api/src/externals/mod.rs | 12 ++++++------ lib/api/src/externals/table.rs | 2 +- lib/api/src/instance.rs | 2 +- lib/api/src/types.rs | 2 +- lib/c-api/src/wasm_c_api/wasi/mod.rs | 2 +- lib/deprecated/runtime-core/src/module.rs | 4 ++-- 9 files changed, 15 insertions(+), 15 deletions(-) diff --git a/lib/api/src/externals/function.rs b/lib/api/src/externals/function.rs index 3a8adc51066..cf5bc04d5ff 100644 --- a/lib/api/src/externals/function.rs +++ b/lib/api/src/externals/function.rs @@ -431,7 +431,7 @@ impl Function { Ok(results.into_boxed_slice()) } - pub(crate) fn from_export(store: &Store, wasmer_export: ExportFunction) -> Self { + pub(crate) fn from_vm_export(store: &Store, wasmer_export: ExportFunction) -> Self { if let Some(trampoline) = wasmer_export.call_trampoline { Self { store: store.clone(), diff --git a/lib/api/src/externals/global.rs b/lib/api/src/externals/global.rs index a306b9a94e5..6dcac0d35dc 100644 --- a/lib/api/src/externals/global.rs +++ b/lib/api/src/externals/global.rs @@ -180,7 +180,7 @@ impl Global { Ok(()) } - pub(crate) fn from_export(store: &Store, wasmer_export: ExportGlobal) -> Self { + pub(crate) fn from_vm_export(store: &Store, wasmer_export: ExportGlobal) -> Self { Self { store: store.clone(), global: wasmer_export.from, diff --git a/lib/api/src/externals/memory.rs b/lib/api/src/externals/memory.rs index 32583c82fef..16dc7b202fb 100644 --- a/lib/api/src/externals/memory.rs +++ b/lib/api/src/externals/memory.rs @@ -220,7 +220,7 @@ impl Memory { unsafe { MemoryView::new(base as _, length as u32) } } - pub(crate) fn from_export(store: &Store, wasmer_export: ExportMemory) -> Self { + pub(crate) fn from_vm_export(store: &Store, wasmer_export: ExportMemory) -> Self { Self { store: store.clone(), memory: wasmer_export.from, diff --git a/lib/api/src/externals/mod.rs b/lib/api/src/externals/mod.rs index 370a3173df4..ba073d6ca6f 100644 --- a/lib/api/src/externals/mod.rs +++ b/lib/api/src/externals/mod.rs @@ -43,13 +43,13 @@ impl Extern { } } - /// Create an `Extern` from an `Export`. - pub fn from_export(store: &Store, export: Export) -> Self { + /// Create an `Extern` from an `wasmer_vm::Export`. 
+ pub fn from_vm_export(store: &Store, export: Export) -> Self { match export { - Export::Function(f) => Self::Function(Function::from_export(store, f)), - Export::Memory(m) => Self::Memory(Memory::from_export(store, m)), - Export::Global(g) => Self::Global(Global::from_export(store, g)), - Export::Table(t) => Self::Table(Table::from_export(store, t)), + Export::Function(f) => Self::Function(Function::from_vm_export(store, f)), + Export::Memory(m) => Self::Memory(Memory::from_vm_export(store, m)), + Export::Global(g) => Self::Global(Global::from_vm_export(store, g)), + Export::Table(t) => Self::Table(Table::from_vm_export(store, t)), } } } diff --git a/lib/api/src/externals/table.rs b/lib/api/src/externals/table.rs index 4bcbf257584..92e145cae0c 100644 --- a/lib/api/src/externals/table.rs +++ b/lib/api/src/externals/table.rs @@ -139,7 +139,7 @@ impl Table { Ok(()) } - pub(crate) fn from_export(store: &Store, wasmer_export: ExportTable) -> Self { + pub(crate) fn from_vm_export(store: &Store, wasmer_export: ExportTable) -> Self { Self { store: store.clone(), table: wasmer_export.from, diff --git a/lib/api/src/instance.rs b/lib/api/src/instance.rs index 50b67739d6d..21986eea8e1 100644 --- a/lib/api/src/instance.rs +++ b/lib/api/src/instance.rs @@ -80,7 +80,7 @@ impl Instance { .map(|export| { let name = export.name().to_string(); let export = handle.lookup(&name).expect("export"); - let extern_ = Extern::from_export(store, export); + let extern_ = Extern::from_vm_export(store, export); (name, extern_) }) .collect::(); diff --git a/lib/api/src/types.rs b/lib/api/src/types.rs index 80924c33cbd..638d79d3b22 100644 --- a/lib/api/src/types.rs +++ b/lib/api/src/types.rs @@ -82,7 +82,7 @@ impl ValFuncRef for Val { vmctx: item.vmctx, call_trampoline: None, }; - let f = Function::from_export(store, export); + let f = Function::from_vm_export(store, export); Self::FuncRef(f) } } diff --git a/lib/c-api/src/wasm_c_api/wasi/mod.rs b/lib/c-api/src/wasm_c_api/wasi/mod.rs index 68516c66a12..13823b4568c 100644 --- a/lib/c-api/src/wasm_c_api/wasi/mod.rs +++ b/lib/c-api/src/wasm_c_api/wasi/mod.rs @@ -335,7 +335,7 @@ unsafe fn wasi_get_imports_inner( import_type.name() ), })); - let inner = Extern::from_export(store, export); + let inner = Extern::from_vm_export(store, export); Some(Box::new(wasm_extern_t { instance: None, diff --git a/lib/deprecated/runtime-core/src/module.rs b/lib/deprecated/runtime-core/src/module.rs index 7ef483dcb85..b43166dd346 100644 --- a/lib/deprecated/runtime-core/src/module.rs +++ b/lib/deprecated/runtime-core/src/module.rs @@ -159,12 +159,12 @@ impl Module { ( (namespace, name), - new::wasmer::Extern::from_export(store, Export::Function(function)), + new::wasmer::Extern::from_vm_export(store, Export::Function(function)), ) } export => ( (namespace, name), - new::wasmer::Extern::from_export(store, export), + new::wasmer::Extern::from_vm_export(store, export), ), }) .for_each(|((namespace, name), extern_)| { From f338b688647a0adcdfcde6ebaa0aa6a2de4c5102 Mon Sep 17 00:00:00 2001 From: Ivan Enderlin Date: Thu, 19 Nov 2020 15:10:58 +0100 Subject: [PATCH 02/27] feat(vm) Change visibility of some `Instance`'s methods to `pub(self)`. Those methods have a `pub` or `pub(crate)` visibility but it's not required. Let's reduce their visibility. 
--- lib/vm/src/instance.rs | 15 ++++++++------- 1 file changed, 8 insertions(+), 7 deletions(-) diff --git a/lib/vm/src/instance.rs b/lib/vm/src/instance.rs index 0991992b2b4..d145a7ea2e4 100644 --- a/lib/vm/src/instance.rs +++ b/lib/vm/src/instance.rs @@ -126,11 +126,11 @@ impl Instance { unsafe { *self.signature_ids_ptr().add(index) } } - pub(crate) fn module(&self) -> &Arc { + fn module(&self) -> &Arc { &self.module } - pub(crate) fn module_ref(&self) -> &ModuleInfo { + fn module_ref(&self) -> &ModuleInfo { &*self.module } @@ -209,7 +209,7 @@ impl Instance { } /// Get a locally defined or imported memory. - pub(crate) fn get_memory(&self, index: MemoryIndex) -> VMMemoryDefinition { + fn get_memory(&self, index: MemoryIndex) -> VMMemoryDefinition { if let Some(local_index) = self.module.local_memory_index(index) { self.memory(local_index) } else { @@ -273,12 +273,12 @@ impl Instance { } /// Return a reference to the vmctx used by compiled wasm code. - pub fn vmctx(&self) -> &VMContext { + fn vmctx(&self) -> &VMContext { &self.vmctx } /// Return a raw pointer to the vmctx used by compiled wasm code. - pub fn vmctx_ptr(&self) -> *mut VMContext { + fn vmctx_ptr(&self) -> *mut VMContext { self.vmctx() as *const VMContext as *mut VMContext } @@ -416,7 +416,7 @@ impl Instance { } /// Return the table index for the given `VMTableDefinition`. - pub(crate) fn table_index(&self, table: &VMTableDefinition) -> LocalTableIndex { + fn table_index(&self, table: &VMTableDefinition) -> LocalTableIndex { let offsets = &self.offsets; let begin = unsafe { (&self.vmctx as *const VMContext as *const u8) @@ -432,7 +432,7 @@ impl Instance { } /// Return the memory index for the given `VMMemoryDefinition`. - pub(crate) fn memory_index(&self, memory: &VMMemoryDefinition) -> LocalMemoryIndex { + fn memory_index(&self, memory: &VMMemoryDefinition) -> LocalMemoryIndex { let offsets = &self.offsets; let begin = unsafe { (&self.vmctx as *const VMContext as *const u8) @@ -551,6 +551,7 @@ impl Instance { .checked_add(usize::try_from(offsets.size_of_vmctx()).unwrap()) .unwrap(); let align = mem::align_of::(); + Layout::from_size_align(size, align).unwrap() } From 955068cc34c7492e025db64c37e380435b1485e3 Mon Sep 17 00:00:00 2001 From: Ivan Enderlin Date: Thu, 19 Nov 2020 15:23:16 +0100 Subject: [PATCH 03/27] feat(vm) Move `Instance::(lookup|lookup_by_declaration|exports)` methods. This patch moves the `lookup`, `lookup_by_declaration`, and `exports` methods from `Instance` to `InstanceHandle`. Basically, all methods that creates `Export` instances. --- lib/vm/src/instance.rs | 159 +++++++++++++++++++---------------------- 1 file changed, 73 insertions(+), 86 deletions(-) diff --git a/lib/vm/src/instance.rs b/lib/vm/src/instance.rs index d145a7ea2e4..31184943c29 100644 --- a/lib/vm/src/instance.rs +++ b/lib/vm/src/instance.rs @@ -282,89 +282,6 @@ impl Instance { self.vmctx() as *const VMContext as *mut VMContext } - /// Lookup an export with the given name. - pub fn lookup(&self, field: &str) -> Option { - let export = if let Some(export) = self.module.exports.get(field) { - export.clone() - } else { - return None; - }; - Some(self.lookup_by_declaration(&export)) - } - - /// Lookup an export with the given export declaration. 
- pub fn lookup_by_declaration(&self, export: &ExportIndex) -> Export { - match export { - ExportIndex::Function(index) => { - let sig_index = &self.module.functions[*index]; - let (address, vmctx) = if let Some(def_index) = self.module.local_func_index(*index) - { - ( - self.functions[def_index].0 as *const _, - VMFunctionEnvironment { - vmctx: self.vmctx_ptr(), - }, - ) - } else { - let import = self.imported_function(*index); - (import.body, import.environment) - }; - let call_trampoline = Some(self.function_call_trampolines[*sig_index]); - let signature = self.module.signatures[*sig_index].clone(); - ExportFunction { - address, - // Any function received is already static at this point as: - // 1. All locally defined functions in the Wasm have a static signature. - // 2. All the imported functions are already static (because - // they point to the trampolines rather than the dynamic addresses). - kind: VMFunctionKind::Static, - signature, - vmctx, - call_trampoline, - } - .into() - } - ExportIndex::Table(index) => { - let from = if let Some(def_index) = self.module.local_table_index(*index) { - self.tables[def_index].clone() - } else { - let import = self.imported_table(*index); - import.from.clone() - }; - ExportTable { from }.into() - } - ExportIndex::Memory(index) => { - let from = if let Some(def_index) = self.module.local_memory_index(*index) { - self.memories[def_index].clone() - } else { - let import = self.imported_memory(*index); - import.from.clone() - }; - ExportMemory { from }.into() - } - ExportIndex::Global(index) => { - let from = { - if let Some(def_index) = self.module.local_global_index(*index) { - self.globals[def_index].clone() - } else { - let import = self.imported_global(*index); - import.from.clone() - } - }; - ExportGlobal { from }.into() - } - } - } - - /// Return an iterator over the exports of this instance. - /// - /// Specifically, it provides access to the key-value pairs, where the keys - /// are export names, and the values are export declarations which can be - /// resolved `lookup_by_declaration`. - pub fn exports(&self) -> indexmap::map::Iter { - self.module.exports.iter() - } - /// Return a reference to the custom state attached to this instance. #[inline] pub fn host_state(&self) -> &dyn Any { @@ -1052,12 +969,82 @@ impl InstanceHandle { /// Lookup an export with the given name. pub fn lookup(&self, field: &str) -> Option { - self.instance().lookup(field) + let export = self.module().exports.get(field)?; + + Some(self.lookup_by_declaration(&export)) } /// Lookup an export with the given export declaration. pub fn lookup_by_declaration(&self, export: &ExportIndex) -> Export { - self.instance().lookup_by_declaration(export) + let instance = self.instance(); + + match export { + ExportIndex::Function(index) => { + let sig_index = &instance.module.functions[*index]; + let (address, vmctx) = + if let Some(def_index) = instance.module.local_func_index(*index) { + ( + instance.functions[def_index].0 as *const _, + VMFunctionEnvironment { + vmctx: instance.vmctx_ptr(), + }, + ) + } else { + let import = instance.imported_function(*index); + (import.body, import.environment) + }; + let call_trampoline = Some(instance.function_call_trampolines[*sig_index]); + let signature = instance.module.signatures[*sig_index].clone(); + + ExportFunction { + address, + // Any function received is already static at this point as: + // 1. All locally defined functions in the Wasm have a static signature. + // 2. 
All the imported functions are already static (because + // they point to the trampolines rather than the dynamic addresses). + kind: VMFunctionKind::Static, + signature, + vmctx, + call_trampoline, + } + .into() + } + + ExportIndex::Table(index) => { + let from = if let Some(def_index) = instance.module.local_table_index(*index) { + instance.tables[def_index].clone() + } else { + let import = instance.imported_table(*index); + import.from.clone() + }; + + ExportTable { from }.into() + } + + ExportIndex::Memory(index) => { + let from = if let Some(def_index) = instance.module.local_memory_index(*index) { + instance.memories[def_index].clone() + } else { + let import = instance.imported_memory(*index); + import.from.clone() + }; + + ExportMemory { from }.into() + } + + ExportIndex::Global(index) => { + let from = { + if let Some(def_index) = instance.module.local_global_index(*index) { + instance.globals[def_index].clone() + } else { + let import = instance.imported_global(*index); + import.from.clone() + } + }; + + ExportGlobal { from }.into() + } + } } /// Return an iterator over the exports of this instance. @@ -1066,7 +1053,7 @@ impl InstanceHandle { /// are export names, and the values are export declarations which can be /// resolved `lookup_by_declaration`. pub fn exports(&self) -> indexmap::map::Iter { - self.instance().exports() + self.module().exports.iter() } /// Return a reference to the custom state attached to this instance. From b93af3b628a99aabf227aee31489bbf039653f97 Mon Sep 17 00:00:00 2001 From: Ivan Enderlin Date: Mon, 23 Nov 2020 17:22:59 +0100 Subject: [PATCH 04/27] start big patch --- lib/vm/src/export.rs | 13 + lib/vm/src/instance.rs | 443 ++++++++++++++++++-------------- lib/vm/src/trap/traphandlers.rs | 5 +- 3 files changed, 267 insertions(+), 194 deletions(-) diff --git a/lib/vm/src/export.rs b/lib/vm/src/export.rs index 86b09befbb5..4675950be0f 100644 --- a/lib/vm/src/export.rs +++ b/lib/vm/src/export.rs @@ -2,6 +2,7 @@ // Attributions: https://github.com/wasmerio/wasmer/blob/master/ATTRIBUTIONS.md use crate::global::Global; +use crate::instance::InstanceAllocator; use crate::memory::{Memory, MemoryStyle}; use crate::table::{Table, TableStyle}; use crate::vmcontext::{VMFunctionBody, VMFunctionEnvironment, VMFunctionKind, VMTrampoline}; @@ -38,6 +39,9 @@ pub struct ExportFunction { /// Address of the function call trampoline owned by the same VMContext that owns the VMFunctionBody. /// May be None when the function is a host-function (FunctionType == Dynamic or vmctx == nullptr). pub call_trampoline: Option, + ///// A reference to the instance, to ensure it's not deallocated + ///// before the function. + //pub instance_allocator: Option>, } /// # Safety @@ -59,6 +63,9 @@ impl From for Export { pub struct ExportTable { /// Pointer to the containing `Table`. pub from: Arc, + ///// A reference to the instance, to ensure it's not deallocated + ///// before the table. + //pub instance_allocator: Option>, } /// # Safety @@ -100,6 +107,9 @@ impl From for Export { pub struct ExportMemory { /// Pointer to the containing `Memory`. pub from: Arc, + ///// A reference to the instance, to ensure it's not deallocated + ///// before the memory. + //pub instance_allocator: Option>, } /// # Safety @@ -141,6 +151,9 @@ impl From for Export { pub struct ExportGlobal { /// The global declaration, used for compatibility checking. pub from: Arc, + ///// A reference to the instance, to ensure it's not deallocated + ///// before the global. 
+ //pub instance_allocator: Option>, } /// # Safety diff --git a/lib/vm/src/instance.rs b/lib/vm/src/instance.rs index 31184943c29..7352e18cc34 100644 --- a/lib/vm/src/instance.rs +++ b/lib/vm/src/instance.rs @@ -3,7 +3,7 @@ //! An `Instance` contains all the runtime state used by execution of a //! wasm module (except its callstack and register state). An -//! `InstanceHandle` is a reference-counting handle for an `Instance`. +//! `InstanceHandle` is a wrapper around an `Instance`. use crate::export::Export; use crate::global::Global; use crate::imports::Imports; @@ -25,6 +25,7 @@ use std::any::Any; use std::cell::{Cell, RefCell}; use std::collections::HashMap; use std::convert::{TryFrom, TryInto}; +use std::fmt; use std::ptr::NonNull; use std::sync::Arc; use std::{mem, ptr, slice}; @@ -45,7 +46,7 @@ cfg_if::cfg_if! { where H: 'static + Fn(libc::c_int, *const libc::siginfo_t, *const libc::c_void) -> bool, { - self.instance().signal_handler.set(Some(Box::new(handler))); + self.instance().instance_ref().signal_handler.set(Some(Box::new(handler))); } } } else if #[cfg(target_os = "windows")] { @@ -57,7 +58,7 @@ cfg_if::cfg_if! { where H: 'static + Fn(winapi::um::winnt::PEXCEPTION_POINTERS) -> bool, { - self.instance().signal_handler.set(Some(Box::new(handler))); + self.instance().instance_ref().signal_handler.set(Some(Box::new(handler))); } } } @@ -110,6 +111,12 @@ pub(crate) struct Instance { vmctx: VMContext, } +impl fmt::Debug for Instance { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + formatter.debug_struct("Instance").finish() + } +} + #[allow(clippy::cast_ptr_alignment)] impl Instance { /// Helper function to access various locations offset from our `*mut @@ -463,15 +470,6 @@ impl Instance { .set(index, val) } - fn alloc_layout(offsets: &VMOffsets) -> Layout { - let size = mem::size_of::() - .checked_add(usize::try_from(offsets.size_of_vmctx()).unwrap()) - .unwrap(); - let align = mem::align_of::(); - - Layout::from_size_align(size, align).unwrap() - } - /// Get a `VMCallerCheckedAnyfunc` for the given `FunctionIndex`. fn get_caller_checked_anyfunc(&self, index: FunctionIndex) -> VMCallerCheckedAnyfunc { if index == FunctionIndex::reserved_value() { @@ -691,99 +689,93 @@ impl Instance { } } -/// A handle holding an `Instance` of a WebAssembly module. -#[derive(Hash, PartialEq, Eq)] -pub struct InstanceHandle { - instance: *mut Instance, - - /// Whether `Self` owns `self.instance`. It's not always the case; - /// e.g. when `Self` is built with `Self::from_vmctx`. - /// - /// This information is necessary to know whether `Self` should - /// deallocate `self.instance`. - owned_instance: bool, +#[derive(Debug)] +#[repr(C)] +pub struct InstanceAllocator { + ptr_to_dealloc: *const Arc, + ptr_layout: Layout, + instance: mem::ManuallyDrop, } -/// # Safety -/// This is safe because there is no thread-specific logic in `InstanceHandle`. -/// TODO: this needs extra review -unsafe impl Send for InstanceHandle {} +impl InstanceAllocator { + #[inline(always)] + const fn new(instance: Instance, ptr_to_dealloc: *const Arc, ptr_layout: Layout) -> Self { + Self { + instance: mem::ManuallyDrop::new(instance), + ptr_to_dealloc, + ptr_layout, + } + } -impl InstanceHandle { - /// Allocates an instance for use with `InstanceHandle::new`. - /// - /// Returns the instance pointer and the [`VMOffsets`] that describe the - /// memory buffer pointed to by the instance pointer. 
- pub fn allocate_instance(module: &ModuleInfo) -> (NonNull, VMOffsets) { - let offsets = VMOffsets::new(mem::size_of::<*const u8>() as u8, module); + fn instance_layout(offsets: &VMOffsets) -> Layout { + let size = mem::size_of::>>() + .checked_add(usize::try_from(offsets.size_of_vmctx()).unwrap()) + .unwrap(); + let align = mem::align_of::>>(); - let layout = Instance::alloc_layout(&offsets); + Layout::from_size_align(size, align).unwrap() + } + + fn allocate_instance(offsets: &VMOffsets) -> NonNull { + let layout = Self::instance_layout(offsets); + + dbg!(&layout); #[allow(clippy::cast_ptr_alignment)] - let instance_ptr = unsafe { alloc::alloc(layout) as *mut Instance }; - let ptr = if let Some(ptr) = NonNull::new(instance_ptr) { + let arc_instance_ptr = unsafe { alloc::alloc(layout) as *mut Arc }; + + let ptr = if let Some(ptr) = NonNull::new(arc_instance_ptr) { ptr.cast() } else { alloc::handle_alloc_error(layout); }; - (ptr, offsets) + ptr } - /// Get the locations of where the local `VMMemoryDefinition`s should be stored. - /// - /// This function lets us create `Memory` objects on the host with backing - /// memory in the VM. - /// - /// # Safety - /// - `instance_ptr` must point to enough memory that all of the offsets in - /// `offsets` point to valid locations in memory. - pub unsafe fn memory_definition_locations( - instance_ptr: NonNull, - offsets: &VMOffsets, - ) -> Vec> { - let num_memories = offsets.num_local_memories; - let num_memories = usize::try_from(num_memories).unwrap(); - let mut out = Vec::with_capacity(num_memories); - // TODO: better encapsulate this logic, this shouldn't be duplicated - let base_ptr = instance_ptr.as_ptr().add(std::mem::size_of::()); - for i in 0..num_memories { - let mem_offset = offsets.vmctx_vmmemory_definition(LocalMemoryIndex::new(i)); - let mem_offset = usize::try_from(mem_offset).unwrap(); + unsafe fn deallocate_instance(&mut self) { + mem::ManuallyDrop::drop(&mut self.instance); + std::alloc::dealloc(self.ptr_to_dealloc as *mut _, self.ptr_layout); + } - let new_ptr = NonNull::new_unchecked(base_ptr.add(mem_offset)); + #[inline(always)] + pub(crate) fn instance_ref(&self) -> &Instance { + &self.instance + } +} - out.push(new_ptr.cast()); - } - out +impl PartialEq for InstanceAllocator { + fn eq(&self, other: &Self) -> bool { + self.ptr_to_dealloc == other.ptr_to_dealloc } +} - /// Get the locations of where the `VMTableDefinition`s should be stored. - /// - /// This function lets us create `Table` objects on the host with backing - /// memory in the VM. - /// - /// # Safety - /// - `instance_ptr` must point to enough memory that all of the offsets in - /// `offsets` point to valid locations in memory. - pub unsafe fn table_definition_locations( - instance_ptr: NonNull, - offsets: &VMOffsets, - ) -> Vec> { - let num_tables = offsets.num_local_tables; - let num_tables = usize::try_from(num_tables).unwrap(); - let mut out = Vec::with_capacity(num_tables); - // TODO: better encapsulate this logic, this shouldn't be duplicated - let base_ptr = instance_ptr.as_ptr().add(std::mem::size_of::()); - for i in 0..num_tables { - let table_offset = offsets.vmctx_vmtable_definition(LocalTableIndex::new(i)); - let table_offset = usize::try_from(table_offset).unwrap(); +impl Drop for InstanceAllocator { + fn drop(&mut self) { + unsafe { Self::deallocate_instance(self) }; + } +} - let new_ptr = NonNull::new_unchecked(base_ptr.add(table_offset)); +/// A handle holding an `Instance` of a WebAssembly module. 
+//#[derive(Hash, PartialEq, Eq)] +pub struct InstanceHandle { + instance: Arc, +} - out.push(new_ptr.cast()); - } - out +/// # Safety +/// This is safe because there is no thread-specific logic in `InstanceHandle`. +/// TODO: this needs extra review +unsafe impl Send for InstanceHandle {} + +impl InstanceHandle { + /// Allocates an instance for use with `InstanceHandle::new`. + /// + /// Returns the instance pointer and the [`VMOffsets`] that describe the + /// memory buffer pointed to by the instance pointer. + pub fn allocate_instance(module: &ModuleInfo) -> (NonNull, VMOffsets) { + let offsets = VMOffsets::new(mem::size_of::<*const u8>() as u8, module); + + (InstanceAllocator::allocate_instance(&offsets), offsets) } /// Create a new `InstanceHandle` pointing at a new `Instance`. @@ -813,7 +805,7 @@ impl InstanceHandle { /// all the local memories. #[allow(clippy::too_many_arguments)] pub unsafe fn new( - instance_ptr: NonNull, + arc_instance_ptr: NonNull, offsets: VMOffsets, module: Arc, finished_functions: BoxedSlice, @@ -825,39 +817,47 @@ impl InstanceHandle { vmshared_signatures: BoxedSlice, host_state: Box, ) -> Result { - let instance_ptr = instance_ptr.cast::().as_ptr(); - + let arc_instance_ptr = arc_instance_ptr.cast::>().as_ptr(); let vmctx_globals = finished_globals .values() .map(|m| m.vmglobal()) .collect::>() .into_boxed_slice(); - let passive_data = RefCell::new(module.passive_data.clone()); let handle = { - let instance = Instance { - module, - offsets, - memories: finished_memories, - tables: finished_tables, - globals: finished_globals, - functions: finished_functions, - function_call_trampolines: finished_function_call_trampolines, - passive_elements: Default::default(), - passive_data, - host_state, - signal_handler: Cell::new(None), - vmctx: VMContext {}, - }; - ptr::write(instance_ptr, instance); - - Self { - instance: instance_ptr, - owned_instance: true, - } + let layout = InstanceAllocator::instance_layout(&offsets); + dbg!(&layout); + let arc_instance = Arc::new(InstanceAllocator::new( + Instance { + module, + offsets, + memories: finished_memories, + tables: finished_tables, + globals: finished_globals, + functions: finished_functions, + function_call_trampolines: finished_function_call_trampolines, + passive_elements: Default::default(), + passive_data, + host_state, + signal_handler: Cell::new(None), + vmctx: VMContext {}, + }, + arc_instance_ptr, + layout, + )); + + let instance_ptr = Arc::as_ptr(&arc_instance); + debug_assert_eq!(Arc::strong_count(&arc_instance), 1); + + ptr::write(arc_instance_ptr, arc_instance); + + let instance = Arc::from_raw(instance_ptr); + debug_assert_eq!(Arc::strong_count(&instance), 1); + + Self { instance } }; - let instance = handle.instance(); + let instance = handle.instance().instance_ref(); ptr::copy( vmshared_signatures.values().as_slice().as_ptr(), @@ -908,6 +908,88 @@ impl InstanceHandle { Ok(handle) } + /// Create a new `InstanceHandle` pointing at the instance + /// pointed to by the given `VMContext` pointer. + /// + /// # Safety + /// This is unsafe because it doesn't work on just any `VMContext`, it must + /// be a `VMContext` allocated as part of an `Instance`. + pub(crate) unsafe fn from_vmctx(vmctx: *mut VMContext) -> Self { + /* + let instance = (&*vmctx).instance(); + + Self { + instance: Arc::from_raw(instance as *const InstanceAllocator), + // We consider `vmctx` owns the `Instance`, not `Self`. + //owned_instance: false, + } + */ + todo!() + } + + /// Return a reference to the contained `Instance`. 
+ pub(crate) fn instance(&self) -> &Arc { + &self.instance + } + + /// Get the locations of where the local `VMMemoryDefinition`s should be stored. + /// + /// This function lets us create `Memory` objects on the host with backing + /// memory in the VM. + /// + /// # Safety + /// - `instance_ptr` must point to enough memory that all of the offsets in + /// `offsets` point to valid locations in memory. + pub unsafe fn memory_definition_locations( + instance_ptr: NonNull, + offsets: &VMOffsets, + ) -> Vec> { + let num_memories = offsets.num_local_memories; + let num_memories = usize::try_from(num_memories).unwrap(); + let mut out = Vec::with_capacity(num_memories); + // TODO: better encapsulate this logic, this shouldn't be duplicated + let base_ptr = instance_ptr.as_ptr().add(std::mem::size_of::()); + + for i in 0..num_memories { + let mem_offset = offsets.vmctx_vmmemory_definition(LocalMemoryIndex::new(i)); + let mem_offset = usize::try_from(mem_offset).unwrap(); + + let new_ptr = NonNull::new_unchecked(base_ptr.add(mem_offset)); + + out.push(new_ptr.cast()); + } + out + } + + /// Get the locations of where the `VMTableDefinition`s should be stored. + /// + /// This function lets us create `Table` objects on the host with backing + /// memory in the VM. + /// + /// # Safety + /// - `instance_ptr` must point to enough memory that all of the offsets in + /// `offsets` point to valid locations in memory. + pub unsafe fn table_definition_locations( + instance_ptr: NonNull, + offsets: &VMOffsets, + ) -> Vec> { + let num_tables = offsets.num_local_tables; + let num_tables = usize::try_from(num_tables).unwrap(); + let mut out = Vec::with_capacity(num_tables); + // TODO: better encapsulate this logic, this shouldn't be duplicated + let base_ptr = instance_ptr.as_ptr().add(std::mem::size_of::()); + + for i in 0..num_tables { + let table_offset = offsets.vmctx_vmtable_definition(LocalTableIndex::new(i)); + let table_offset = usize::try_from(table_offset).unwrap(); + + let new_ptr = NonNull::new_unchecked(base_ptr.add(table_offset)); + + out.push(new_ptr.cast()); + } + out + } + /// Finishes the instantiation process started by `Instance::new`. /// /// # Safety @@ -917,54 +999,38 @@ impl InstanceHandle { &self, data_initializers: &[DataInitializer<'_>], ) -> Result<(), Trap> { - check_table_init_bounds(self.instance())?; - check_memory_init_bounds(self.instance(), data_initializers)?; + let instance = self.instance().instance_ref(); + check_table_init_bounds(instance)?; + check_memory_init_bounds(instance, data_initializers)?; // Apply the initializers. - initialize_tables(self.instance())?; - initialize_memories(self.instance(), data_initializers)?; + initialize_tables(instance)?; + initialize_memories(instance, data_initializers)?; // The WebAssembly spec specifies that the start function is // invoked automatically at instantiation time. - self.instance().invoke_start_function()?; + instance.invoke_start_function()?; Ok(()) } - /// Create a new `InstanceHandle` pointing at the instance - /// pointed to by the given `VMContext` pointer. - /// - /// # Safety - /// This is unsafe because it doesn't work on just any `VMContext`, it must - /// be a `VMContext` allocated as part of an `Instance`. - pub unsafe fn from_vmctx(vmctx: *mut VMContext) -> Self { - let instance = (&*vmctx).instance(); - - Self { - instance: instance as *const Instance as *mut Instance, - - // We consider `vmctx` owns the `Instance`, not `Self`. 
- owned_instance: false, - } - } - /// Return a reference to the vmctx used by compiled wasm code. pub fn vmctx(&self) -> &VMContext { - self.instance().vmctx() + self.instance().instance_ref().vmctx() } /// Return a raw pointer to the vmctx used by compiled wasm code. pub fn vmctx_ptr(&self) -> *mut VMContext { - self.instance().vmctx_ptr() + self.instance().instance_ref().vmctx_ptr() } /// Return a reference-counting pointer to a module. pub fn module(&self) -> &Arc { - self.instance().module() + self.instance().instance_ref().module() } /// Return a reference to a module. pub fn module_ref(&self) -> &ModuleInfo { - self.instance().module_ref() + self.instance().instance_ref().module_ref() } /// Lookup an export with the given name. @@ -976,25 +1042,26 @@ impl InstanceHandle { /// Lookup an export with the given export declaration. pub fn lookup_by_declaration(&self, export: &ExportIndex) -> Export { - let instance = self.instance(); + let instance = self.instance().clone(); + let instance_ref = instance.instance_ref(); match export { ExportIndex::Function(index) => { - let sig_index = &instance.module.functions[*index]; + let sig_index = &instance_ref.module.functions[*index]; let (address, vmctx) = - if let Some(def_index) = instance.module.local_func_index(*index) { + if let Some(def_index) = instance_ref.module.local_func_index(*index) { ( - instance.functions[def_index].0 as *const _, + instance_ref.functions[def_index].0 as *const _, VMFunctionEnvironment { - vmctx: instance.vmctx_ptr(), + vmctx: instance_ref.vmctx_ptr(), }, ) } else { - let import = instance.imported_function(*index); + let import = instance_ref.imported_function(*index); (import.body, import.environment) }; - let call_trampoline = Some(instance.function_call_trampolines[*sig_index]); - let signature = instance.module.signatures[*sig_index].clone(); + let call_trampoline = Some(instance_ref.function_call_trampolines[*sig_index]); + let signature = instance_ref.module.signatures[*sig_index].clone(); ExportFunction { address, @@ -1006,43 +1073,56 @@ impl InstanceHandle { signature, vmctx, call_trampoline, + //instance_allocator: Some(instance), } .into() } ExportIndex::Table(index) => { - let from = if let Some(def_index) = instance.module.local_table_index(*index) { - instance.tables[def_index].clone() + let from = if let Some(def_index) = instance_ref.module.local_table_index(*index) { + instance_ref.tables[def_index].clone() } else { - let import = instance.imported_table(*index); + let import = instance_ref.imported_table(*index); import.from.clone() }; - ExportTable { from }.into() + ExportTable { + from, + //instance_allocator: Some(instance), + } + .into() } ExportIndex::Memory(index) => { - let from = if let Some(def_index) = instance.module.local_memory_index(*index) { - instance.memories[def_index].clone() + let from = if let Some(def_index) = instance_ref.module.local_memory_index(*index) { + instance_ref.memories[def_index].clone() } else { - let import = instance.imported_memory(*index); + let import = instance_ref.imported_memory(*index); import.from.clone() }; - ExportMemory { from }.into() + ExportMemory { + from, + //instance_allocator: Some(instance), + } + .into() } ExportIndex::Global(index) => { let from = { - if let Some(def_index) = instance.module.local_global_index(*index) { - instance.globals[def_index].clone() + if let Some(def_index) = instance_ref.module.local_global_index(*index) { + instance_ref.globals[def_index].clone() } else { - let import = instance.imported_global(*index); + let 
import = instance_ref.imported_global(*index); import.from.clone() } }; - ExportGlobal { from }.into() + ExportGlobal { + from, + //instance_allocator: Some(instance), + } + .into() } } } @@ -1058,12 +1138,12 @@ impl InstanceHandle { /// Return a reference to the custom state attached to this instance. pub fn host_state(&self) -> &dyn Any { - self.instance().host_state() + self.instance().instance_ref().host_state() } /// Return the memory index for the given `VMMemoryDefinition` in this instance. pub fn memory_index(&self, memory: &VMMemoryDefinition) -> LocalMemoryIndex { - self.instance().memory_index(memory) + self.instance().instance_ref().memory_index(memory) } /// Grow memory in this instance by the specified amount of pages. @@ -1078,12 +1158,14 @@ impl InstanceHandle { where IntoPages: Into, { - self.instance().memory_grow(memory_index, delta) + self.instance() + .instance_ref() + .memory_grow(memory_index, delta) } /// Return the table index for the given `VMTableDefinition` in this instance. pub fn table_index(&self, table: &VMTableDefinition) -> LocalTableIndex { - self.instance().table_index(table) + self.instance().instance_ref().table_index(table) } /// Grow table in this instance by the specified amount of pages. @@ -1091,7 +1173,9 @@ impl InstanceHandle { /// Returns `None` if memory can't be grown by the specified amount /// of pages. pub fn table_grow(&self, table_index: LocalTableIndex, delta: u32) -> Option { - self.instance().table_grow(table_index, delta) + self.instance() + .instance_ref() + .table_grow(table_index, delta) } /// Get table element reference. @@ -1102,7 +1186,7 @@ impl InstanceHandle { table_index: LocalTableIndex, index: u32, ) -> Option { - self.instance().table_get(table_index, index) + self.instance().instance_ref().table_get(table_index, index) } /// Set table element reference. @@ -1114,39 +1198,14 @@ impl InstanceHandle { index: u32, val: VMCallerCheckedAnyfunc, ) -> Result<(), Trap> { - self.instance().table_set(table_index, index, val) + self.instance() + .instance_ref() + .table_set(table_index, index, val) } /// Get a table defined locally within this module. pub fn get_local_table(&self, index: LocalTableIndex) -> &dyn Table { - self.instance().get_local_table(index) - } - - /// Return a reference to the contained `Instance`. - pub(crate) fn instance(&self) -> &Instance { - unsafe { &*(self.instance as *const Instance) } - } - - /// Deallocates memory associated with this instance. - /// - /// # Safety - /// - /// This is unsafe because there might be other handles to this - /// `InstanceHandle` elsewhere, and there's nothing preventing - /// usage of this handle after this function is called. - pub unsafe fn dealloc(&self) { - let instance = self.instance(); - let layout = Instance::alloc_layout(&instance.offsets); - ptr::drop_in_place(self.instance); - alloc::dealloc(self.instance.cast(), layout); - } -} - -impl Drop for InstanceHandle { - fn drop(&mut self) { - if self.owned_instance { - unsafe { self.dealloc() } - } + self.instance().instance_ref().get_local_table(index) } } diff --git a/lib/vm/src/trap/traphandlers.rs b/lib/vm/src/trap/traphandlers.rs index 452e6ed61d9..cb0a407e7f0 100644 --- a/lib/vm/src/trap/traphandlers.rs +++ b/lib/vm/src/trap/traphandlers.rs @@ -615,12 +615,13 @@ impl CallThreadState { // in which case run them all. If anything handles the trap then we // return that the trap was handled. 
let any_instance = self.any_instance(|i| { - let handler = match i.instance().signal_handler.replace(None) { + let instance_ref = i.instance().instance_ref(); + let handler = match instance_ref.signal_handler.replace(None) { Some(handler) => handler, None => return false, }; let result = call_handler(&handler); - i.instance().signal_handler.set(Some(handler)); + instance_ref.signal_handler.set(Some(handler)); result }); From e77afea3832aca4d3ba0d9e660d447a943284d74 Mon Sep 17 00:00:00 2001 From: Ivan Enderlin Date: Mon, 23 Nov 2020 17:25:09 +0100 Subject: [PATCH 05/27] big patch: Start injecting `InstanceAllocator` everywhere. --- lib/api/src/externals/function.rs | 4 ++++ lib/api/src/externals/global.rs | 1 + lib/api/src/externals/memory.rs | 1 + lib/api/src/externals/table.rs | 1 + lib/api/src/native.rs | 2 ++ lib/api/src/types.rs | 1 + lib/vm/src/export.rs | 24 ++++++++++++------------ lib/vm/src/instance.rs | 8 ++++---- 8 files changed, 26 insertions(+), 16 deletions(-) diff --git a/lib/api/src/externals/function.rs b/lib/api/src/externals/function.rs index cf5bc04d5ff..4dd62a0be88 100644 --- a/lib/api/src/externals/function.rs +++ b/lib/api/src/externals/function.rs @@ -98,6 +98,7 @@ impl Function { vmctx, signature: ty.clone(), call_trampoline: None, + instance_allocator: None, }, } } @@ -150,6 +151,7 @@ impl Function { vmctx, signature: ty.clone(), call_trampoline: None, + instance_allocator: None, }, } } @@ -194,6 +196,7 @@ impl Function { signature, kind: VMFunctionKind::Static, call_trampoline: None, + instance_allocator: None, }, } } @@ -250,6 +253,7 @@ impl Function { vmctx, signature, call_trampoline: None, + instance_allocator: None, }, } } diff --git a/lib/api/src/externals/global.rs b/lib/api/src/externals/global.rs index 6dcac0d35dc..add92598ce3 100644 --- a/lib/api/src/externals/global.rs +++ b/lib/api/src/externals/global.rs @@ -218,6 +218,7 @@ impl<'a> Exportable<'a> for Global { fn to_export(&self) -> Export { ExportGlobal { from: self.global.clone(), + instance_allocator: None, } .into() } diff --git a/lib/api/src/externals/memory.rs b/lib/api/src/externals/memory.rs index 16dc7b202fb..f03583d30c3 100644 --- a/lib/api/src/externals/memory.rs +++ b/lib/api/src/externals/memory.rs @@ -248,6 +248,7 @@ impl<'a> Exportable<'a> for Memory { fn to_export(&self) -> Export { ExportMemory { from: self.memory.clone(), + instance_allocator: None, } .into() } diff --git a/lib/api/src/externals/table.rs b/lib/api/src/externals/table.rs index 92e145cae0c..27725d8e004 100644 --- a/lib/api/src/externals/table.rs +++ b/lib/api/src/externals/table.rs @@ -156,6 +156,7 @@ impl<'a> Exportable<'a> for Table { fn to_export(&self) -> Export { ExportTable { from: self.table.clone(), + instance_allocator: None, } .into() } diff --git a/lib/api/src/native.rs b/lib/api/src/native.rs index d1c6e78fd51..a119435020f 100644 --- a/lib/api/src/native.rs +++ b/lib/api/src/native.rs @@ -70,6 +70,7 @@ where signature, kind: other.arg_kind, call_trampoline: None, + instance_allocator: None, } } } @@ -90,6 +91,7 @@ where signature, kind: other.arg_kind, call_trampoline: None, + instance_allocator: None, }, } } diff --git a/lib/api/src/types.rs b/lib/api/src/types.rs index 638d79d3b22..2e3a378fcd9 100644 --- a/lib/api/src/types.rs +++ b/lib/api/src/types.rs @@ -81,6 +81,7 @@ impl ValFuncRef for Val { kind: wasmer_vm::VMFunctionKind::Static, vmctx: item.vmctx, call_trampoline: None, + instance_allocator: None, }; let f = Function::from_vm_export(store, export); Self::FuncRef(f) diff --git 
a/lib/vm/src/export.rs b/lib/vm/src/export.rs index 4675950be0f..60706f4c1c4 100644 --- a/lib/vm/src/export.rs +++ b/lib/vm/src/export.rs @@ -39,9 +39,9 @@ pub struct ExportFunction { /// Address of the function call trampoline owned by the same VMContext that owns the VMFunctionBody. /// May be None when the function is a host-function (FunctionType == Dynamic or vmctx == nullptr). pub call_trampoline: Option, - ///// A reference to the instance, to ensure it's not deallocated - ///// before the function. - //pub instance_allocator: Option>, + /// A reference to the instance, to ensure it's not deallocated + /// before the function. + pub instance_allocator: Option>, } /// # Safety @@ -63,9 +63,9 @@ impl From for Export { pub struct ExportTable { /// Pointer to the containing `Table`. pub from: Arc, - ///// A reference to the instance, to ensure it's not deallocated - ///// before the table. - //pub instance_allocator: Option>, + /// A reference to the instance, to ensure it's not deallocated + /// before the table. + pub instance_allocator: Option>, } /// # Safety @@ -107,9 +107,9 @@ impl From for Export { pub struct ExportMemory { /// Pointer to the containing `Memory`. pub from: Arc, - ///// A reference to the instance, to ensure it's not deallocated - ///// before the memory. - //pub instance_allocator: Option>, + /// A reference to the instance, to ensure it's not deallocated + /// before the memory. + pub instance_allocator: Option>, } /// # Safety @@ -151,9 +151,9 @@ impl From for Export { pub struct ExportGlobal { /// The global declaration, used for compatibility checking. pub from: Arc, - ///// A reference to the instance, to ensure it's not deallocated - ///// before the global. - //pub instance_allocator: Option>, + /// A reference to the instance, to ensure it's not deallocated + /// before the global. + pub instance_allocator: Option>, } /// # Safety diff --git a/lib/vm/src/instance.rs b/lib/vm/src/instance.rs index 7352e18cc34..c70c6c07494 100644 --- a/lib/vm/src/instance.rs +++ b/lib/vm/src/instance.rs @@ -1073,7 +1073,7 @@ impl InstanceHandle { signature, vmctx, call_trampoline, - //instance_allocator: Some(instance), + instance_allocator: Some(instance), } .into() } @@ -1088,7 +1088,7 @@ impl InstanceHandle { ExportTable { from, - //instance_allocator: Some(instance), + instance_allocator: Some(instance), } .into() } @@ -1103,7 +1103,7 @@ impl InstanceHandle { ExportMemory { from, - //instance_allocator: Some(instance), + instance_allocator: Some(instance), } .into() } @@ -1120,7 +1120,7 @@ impl InstanceHandle { ExportGlobal { from, - //instance_allocator: Some(instance), + instance_allocator: Some(instance), } .into() } From d7bcc211c2f3ec3bed530f76711f7d97fd638f85 Mon Sep 17 00:00:00 2001 From: Ivan Enderlin Date: Mon, 23 Nov 2020 17:33:22 +0100 Subject: [PATCH 06/27] big patch: add some traces. --- lib/api/src/externals/function.rs | 6 ++++++ lib/api/src/instance.rs | 6 ++++++ lib/vm/src/instance.rs | 10 +++++++--- 3 files changed, 19 insertions(+), 3 deletions(-) diff --git a/lib/api/src/externals/function.rs b/lib/api/src/externals/function.rs index 4dd62a0be88..9261dd38910 100644 --- a/lib/api/src/externals/function.rs +++ b/lib/api/src/externals/function.rs @@ -56,6 +56,12 @@ pub struct Function { pub(crate) exported: ExportFunction, } +impl Drop for Function { + fn drop(&mut self) { + println!("Dropping `wasmer::Function`"); + } +} + impl Function { /// Creates a new host `Function` (dynamic) with the provided signature. 
/// diff --git a/lib/api/src/instance.rs b/lib/api/src/instance.rs index 21986eea8e1..521fcea7ebd 100644 --- a/lib/api/src/instance.rs +++ b/lib/api/src/instance.rs @@ -115,3 +115,9 @@ impl fmt::Debug for Instance { .finish() } } + +impl Drop for Instance { + fn drop(&mut self) { + println!("Dropping `wasmer::Instance`"); + } +} diff --git a/lib/vm/src/instance.rs b/lib/vm/src/instance.rs index c70c6c07494..9edcdd04185 100644 --- a/lib/vm/src/instance.rs +++ b/lib/vm/src/instance.rs @@ -719,8 +719,6 @@ impl InstanceAllocator { fn allocate_instance(offsets: &VMOffsets) -> NonNull { let layout = Self::instance_layout(offsets); - dbg!(&layout); - #[allow(clippy::cast_ptr_alignment)] let arc_instance_ptr = unsafe { alloc::alloc(layout) as *mut Arc }; @@ -750,8 +748,15 @@ impl PartialEq for InstanceAllocator { } } +impl Drop for Instance { + fn drop(&mut self) { + println!("Dropping `wasmer_vm::Instance`"); + } +} + impl Drop for InstanceAllocator { fn drop(&mut self) { + println!("Dropping `wasmer_vm::InstanceAllocator`"); unsafe { Self::deallocate_instance(self) }; } } @@ -827,7 +832,6 @@ impl InstanceHandle { let handle = { let layout = InstanceAllocator::instance_layout(&offsets); - dbg!(&layout); let arc_instance = Arc::new(InstanceAllocator::new( Instance { module, From b7cdb8fcb32c30a4eecb308c877e688db8f3aef4 Mon Sep 17 00:00:00 2001 From: Ivan Enderlin Date: Tue, 24 Nov 2020 10:53:56 +0100 Subject: [PATCH 07/27] doc(vm) Code polishing and write more documentation. --- lib/engine/src/artifact.rs | 9 +- lib/vm/src/instance.rs | 211 ++++++++++++++++++++++++++----------- 2 files changed, 155 insertions(+), 65 deletions(-) diff --git a/lib/engine/src/artifact.rs b/lib/engine/src/artifact.rs index 91cfe4b1efe..3920b45c232 100644 --- a/lib/engine/src/artifact.rs +++ b/lib/engine/src/artifact.rs @@ -95,7 +95,8 @@ pub trait Artifact: Send + Sync + Upcastable { self.preinstantiate()?; let module = self.module(); - let (instance_ptr, offsets) = InstanceHandle::allocate_instance(&module); + let (arc_instance_allocator_ptr, offsets) = + InstanceHandle::allocate_instance_allocator(&module); let imports = resolve_imports( &module, resolver, @@ -106,14 +107,14 @@ pub trait Artifact: Send + Sync + Upcastable { .map_err(InstantiationError::Link)?; // Get pointers to where metadata about local memories should live in VM memory. let memory_definition_locations = - InstanceHandle::memory_definition_locations(instance_ptr, &offsets); + InstanceHandle::memory_definition_locations(arc_instance_allocator_ptr, &offsets); let finished_memories = tunables .create_memories(&module, self.memory_styles(), &memory_definition_locations) .map_err(InstantiationError::Link)? .into_boxed_slice(); // Get pointers to where metadata about local tables should live in VM memory. let table_definition_locations = - InstanceHandle::table_definition_locations(instance_ptr, &offsets); + InstanceHandle::table_definition_locations(arc_instance_allocator_ptr, &offsets); let finished_tables = tunables .create_tables(&module, self.table_styles(), &table_definition_locations) .map_err(InstantiationError::Link)? 
@@ -126,7 +127,7 @@ pub trait Artifact: Send + Sync + Upcastable { self.register_frame_info(); InstanceHandle::new( - instance_ptr, + arc_instance_allocator_ptr, offsets, module, self.finished_functions().clone(), diff --git a/lib/vm/src/instance.rs b/lib/vm/src/instance.rs index a4f077a65cb..0b6626f8e93 100644 --- a/lib/vm/src/instance.rs +++ b/lib/vm/src/instance.rs @@ -1,9 +1,12 @@ // This file contains code from external sources. // Attributions: https://github.com/wasmerio/wasmer/blob/master/ATTRIBUTIONS.md -//! An `Instance` contains all the runtime state used by execution of a -//! wasm module (except its callstack and register state). An -//! `InstanceHandle` is a wrapper around an `Instance`. +//! An `Instance` contains all the runtime state used by execution of +//! a WebAssembly module (except its callstack and register state). An +//! `InstanceAllocator` is a wrapper around `Instance` that manages +//! how it is allocated and deallocated. An `InstanceHandle` is a +//! wrapper around an `InstanceAllocator`. + use crate::export::Export; use crate::global::Global; use crate::imports::Imports; @@ -36,37 +39,12 @@ use wasmer_types::{ SignatureIndex, TableIndex, TableInitializer, }; -cfg_if::cfg_if! { - if #[cfg(unix)] { - pub type SignalHandler = dyn Fn(libc::c_int, *const libc::siginfo_t, *const libc::c_void) -> bool; - - impl InstanceHandle { - /// Set a custom signal handler - pub fn set_signal_handler(&self, handler: H) - where - H: 'static + Fn(libc::c_int, *const libc::siginfo_t, *const libc::c_void) -> bool, - { - self.instance().instance_ref().signal_handler.set(Some(Box::new(handler))); - } - } - } else if #[cfg(target_os = "windows")] { - pub type SignalHandler = dyn Fn(winapi::um::winnt::PEXCEPTION_POINTERS) -> bool; - - impl InstanceHandle { - /// Set a custom signal handler - pub fn set_signal_handler(&self, handler: H) - where - H: 'static + Fn(winapi::um::winnt::PEXCEPTION_POINTERS) -> bool, - { - self.instance().instance_ref().signal_handler.set(Some(Box::new(handler))); - } - } - } -} - /// A WebAssembly instance. /// -/// This is repr(C) to ensure that the vmctx field is last. +/// The type is dynamically-sized. Indeed, the `vmctx` field can +/// contain various data. That's why the type has a C representation +/// to ensure that the `vmctx` field is last. See the documentation of +/// the `vmctx` field to learn more. #[repr(C)] pub(crate) struct Instance { /// The `ModuleInfo` this `Instance` was instantiated from. @@ -105,9 +83,10 @@ pub(crate) struct Instance { /// Handler run when `SIGBUS`, `SIGFPE`, `SIGILL`, or `SIGSEGV` are caught by the instance thread. pub(crate) signal_handler: Cell>>, - /// Additional context used by compiled wasm code. This field is last, and - /// represents a dynamically-sized array that extends beyond the nominal - /// end of the struct (similar to a flexible array member). + /// Additional context used by compiled WebAssembly code. This + /// field is last, and represents a dynamically-sized array that + /// extends beyond the nominal end of the struct (similar to a + /// flexible array member). vmctx: VMContext, } @@ -333,7 +312,7 @@ impl Instance { } } - /// Return the offset from the vmctx pointer to its containing Instance. + /// Return the offset from the vmctx pointer to its containing `Instance`. #[inline] pub(crate) fn vmctx_offset() -> isize { offset_of!(Self, vmctx) as isize @@ -438,7 +417,7 @@ impl Instance { result } - // Get table element by index. + /// Get table element by index. 
fn table_get( &self, table_index: LocalTableIndex, @@ -450,6 +429,7 @@ impl Instance { .get(index) } + /// Set table element by index. fn table_set( &self, table_index: LocalTableIndex, @@ -482,6 +462,7 @@ impl Instance { let import = self.imported_function(index); (import.body, import.environment) }; + VMCallerCheckedAnyfunc { func_ptr, type_index, @@ -680,38 +661,107 @@ impl Instance { } } +/// An `InstanceAllocator` is responsible to allocate, to deallocate, +/// and to give access to an `Instance`, in such a way that `Instance` +/// is unique, can be shared, safely, across threads, without +/// duplicating the pointer. `InstanceAllocator` must be the only +/// owner of an `Instance`. +/// +/// Consequently, one must not share `Instance` but +/// `InstanceAllocator` behind a reference-counting pointer. It is +/// designed like that. That's why the astute reader will notice there +/// is only the `Arc` type in the codebase, never +/// `InstanceAllocator` alone. +/// +/// It is important to remind that `Instance` has a dynamic size +/// defined by `VMOffsets`: The `Instance.vmctx` field represents a +/// dynamically-sized array that extends beyond the nominal end of the +/// type. So in order to create an instance of it, we must: +/// +/// 1. Define the correct layout for `Instance` (size and alignment), +/// 2. Allocate it properly. +/// +/// It must be considered that not only the layout of `Instance` is +/// needed, but the layout of `Arc` +/// entirely. That's exactly what `InstanceAllocator::instance_layout` +/// does. +/// +/// Then `InstanceAllocator::allocate_self` will use this layout +/// to allocate an empty `Arc` properly. This +/// allocation must be freed with +/// `InstanceAllocator::deallocate_self` if and only if it has +/// been set correctly. The `Drop` implementation of +/// `InstanceAllocator` calls its `deallocate_self` method without +/// checking if this property holds. +/// +/// Note for the curious reader: `InstanceHandle::allocate_self` +/// and `InstanceHandle::new` will respectively allocate a proper +/// `Arc` and will fill it correctly. +/// +/// A little bit of background: The initial goal was to be able to +/// shared an `Instance` between an `InstanceHandle` and the module +/// exports, so that one can drop a `InstanceHandle` but still being +/// able to use the exports properly. +/// +/// This structure has a C representation because `Instance` is +/// dynamically-sized, and the `instance` field must be last. #[derive(Debug)] #[repr(C)] pub struct InstanceAllocator { - ptr_to_dealloc: *const Arc, - ptr_layout: Layout, + /// The pointer to the allocation for `Self`, from the + /// `Self::allocate_self` method. + self_ptr: *const Arc, + + /// The layout of `Self` (which can vary depending of the + /// `Instance`'s layout). + self_layout: Layout, + + /// The `Instance` itself. It must be the last field of + /// `InstanceAllocator` since `Instance` is dyamically-sized. + /// + /// `Instance` must not be dropped manually by Rust, because it's + /// allocated manually with `alloc` and a specific layout (Rust + /// would be able to drop `Instance` itself but it will imply a + /// memory leak because of `alloc`), hence the `ManuallyDrop`. instance: mem::ManuallyDrop, } impl InstanceAllocator { + /// Create a new `InstanceAllocator`. It allocates nothing. It + /// fills nothing. The `Instance` must be already valid and + /// filled. `self_ptr` and `self_layout` must be the pointer and + /// the layout returned by `Self::allocate_self` used to build + /// `Self`. 
#[inline(always)] - const fn new(instance: Instance, ptr_to_dealloc: *const Arc, ptr_layout: Layout) -> Self { + const fn new(instance: Instance, self_ptr: *const Arc, self_layout: Layout) -> Self { Self { + self_ptr, + self_layout, instance: mem::ManuallyDrop::new(instance), - ptr_to_dealloc, - ptr_layout, } } + /// Calculate the appropriate layout for `Self`. fn instance_layout(offsets: &VMOffsets) -> Layout { - let size = mem::size_of::>>() - .checked_add(usize::try_from(offsets.size_of_vmctx()).unwrap()) - .unwrap(); - let align = mem::align_of::>>(); + let size = mem::size_of::>() + .checked_add( + usize::try_from(offsets.size_of_vmctx()) + .expect("Failed to convert the size of `vmctx` to a `usize`"), + ) + .expect("Failed to compute the size of `Arc`"); + let align = mem::align_of::>(); Layout::from_size_align(size, align).unwrap() } - fn allocate_instance(offsets: &VMOffsets) -> NonNull { + /// Allocate `Self`. + /// + /// `offsets` is used to compute the layout with `Self::instance_layout`. + fn allocate_self(offsets: &VMOffsets) -> (NonNull>, Layout) { let layout = Self::instance_layout(offsets); #[allow(clippy::cast_ptr_alignment)] - let arc_instance_ptr = unsafe { alloc::alloc(layout) as *mut Arc }; + let arc_instance_ptr = unsafe { alloc::alloc(layout) as *mut Arc }; let ptr = if let Some(ptr) = NonNull::new(arc_instance_ptr) { ptr.cast() @@ -719,14 +769,21 @@ impl InstanceAllocator { alloc::handle_alloc_error(layout); }; - ptr + (ptr, layout) } - unsafe fn deallocate_instance(&mut self) { + /// Deallocate `Self`. + /// + /// # Safety + /// + /// `Self` must be correctly set and filled before being dropped + /// and deallocated. + unsafe fn deallocate_self(&mut self) { mem::ManuallyDrop::drop(&mut self.instance); - std::alloc::dealloc(self.ptr_to_dealloc as *mut _, self.ptr_layout); + std::alloc::dealloc(self.self_ptr as *mut u8, self.self_layout); } + /// Get a reference to the `Instance`. #[inline(always)] pub(crate) fn instance_ref(&self) -> &Instance { &self.instance @@ -735,7 +792,7 @@ impl InstanceAllocator { impl PartialEq for InstanceAllocator { fn eq(&self, other: &Self) -> bool { - self.ptr_to_dealloc == other.ptr_to_dealloc + self.self_ptr == other.self_ptr } } @@ -748,7 +805,7 @@ impl Drop for Instance { impl Drop for InstanceAllocator { fn drop(&mut self) { println!("Dropping `wasmer_vm::InstanceAllocator`"); - unsafe { Self::deallocate_instance(self) }; + unsafe { Self::deallocate_self(self) }; } } @@ -768,10 +825,12 @@ impl InstanceHandle { /// /// Returns the instance pointer and the [`VMOffsets`] that describe the /// memory buffer pointed to by the instance pointer. - pub fn allocate_instance(module: &ModuleInfo) -> (NonNull, VMOffsets) { + pub fn allocate_instance_allocator(module: &ModuleInfo) -> (NonNull, VMOffsets) { let offsets = VMOffsets::new(mem::size_of::<*const u8>() as u8, module); + let (arc_instance_allocator_ptr, _arc_instance_allocator_layout) = + InstanceAllocator::allocate_self(&offsets); - (InstanceAllocator::allocate_instance(&offsets), offsets) + (arc_instance_allocator_ptr.cast(), offsets) } /// Create a new `InstanceHandle` pointing at a new `Instance`. @@ -801,7 +860,7 @@ impl InstanceHandle { /// all the local memories. 
#[allow(clippy::too_many_arguments)] pub unsafe fn new( - arc_instance_ptr: NonNull, + arc_instance_allocator_ptr: NonNull, offsets: VMOffsets, module: Arc, finished_functions: BoxedSlice, @@ -813,7 +872,9 @@ impl InstanceHandle { vmshared_signatures: BoxedSlice, host_state: Box, ) -> Result { - let arc_instance_ptr = arc_instance_ptr.cast::>().as_ptr(); + let arc_instance_allocator_ptr = arc_instance_allocator_ptr + .cast::>() + .as_ptr(); let vmctx_globals = finished_globals .values() .map(|m| m.vmglobal()) @@ -822,7 +883,7 @@ impl InstanceHandle { let passive_data = RefCell::new(module.passive_data.clone()); let handle = { - let layout = InstanceAllocator::instance_layout(&offsets); + let arc_instance_allocator_layout = InstanceAllocator::instance_layout(&offsets); let arc_instance = Arc::new(InstanceAllocator::new( Instance { module, @@ -838,14 +899,14 @@ impl InstanceHandle { signal_handler: Cell::new(None), vmctx: VMContext {}, }, - arc_instance_ptr, - layout, + arc_instance_allocator_ptr, + arc_instance_allocator_layout, )); let instance_ptr = Arc::as_ptr(&arc_instance); debug_assert_eq!(Arc::strong_count(&arc_instance), 1); - ptr::write(arc_instance_ptr, arc_instance); + ptr::write(arc_instance_allocator_ptr, arc_instance); let instance = Arc::from_raw(instance_ptr); debug_assert_eq!(Arc::strong_count(&instance), 1); @@ -1204,6 +1265,34 @@ impl InstanceHandle { } } +cfg_if::cfg_if! { + if #[cfg(unix)] { + pub type SignalHandler = dyn Fn(libc::c_int, *const libc::siginfo_t, *const libc::c_void) -> bool; + + impl InstanceHandle { + /// Set a custom signal handler + pub fn set_signal_handler(&self, handler: H) + where + H: 'static + Fn(libc::c_int, *const libc::siginfo_t, *const libc::c_void) -> bool, + { + self.instance().instance_ref().signal_handler.set(Some(Box::new(handler))); + } + } + } else if #[cfg(target_os = "windows")] { + pub type SignalHandler = dyn Fn(winapi::um::winnt::PEXCEPTION_POINTERS) -> bool; + + impl InstanceHandle { + /// Set a custom signal handler + pub fn set_signal_handler(&self, handler: H) + where + H: 'static + Fn(winapi::um::winnt::PEXCEPTION_POINTERS) -> bool, + { + self.instance().instance_ref().signal_handler.set(Some(Box::new(handler))); + } + } + } +} + fn check_table_init_bounds(instance: &Instance) -> Result<(), Trap> { let module = Arc::clone(&instance.module); for init in &module.table_initializers { From 0a63e56faf9c1ae3312fc1f520cc9c1a1a599058 Mon Sep 17 00:00:00 2001 From: Ivan Enderlin Date: Tue, 24 Nov 2020 11:06:10 +0100 Subject: [PATCH 08/27] fix(vm) Avoid casting to `NonNull`. Originally, `Instance` was private and we were casting to `NonNull` to avoid exposing the type publicly. Now we can just use `Arc`. --- lib/vm/src/instance.rs | 31 ++++++++++++++++++------------- 1 file changed, 18 insertions(+), 13 deletions(-) diff --git a/lib/vm/src/instance.rs b/lib/vm/src/instance.rs index 0b6626f8e93..0d50db20251 100644 --- a/lib/vm/src/instance.rs +++ b/lib/vm/src/instance.rs @@ -764,7 +764,7 @@ impl InstanceAllocator { let arc_instance_ptr = unsafe { alloc::alloc(layout) as *mut Arc }; let ptr = if let Some(ptr) = NonNull::new(arc_instance_ptr) { - ptr.cast() + ptr } else { alloc::handle_alloc_error(layout); }; @@ -825,12 +825,14 @@ impl InstanceHandle { /// /// Returns the instance pointer and the [`VMOffsets`] that describe the /// memory buffer pointed to by the instance pointer. 
- pub fn allocate_instance_allocator(module: &ModuleInfo) -> (NonNull, VMOffsets) { + pub fn allocate_instance_allocator( + module: &ModuleInfo, + ) -> (NonNull>, VMOffsets) { let offsets = VMOffsets::new(mem::size_of::<*const u8>() as u8, module); let (arc_instance_allocator_ptr, _arc_instance_allocator_layout) = InstanceAllocator::allocate_self(&offsets); - (arc_instance_allocator_ptr.cast(), offsets) + (arc_instance_allocator_ptr, offsets) } /// Create a new `InstanceHandle` pointing at a new `Instance`. @@ -860,7 +862,7 @@ impl InstanceHandle { /// all the local memories. #[allow(clippy::too_many_arguments)] pub unsafe fn new( - arc_instance_allocator_ptr: NonNull, + arc_instance_allocator_ptr: NonNull>, offsets: VMOffsets, module: Arc, finished_functions: BoxedSlice, @@ -872,9 +874,7 @@ impl InstanceHandle { vmshared_signatures: BoxedSlice, host_state: Box, ) -> Result { - let arc_instance_allocator_ptr = arc_instance_allocator_ptr - .cast::>() - .as_ptr(); + let arc_instance_allocator_ptr = arc_instance_allocator_ptr.as_ptr(); let vmctx_globals = finished_globals .values() .map(|m| m.vmglobal()) @@ -997,14 +997,16 @@ impl InstanceHandle { /// - `instance_ptr` must point to enough memory that all of the offsets in /// `offsets` point to valid locations in memory. pub unsafe fn memory_definition_locations( - instance_ptr: NonNull, + arc_instance_allocator_ptr: NonNull>, offsets: &VMOffsets, ) -> Vec> { let num_memories = offsets.num_local_memories; let num_memories = usize::try_from(num_memories).unwrap(); let mut out = Vec::with_capacity(num_memories); - // TODO: better encapsulate this logic, this shouldn't be duplicated - let base_ptr = instance_ptr.as_ptr().add(std::mem::size_of::()); + + let instance: &Instance = arc_instance_allocator_ptr.as_ref().instance_ref(); + let instance_ptr: *mut Instance = instance as *const _ as *mut _; + let base_ptr = instance_ptr.add(mem::size_of::()); for i in 0..num_memories { let mem_offset = offsets.vmctx_vmmemory_definition(LocalMemoryIndex::new(i)); @@ -1014,6 +1016,7 @@ impl InstanceHandle { out.push(new_ptr.cast()); } + out } @@ -1026,14 +1029,16 @@ impl InstanceHandle { /// - `instance_ptr` must point to enough memory that all of the offsets in /// `offsets` point to valid locations in memory. 
pub unsafe fn table_definition_locations( - instance_ptr: NonNull, + arc_instance_allocator_ptr: NonNull>, offsets: &VMOffsets, ) -> Vec> { let num_tables = offsets.num_local_tables; let num_tables = usize::try_from(num_tables).unwrap(); let mut out = Vec::with_capacity(num_tables); - // TODO: better encapsulate this logic, this shouldn't be duplicated - let base_ptr = instance_ptr.as_ptr().add(std::mem::size_of::()); + + let instance: &Instance = arc_instance_allocator_ptr.as_ref().instance_ref(); + let instance_ptr: *mut Instance = instance as *const _ as *mut _; + let base_ptr = instance_ptr.add(std::mem::size_of::()); for i in 0..num_tables { let table_offset = offsets.vmctx_vmtable_definition(LocalTableIndex::new(i)); From f4d9deb902e05814dba2e8fa3e0662b4dd69a8fd Mon Sep 17 00:00:00 2001 From: Ivan Enderlin Date: Tue, 24 Nov 2020 14:55:01 +0100 Subject: [PATCH 09/27] try something different --- lib/engine/src/artifact.rs | 9 +- lib/vm/src/export.rs | 8 +- lib/vm/src/instance.rs | 163 +++++++++++++++++++++++-------------- 3 files changed, 111 insertions(+), 69 deletions(-) diff --git a/lib/engine/src/artifact.rs b/lib/engine/src/artifact.rs index 3920b45c232..91cfe4b1efe 100644 --- a/lib/engine/src/artifact.rs +++ b/lib/engine/src/artifact.rs @@ -95,8 +95,7 @@ pub trait Artifact: Send + Sync + Upcastable { self.preinstantiate()?; let module = self.module(); - let (arc_instance_allocator_ptr, offsets) = - InstanceHandle::allocate_instance_allocator(&module); + let (instance_ptr, offsets) = InstanceHandle::allocate_instance(&module); let imports = resolve_imports( &module, resolver, @@ -107,14 +106,14 @@ pub trait Artifact: Send + Sync + Upcastable { .map_err(InstantiationError::Link)?; // Get pointers to where metadata about local memories should live in VM memory. let memory_definition_locations = - InstanceHandle::memory_definition_locations(arc_instance_allocator_ptr, &offsets); + InstanceHandle::memory_definition_locations(instance_ptr, &offsets); let finished_memories = tunables .create_memories(&module, self.memory_styles(), &memory_definition_locations) .map_err(InstantiationError::Link)? .into_boxed_slice(); // Get pointers to where metadata about local tables should live in VM memory. let table_definition_locations = - InstanceHandle::table_definition_locations(arc_instance_allocator_ptr, &offsets); + InstanceHandle::table_definition_locations(instance_ptr, &offsets); let finished_tables = tunables .create_tables(&module, self.table_styles(), &table_definition_locations) .map_err(InstantiationError::Link)? @@ -127,7 +126,7 @@ pub trait Artifact: Send + Sync + Upcastable { self.register_frame_info(); InstanceHandle::new( - arc_instance_allocator_ptr, + instance_ptr, offsets, module, self.finished_functions().clone(), diff --git a/lib/vm/src/export.rs b/lib/vm/src/export.rs index 60706f4c1c4..ffd5b5da08b 100644 --- a/lib/vm/src/export.rs +++ b/lib/vm/src/export.rs @@ -41,7 +41,7 @@ pub struct ExportFunction { pub call_trampoline: Option, /// A reference to the instance, to ensure it's not deallocated /// before the function. - pub instance_allocator: Option>, + pub instance_allocator: Option, } /// # Safety @@ -65,7 +65,7 @@ pub struct ExportTable { pub from: Arc, /// A reference to the instance, to ensure it's not deallocated /// before the table. - pub instance_allocator: Option>, + pub instance_allocator: Option, } /// # Safety @@ -109,7 +109,7 @@ pub struct ExportMemory { pub from: Arc, /// A reference to the instance, to ensure it's not deallocated /// before the memory. 
- pub instance_allocator: Option>, + pub instance_allocator: Option, } /// # Safety @@ -153,7 +153,7 @@ pub struct ExportGlobal { pub from: Arc, /// A reference to the instance, to ensure it's not deallocated /// before the global. - pub instance_allocator: Option>, + pub instance_allocator: Option, } /// # Safety diff --git a/lib/vm/src/instance.rs b/lib/vm/src/instance.rs index 0d50db20251..20200b4531c 100644 --- a/lib/vm/src/instance.rs +++ b/lib/vm/src/instance.rs @@ -30,7 +30,7 @@ use std::collections::HashMap; use std::convert::{TryFrom, TryInto}; use std::fmt; use std::ptr::NonNull; -use std::sync::Arc; +use std::sync::{atomic, Arc}; use std::{mem, ptr, slice}; use wasmer_types::entity::{packed_option::ReservedValue, BoxedSlice, EntityRef, PrimaryMap}; use wasmer_types::{ @@ -710,11 +710,12 @@ impl Instance { pub struct InstanceAllocator { /// The pointer to the allocation for `Self`, from the /// `Self::allocate_self` method. - self_ptr: *const Arc, + //self_ptr: *const Self, + strong: Arc, /// The layout of `Self` (which can vary depending of the /// `Instance`'s layout). - self_layout: Layout, + instance_layout: Layout, /// The `Instance` itself. It must be the last field of /// `InstanceAllocator` since `Instance` is dyamically-sized. @@ -723,7 +724,7 @@ pub struct InstanceAllocator { /// allocated manually with `alloc` and a specific layout (Rust /// would be able to drop `Instance` itself but it will imply a /// memory leak because of `alloc`), hence the `ManuallyDrop`. - instance: mem::ManuallyDrop, + instance: NonNull, } impl InstanceAllocator { @@ -733,23 +734,27 @@ impl InstanceAllocator { /// the layout returned by `Self::allocate_self` used to build /// `Self`. #[inline(always)] - const fn new(instance: Instance, self_ptr: *const Arc, self_layout: Layout) -> Self { + fn new( + instance: NonNull, + /*self_ptr: *const Self, */ instance_layout: Layout, + ) -> Self { Self { - self_ptr, - self_layout, - instance: mem::ManuallyDrop::new(instance), + //self_ptr, + strong: Arc::new(atomic::AtomicUsize::new(1)), + instance_layout, + instance, } } /// Calculate the appropriate layout for `Self`. fn instance_layout(offsets: &VMOffsets) -> Layout { - let size = mem::size_of::>() + let size = mem::size_of::() .checked_add( usize::try_from(offsets.size_of_vmctx()) .expect("Failed to convert the size of `vmctx` to a `usize`"), ) - .expect("Failed to compute the size of `Arc`"); - let align = mem::align_of::>(); + .unwrap(); + let align = mem::align_of::(); Layout::from_size_align(size, align).unwrap() } @@ -757,13 +762,13 @@ impl InstanceAllocator { /// Allocate `Self`. /// /// `offsets` is used to compute the layout with `Self::instance_layout`. - fn allocate_self(offsets: &VMOffsets) -> (NonNull>, Layout) { + fn allocate_instance(offsets: &VMOffsets) -> (NonNull, Layout) { let layout = Self::instance_layout(offsets); #[allow(clippy::cast_ptr_alignment)] - let arc_instance_ptr = unsafe { alloc::alloc(layout) as *mut Arc }; + let instance_ptr = unsafe { alloc::alloc(layout) as *mut Instance }; - let ptr = if let Some(ptr) = NonNull::new(arc_instance_ptr) { + let ptr = if let Some(ptr) = NonNull::new(instance_ptr) { ptr } else { alloc::handle_alloc_error(layout); @@ -778,21 +783,47 @@ impl InstanceAllocator { /// /// `Self` must be correctly set and filled before being dropped /// and deallocated. 
- unsafe fn deallocate_self(&mut self) { - mem::ManuallyDrop::drop(&mut self.instance); - std::alloc::dealloc(self.self_ptr as *mut u8, self.self_layout); + unsafe fn deallocate_instance(&mut self) { + //mem::ManuallyDrop::drop(&mut self.instance); + let instance_ptr = self.instance.as_ptr(); + ptr::drop_in_place(instance_ptr); + std::alloc::dealloc(instance_ptr as *mut u8, self.instance_layout); } /// Get a reference to the `Instance`. #[inline(always)] pub(crate) fn instance_ref(&self) -> &Instance { - &self.instance + unsafe { self.instance.as_ref() } + } + + fn instance_ptr(&self) -> *const Instance { + self.instance.as_ptr() + } +} + +impl Clone for InstanceAllocator { + #[inline] + fn clone(&self) -> Self { + let _old_size = self.strong.fetch_add(1, atomic::Ordering::Relaxed); + + /* + let old_size > MAX_REFCOUNT { + abort(); + } + */ + + Self { + //self_ptr: self.self_ptr, + strong: self.strong.clone(), + instance_layout: self.instance_layout, + instance: self.instance.clone(), + } } } impl PartialEq for InstanceAllocator { fn eq(&self, other: &Self) -> bool { - self.self_ptr == other.self_ptr + self.instance == other.instance } } @@ -804,15 +835,26 @@ impl Drop for Instance { impl Drop for InstanceAllocator { fn drop(&mut self) { - println!("Dropping `wasmer_vm::InstanceAllocator`"); - unsafe { Self::deallocate_self(self) }; + println!("Trying to drop `wasmer_vm::InstanceAllocator`..."); + + if self.strong.fetch_sub(1, atomic::Ordering::Release) != 1 { + println!("... not now"); + + return; + } + + println!("Yep, dropping it."); + + atomic::fence(atomic::Ordering::Acquire); + + unsafe { Self::deallocate_instance(self) }; } } /// A handle holding an `Instance` of a WebAssembly module. //#[derive(Hash, PartialEq, Eq)] pub struct InstanceHandle { - instance: Arc, + instance: InstanceAllocator, } /// # Safety @@ -825,14 +867,11 @@ impl InstanceHandle { /// /// Returns the instance pointer and the [`VMOffsets`] that describe the /// memory buffer pointed to by the instance pointer. - pub fn allocate_instance_allocator( - module: &ModuleInfo, - ) -> (NonNull>, VMOffsets) { + pub fn allocate_instance(module: &ModuleInfo) -> (NonNull, VMOffsets) { let offsets = VMOffsets::new(mem::size_of::<*const u8>() as u8, module); - let (arc_instance_allocator_ptr, _arc_instance_allocator_layout) = - InstanceAllocator::allocate_self(&offsets); + let (instance_ptr, _instance_layout) = InstanceAllocator::allocate_instance(&offsets); - (arc_instance_allocator_ptr, offsets) + (instance_ptr.cast(), offsets) } /// Create a new `InstanceHandle` pointing at a new `Instance`. @@ -862,7 +901,7 @@ impl InstanceHandle { /// all the local memories. 
#[allow(clippy::too_many_arguments)] pub unsafe fn new( - arc_instance_allocator_ptr: NonNull>, + instance_ptr: NonNull, offsets: VMOffsets, module: Arc, finished_functions: BoxedSlice, @@ -874,7 +913,7 @@ impl InstanceHandle { vmshared_signatures: BoxedSlice, host_state: Box, ) -> Result { - let arc_instance_allocator_ptr = arc_instance_allocator_ptr.as_ptr(); + let instance_ptr: NonNull = instance_ptr.cast(); let vmctx_globals = finished_globals .values() .map(|m| m.vmglobal()) @@ -883,35 +922,41 @@ impl InstanceHandle { let passive_data = RefCell::new(module.passive_data.clone()); let handle = { - let arc_instance_allocator_layout = InstanceAllocator::instance_layout(&offsets); - let arc_instance = Arc::new(InstanceAllocator::new( - Instance { - module, - offsets, - memories: finished_memories, - tables: finished_tables, - globals: finished_globals, - functions: finished_functions, - function_call_trampolines: finished_function_call_trampolines, - passive_elements: Default::default(), - passive_data, - host_state, - signal_handler: Cell::new(None), - vmctx: VMContext {}, - }, - arc_instance_allocator_ptr, - arc_instance_allocator_layout, - )); - + dbg!(instance_ptr); + let instance_layout = InstanceAllocator::instance_layout(&offsets); + let instance = Instance { + module, + offsets, + memories: finished_memories, + tables: finished_tables, + globals: finished_globals, + functions: finished_functions, + function_call_trampolines: finished_function_call_trampolines, + passive_elements: Default::default(), + passive_data, + host_state, + signal_handler: Cell::new(None), + vmctx: VMContext {}, + }; + + ptr::write(instance_ptr.as_ptr(), instance); + + let instance_allocator = InstanceAllocator::new(instance_ptr, instance_layout); + + /* let instance_ptr = Arc::as_ptr(&arc_instance); + dbg!(instance_ptr); debug_assert_eq!(Arc::strong_count(&arc_instance), 1); ptr::write(arc_instance_allocator_ptr, arc_instance); let instance = Arc::from_raw(instance_ptr); debug_assert_eq!(Arc::strong_count(&instance), 1); + */ - Self { instance } + Self { + instance: instance_allocator, + } }; let instance = handle.instance().instance_ref(); @@ -984,7 +1029,7 @@ impl InstanceHandle { } /// Return a reference to the contained `Instance`. - pub(crate) fn instance(&self) -> &Arc { + pub(crate) fn instance(&self) -> &InstanceAllocator { &self.instance } @@ -997,16 +1042,15 @@ impl InstanceHandle { /// - `instance_ptr` must point to enough memory that all of the offsets in /// `offsets` point to valid locations in memory. pub unsafe fn memory_definition_locations( - arc_instance_allocator_ptr: NonNull>, + instance_ptr: NonNull, offsets: &VMOffsets, ) -> Vec> { + let instance_ptr: NonNull = instance_ptr.cast(); let num_memories = offsets.num_local_memories; let num_memories = usize::try_from(num_memories).unwrap(); let mut out = Vec::with_capacity(num_memories); - let instance: &Instance = arc_instance_allocator_ptr.as_ref().instance_ref(); - let instance_ptr: *mut Instance = instance as *const _ as *mut _; - let base_ptr = instance_ptr.add(mem::size_of::()); + let base_ptr = instance_ptr.as_ptr().add(mem::size_of::()); for i in 0..num_memories { let mem_offset = offsets.vmctx_vmmemory_definition(LocalMemoryIndex::new(i)); @@ -1029,16 +1073,15 @@ impl InstanceHandle { /// - `instance_ptr` must point to enough memory that all of the offsets in /// `offsets` point to valid locations in memory. 
pub unsafe fn table_definition_locations( - arc_instance_allocator_ptr: NonNull>, + instance_ptr: NonNull, offsets: &VMOffsets, ) -> Vec> { + let instance_ptr: NonNull = instance_ptr.cast(); let num_tables = offsets.num_local_tables; let num_tables = usize::try_from(num_tables).unwrap(); let mut out = Vec::with_capacity(num_tables); - let instance: &Instance = arc_instance_allocator_ptr.as_ref().instance_ref(); - let instance_ptr: *mut Instance = instance as *const _ as *mut _; - let base_ptr = instance_ptr.add(std::mem::size_of::()); + let base_ptr = instance_ptr.as_ptr().add(std::mem::size_of::()); for i in 0..num_tables { let table_offset = offsets.vmctx_vmtable_definition(LocalTableIndex::new(i)); From 7f06d4f0c6049fa7005369823ad957a8ae664b99 Mon Sep 17 00:00:00 2001 From: Ivan Enderlin Date: Tue, 24 Nov 2020 16:51:01 +0100 Subject: [PATCH 10/27] polish and update documentation --- lib/vm/src/instance.rs | 201 +++++++++++++++++++++++++---------------- 1 file changed, 125 insertions(+), 76 deletions(-) diff --git a/lib/vm/src/instance.rs b/lib/vm/src/instance.rs index 20200b4531c..226b4875597 100644 --- a/lib/vm/src/instance.rs +++ b/lib/vm/src/instance.rs @@ -664,39 +664,36 @@ impl Instance { /// An `InstanceAllocator` is responsible to allocate, to deallocate, /// and to give access to an `Instance`, in such a way that `Instance` /// is unique, can be shared, safely, across threads, without -/// duplicating the pointer. `InstanceAllocator` must be the only -/// owner of an `Instance`. +/// duplicating the pointer in multiple locations. `InstanceAllocator` +/// must be the only “owner” of an `Instance`. /// /// Consequently, one must not share `Instance` but -/// `InstanceAllocator` behind a reference-counting pointer. It is -/// designed like that. That's why the astute reader will notice there -/// is only the `Arc` type in the codebase, never -/// `InstanceAllocator` alone. +/// `InstanceAllocator`. It acts like an Atomically Reference Counted +/// to `Instance`. In short, `InstanceAllocator` is roughly a +/// simplified version of `std::sync::Arc`. /// -/// It is important to remind that `Instance` has a dynamic size -/// defined by `VMOffsets`: The `Instance.vmctx` field represents a +/// It is important to remind that `Instance` is dynamically-sized +/// based on `VMOffsets`: The `Instance.vmctx` field represents a /// dynamically-sized array that extends beyond the nominal end of the /// type. So in order to create an instance of it, we must: /// /// 1. Define the correct layout for `Instance` (size and alignment), /// 2. Allocate it properly. /// -/// It must be considered that not only the layout of `Instance` is -/// needed, but the layout of `Arc` -/// entirely. That's exactly what `InstanceAllocator::instance_layout` -/// does. +/// The `InstanceAllocator::instance_layout` computes the correct +/// layout to represent the wanted `Instance`. /// -/// Then `InstanceAllocator::allocate_self` will use this layout -/// to allocate an empty `Arc` properly. This -/// allocation must be freed with -/// `InstanceAllocator::deallocate_self` if and only if it has -/// been set correctly. The `Drop` implementation of -/// `InstanceAllocator` calls its `deallocate_self` method without -/// checking if this property holds. +/// Then `InstanceAllocator::allocate_instance` will use this layout +/// to allocate an empty `Instance` properly. This allocation must be +/// freed with `InstanceAllocator::deallocate_instance` if and only if +/// it has been set correctly. 
The `Drop` implementation of +/// `InstanceAllocator` calls its `deallocate_instance` method without +/// checking if this property holds, only when `Self.strong` is equal +/// to 1. /// -/// Note for the curious reader: `InstanceHandle::allocate_self` +/// Note for the curious reader: `InstanceHandle::allocate_instance` /// and `InstanceHandle::new` will respectively allocate a proper -/// `Arc` and will fill it correctly. +/// `Instance` and will fill it correctly. /// /// A little bit of background: The initial goal was to be able to /// shared an `Instance` between an `InstanceHandle` and the module @@ -708,13 +705,11 @@ impl Instance { #[derive(Debug)] #[repr(C)] pub struct InstanceAllocator { - /// The pointer to the allocation for `Self`, from the - /// `Self::allocate_self` method. - //self_ptr: *const Self, + /// Number of `Self` in the nature. It increases when `Self` is + /// cloned, and it decreases when `Self` is dropped. strong: Arc, - /// The layout of `Self` (which can vary depending of the - /// `Instance`'s layout). + /// The layout of `Instance` (which can vary). instance_layout: Layout, /// The `Instance` itself. It must be the last field of @@ -723,7 +718,10 @@ pub struct InstanceAllocator { /// `Instance` must not be dropped manually by Rust, because it's /// allocated manually with `alloc` and a specific layout (Rust /// would be able to drop `Instance` itself but it will imply a - /// memory leak because of `alloc`), hence the `ManuallyDrop`. + /// memory leak because of `alloc`). + /// + /// No one in the code has a copy of the `Instance`'s + /// pointer. `Self` is the only one. instance: NonNull, } @@ -733,33 +731,28 @@ impl InstanceAllocator { /// filled. `self_ptr` and `self_layout` must be the pointer and /// the layout returned by `Self::allocate_self` used to build /// `Self`. - #[inline(always)] - fn new( - instance: NonNull, - /*self_ptr: *const Self, */ instance_layout: Layout, - ) -> Self { + fn new(instance: NonNull, instance_layout: Layout) -> Self { Self { - //self_ptr, strong: Arc::new(atomic::AtomicUsize::new(1)), instance_layout, instance, } } - /// Calculate the appropriate layout for `Self`. + /// Calculate the appropriate layout for `Instance`. fn instance_layout(offsets: &VMOffsets) -> Layout { let size = mem::size_of::() .checked_add( usize::try_from(offsets.size_of_vmctx()) .expect("Failed to convert the size of `vmctx` to a `usize`"), ) - .unwrap(); + .expect("Failed to compute the size of `Instance`"); let align = mem::align_of::(); Layout::from_size_align(size, align).unwrap() } - /// Allocate `Self`. + /// Allocate `Instance` (it is an uninitialized pointer). /// /// `offsets` is used to compute the layout with `Self::instance_layout`. fn allocate_instance(offsets: &VMOffsets) -> (NonNull, Layout) { @@ -777,43 +770,59 @@ impl InstanceAllocator { (ptr, layout) } - /// Deallocate `Self`. + /// Deallocate `Instance`. /// /// # Safety /// - /// `Self` must be correctly set and filled before being dropped - /// and deallocated. + /// `Self.instance` must be correctly set and filled before being + /// dropped and deallocated. unsafe fn deallocate_instance(&mut self) { - //mem::ManuallyDrop::drop(&mut self.instance); let instance_ptr = self.instance.as_ptr(); + ptr::drop_in_place(instance_ptr); std::alloc::dealloc(instance_ptr as *mut u8, self.instance_layout); } - /// Get a reference to the `Instance`. 
- #[inline(always)] - pub(crate) fn instance_ref(&self) -> &Instance { - unsafe { self.instance.as_ref() } + /// Get the number of strong references pointing to this + /// `InstanceAllocator`. + pub fn strong_count(&self) -> usize { + self.strong.load(atomic::Ordering::SeqCst) } - fn instance_ptr(&self) -> *const Instance { - self.instance.as_ptr() + /// Get a reference to the `Instance`. + #[inline] + pub(crate) fn instance_ref<'a>(&'a self) -> &'a Instance { + // SAFETY: The pointer is properly aligned, it is + // “dereferencable”, it points to an initialized memory of + // `Instance`, and the reference has the lifetime `'a`. + unsafe { self.instance.as_ref() } } } +/// TODO: Review this super carefully. +unsafe impl Send for InstanceAllocator {} +unsafe impl Sync for InstanceAllocator {} + +/// A soft limit on the amount of references that may be made to an `InstanceAllocator`. +/// +/// Going above this limit will make the program to panic at exactly +/// `MAX_REFCOUNT` references. +const MAX_REFCOUNT: usize = std::usize::MAX - 1; + impl Clone for InstanceAllocator { + /// Makes a clone of `InstanceAllocator`. + /// + /// This creates another `InstanceAllocator` using the same + /// `instance` pointer, increasing the strong reference count. #[inline] fn clone(&self) -> Self { - let _old_size = self.strong.fetch_add(1, atomic::Ordering::Relaxed); + let old_size = self.strong.fetch_add(1, atomic::Ordering::Relaxed); - /* - let old_size > MAX_REFCOUNT { - abort(); + if old_size > MAX_REFCOUNT { + panic!("Too much references of `InstanceAllocator`"); } - */ Self { - //self_ptr: self.self_ptr, strong: self.strong.clone(), instance_layout: self.instance_layout, instance: self.instance.clone(), @@ -822,6 +831,8 @@ impl Clone for InstanceAllocator { } impl PartialEq for InstanceAllocator { + /// Two `InstanceAllocator` are equal if and only if + /// `Self.instance` points to the same location. fn eq(&self, other: &Self) -> bool { self.instance == other.instance } @@ -834,9 +845,17 @@ impl Drop for Instance { } impl Drop for InstanceAllocator { + /// Drop the `InstanceAllocator`. + /// + /// This will decrement the strong reference count. If it reaches + /// 1, then the `Self.instance` will be deallocated with + /// `Self::deallocate_instance`. fn drop(&mut self) { println!("Trying to drop `wasmer_vm::InstanceAllocator`..."); + // Because `fetch_sub` is already atomic, we do not need to + // synchronize with other threads unless we are going to + // delete the object. if self.strong.fetch_sub(1, atomic::Ordering::Release) != 1 { println!("... not now"); @@ -845,28 +864,53 @@ impl Drop for InstanceAllocator { println!("Yep, dropping it."); + // This fence is needed to prevent reordering of use of the data and + // deletion of the data. Because it is marked `Release`, the decreasing + // of the reference count synchronizes with this `Acquire` fence. This + // means that use of the data happens before decreasing the reference + // count, which happens before this fence, which happens before the + // deletion of the data. + // + // As explained in the [Boost documentation][1], + // + // > It is important to enforce any possible access to the object in one + // > thread (through an existing reference) to *happen before* deleting + // > the object in a different thread. This is achieved by a "release" + // > operation after dropping a reference (any access to the object + // > through this reference must obviously happened before), and an + // > "acquire" operation before deleting the object. 
+ // + // [1]: https://www.boost.org/doc/libs/1_55_0/doc/html/atomic/usage_examples.html atomic::fence(atomic::Ordering::Acquire); + // Now we can deallocate the instance. Note that we don't + // check the pointer to `Instance` is correctly initialized, + // but the way `InstanceHandle` creates the + // `InstanceAllocator` ensures that. unsafe { Self::deallocate_instance(self) }; } } -/// A handle holding an `Instance` of a WebAssembly module. -//#[derive(Hash, PartialEq, Eq)] +/// A handle holding an `InstanceAllocator`, which holds an `Instance` +/// of a WebAssembly module. +/// +/// This is more or less a public facade of the private `Instance`, +/// providing useful higher-level API. +#[derive(Debug, PartialEq)] pub struct InstanceHandle { + /// The `InstanceAllocator`. See its documentation to learn more. instance: InstanceAllocator, } -/// # Safety -/// This is safe because there is no thread-specific logic in `InstanceHandle`. -/// TODO: this needs extra review -unsafe impl Send for InstanceHandle {} - impl InstanceHandle { /// Allocates an instance for use with `InstanceHandle::new`. /// /// Returns the instance pointer and the [`VMOffsets`] that describe the /// memory buffer pointed to by the instance pointer. + /// + /// It should ideally return `NonNull` rather than + /// `NonNull`, however `Instance` is private, and we want to + /// keep it private. pub fn allocate_instance(module: &ModuleInfo) -> (NonNull, VMOffsets) { let offsets = VMOffsets::new(mem::size_of::<*const u8>() as u8, module); let (instance_ptr, _instance_layout) = InstanceAllocator::allocate_instance(&offsets); @@ -874,15 +918,15 @@ impl InstanceHandle { (instance_ptr.cast(), offsets) } - /// Create a new `InstanceHandle` pointing at a new `Instance`. + /// Create a new `InstanceHandle` pointing at a new `InstanceAllocator`. /// /// # Safety /// /// This method is not necessarily inherently unsafe to call, but in general /// the APIs of an `Instance` are quite unsafe and have not been really /// audited for safety that much. As a result the unsafety here on this - /// method is a low-overhead way of saying "this is an extremely unsafe type - /// to work with". + /// method is a low-overhead way of saying “this is an extremely unsafe type + /// to work with”. /// /// Extreme care must be taken when working with `InstanceHandle` and it's /// recommended to have relatively intimate knowledge of how it works @@ -893,8 +937,7 @@ impl InstanceHandle { /// However the following must be taken care of before calling this function: /// - `instance_ptr` must point to valid memory sufficiently large /// for the `Instance`. `instance_ptr` will be owned by - /// `InstanceHandle`, see `InstanceHandle::owned_instance` to - /// learn more. + /// `InstanceAllocator`, see `InstanceAllocator` to learn more. /// - The memory at `instance.tables_ptr()` must be initialized with data for /// all the local tables. /// - The memory at `instance.memories_ptr()` must be initialized with data for @@ -913,7 +956,10 @@ impl InstanceHandle { vmshared_signatures: BoxedSlice, host_state: Box, ) -> Result { + // `NonNull` here actually means `NonNull`. See + // `Self::allocate_instance` to understand why. 
let instance_ptr: NonNull = instance_ptr.cast(); + let vmctx_globals = finished_globals .values() .map(|m| m.vmglobal()) @@ -922,8 +968,8 @@ impl InstanceHandle { let passive_data = RefCell::new(module.passive_data.clone()); let handle = { - dbg!(instance_ptr); let instance_layout = InstanceAllocator::instance_layout(&offsets); + // Create the `Instance`. The unique, the One. let instance = Instance { module, offsets, @@ -939,20 +985,17 @@ impl InstanceHandle { vmctx: VMContext {}, }; + // `instance` is moved at `instance_ptr`. This pointer has + // been allocated by `Self::allocate_instance` (so by + // `InstanceAllocator::allocate_instance`. ptr::write(instance_ptr.as_ptr(), instance); - let instance_allocator = InstanceAllocator::new(instance_ptr, instance_layout); - - /* - let instance_ptr = Arc::as_ptr(&arc_instance); - dbg!(instance_ptr); - debug_assert_eq!(Arc::strong_count(&arc_instance), 1); - - ptr::write(arc_instance_allocator_ptr, arc_instance); + // Now `instance_ptr` is correctly initialized! - let instance = Arc::from_raw(instance_ptr); - debug_assert_eq!(Arc::strong_count(&instance), 1); - */ + // `instance_ptr` is passed to `InstanceAllocator`, which + // makes it the only “owner” (it doesn't own the value, + // it's just the semantics we define). + let instance_allocator = InstanceAllocator::new(instance_ptr, instance_layout); Self { instance: instance_allocator, @@ -1045,7 +1088,10 @@ impl InstanceHandle { instance_ptr: NonNull, offsets: &VMOffsets, ) -> Vec> { + // `NonNull` here actually means `NonNull`. See + // `Self::allocate_instance` to understand why. let instance_ptr: NonNull = instance_ptr.cast(); + let num_memories = offsets.num_local_memories; let num_memories = usize::try_from(num_memories).unwrap(); let mut out = Vec::with_capacity(num_memories); @@ -1076,7 +1122,10 @@ impl InstanceHandle { instance_ptr: NonNull, offsets: &VMOffsets, ) -> Vec> { + // `NonNull` here actually means `NonNull`. See + // `Self::allocate_instance` to understand why. let instance_ptr: NonNull = instance_ptr.cast(); + let num_tables = offsets.num_local_tables; let num_tables = usize::try_from(num_tables).unwrap(); let mut out = Vec::with_capacity(num_tables); From f968b24f224fd4f37b882ab65e0609ffdd0df98e Mon Sep 17 00:00:00 2001 From: Ivan Enderlin Date: Thu, 26 Nov 2020 09:28:15 +0100 Subject: [PATCH 11/27] feat(vm) Remove `InstanceHandle::from_vmctx`. MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit The `InstanceHandle::from_vmctx` method is only used by `CallThreadState::any_instance`, where a given closure expect an `&InstanceHandle`. However, the only closure that exists in the code only reads an `&Instance` from the `InstanceHandle`. So… this patch updates this only closure and `any_instance` to work on an `&Instance` directly, which allows us to remove `InstanceHandle::from_vmctx`. It results to less code and less pointer arithmetic. --- lib/vm/src/instance.rs | 19 ------------------- lib/vm/src/trap/traphandlers.rs | 19 ++++++++++++------- 2 files changed, 12 insertions(+), 26 deletions(-) diff --git a/lib/vm/src/instance.rs b/lib/vm/src/instance.rs index 226b4875597..8a0a5434207 100644 --- a/lib/vm/src/instance.rs +++ b/lib/vm/src/instance.rs @@ -1052,25 +1052,6 @@ impl InstanceHandle { Ok(handle) } - /// Create a new `InstanceHandle` pointing at the instance - /// pointed to by the given `VMContext` pointer. 
- /// - /// # Safety - /// This is unsafe because it doesn't work on just any `VMContext`, it must - /// be a `VMContext` allocated as part of an `Instance`. - pub(crate) unsafe fn from_vmctx(vmctx: *const VMContext) -> Self { - /* - let instance = (&*vmctx).instance(); - - Self { - instance: Arc::from_raw(instance as *const InstanceAllocator), - // We consider `vmctx` owns the `Instance`, not `Self`. - //owned_instance: false, - } - */ - todo!() - } - /// Return a reference to the contained `Instance`. pub(crate) fn instance(&self) -> &InstanceAllocator { &self.instance diff --git a/lib/vm/src/trap/traphandlers.rs b/lib/vm/src/trap/traphandlers.rs index cfb33c1da55..bfd0b656dcf 100644 --- a/lib/vm/src/trap/traphandlers.rs +++ b/lib/vm/src/trap/traphandlers.rs @@ -5,7 +5,7 @@ //! signalhandling mechanisms. use super::trapcode::TrapCode; -use crate::instance::{InstanceHandle, SignalHandler}; +use crate::instance::{Instance, SignalHandler}; use crate::vmcontext::{VMFunctionBody, VMFunctionEnvironment, VMTrampoline}; use backtrace::Backtrace; use std::any::Any; @@ -577,9 +577,15 @@ impl CallThreadState { }) } - fn any_instance(&self, func: impl Fn(&InstanceHandle) -> bool) -> bool { + fn any_instance(&self, func: impl Fn(&Instance) -> bool) -> bool { unsafe { - if func(&InstanceHandle::from_vmctx(self.vmctx.vmctx)) { + if func( + self.vmctx + .vmctx + .as_ref() + .expect("`VMContext` is null in `any_instance`") + .instance(), + ) { return true; } match self.prev { @@ -632,14 +638,13 @@ impl CallThreadState { // First up see if any instance registered has a custom trap handler, // in which case run them all. If anything handles the trap then we // return that the trap was handled. - let any_instance = self.any_instance(|i| { - let instance_ref = i.instance().instance_ref(); - let handler = match instance_ref.signal_handler.replace(None) { + let any_instance = self.any_instance(|instance: &Instance| { + let handler = match instance.signal_handler.replace(None) { Some(handler) => handler, None => return false, }; let result = call_handler(&handler); - instance_ref.signal_handler.set(Some(handler)); + instance.signal_handler.set(Some(handler)); result }); From 4d073ea8b8bb3217f76c82f74496aeee21bbd314 Mon Sep 17 00:00:00 2001 From: Ivan Enderlin Date: Thu, 26 Nov 2020 09:48:52 +0100 Subject: [PATCH 12/27] feat(vm) Rename `InstanceAllocator::instance_ref` to `::as_ref`. --- lib/vm/src/instance.rs | 42 ++++++++++++++++++------------------------ 1 file changed, 18 insertions(+), 24 deletions(-) diff --git a/lib/vm/src/instance.rs b/lib/vm/src/instance.rs index 8a0a5434207..1174158a0f4 100644 --- a/lib/vm/src/instance.rs +++ b/lib/vm/src/instance.rs @@ -791,7 +791,7 @@ impl InstanceAllocator { /// Get a reference to the `Instance`. #[inline] - pub(crate) fn instance_ref<'a>(&'a self) -> &'a Instance { + pub(crate) fn as_ref<'a>(&'a self) -> &'a Instance { // SAFETY: The pointer is properly aligned, it is // “dereferencable”, it points to an initialized memory of // `Instance`, and the reference has the lifetime `'a`. 
@@ -1001,7 +1001,7 @@ impl InstanceHandle { instance: instance_allocator, } }; - let instance = handle.instance().instance_ref(); + let instance = handle.instance().as_ref(); ptr::copy( vmshared_signatures.values().as_slice().as_ptr(), @@ -1133,7 +1133,7 @@ impl InstanceHandle { &self, data_initializers: &[DataInitializer<'_>], ) -> Result<(), Trap> { - let instance = self.instance().instance_ref(); + let instance = self.instance().as_ref(); check_table_init_bounds(instance)?; check_memory_init_bounds(instance, data_initializers)?; @@ -1149,22 +1149,22 @@ impl InstanceHandle { /// Return a reference to the vmctx used by compiled wasm code. pub fn vmctx(&self) -> &VMContext { - self.instance().instance_ref().vmctx() + self.instance().as_ref().vmctx() } /// Return a raw pointer to the vmctx used by compiled wasm code. pub fn vmctx_ptr(&self) -> *mut VMContext { - self.instance().instance_ref().vmctx_ptr() + self.instance().as_ref().vmctx_ptr() } /// Return a reference-counting pointer to a module. pub fn module(&self) -> &Arc { - self.instance().instance_ref().module() + self.instance().as_ref().module() } /// Return a reference to a module. pub fn module_ref(&self) -> &ModuleInfo { - self.instance().instance_ref().module_ref() + self.instance().as_ref().module_ref() } /// Lookup an export with the given name. @@ -1177,7 +1177,7 @@ impl InstanceHandle { /// Lookup an export with the given export declaration. pub fn lookup_by_declaration(&self, export: &ExportIndex) -> Export { let instance = self.instance().clone(); - let instance_ref = instance.instance_ref(); + let instance_ref = instance.as_ref(); match export { ExportIndex::Function(index) => { @@ -1272,12 +1272,12 @@ impl InstanceHandle { /// Return a reference to the custom state attached to this instance. pub fn host_state(&self) -> &dyn Any { - self.instance().instance_ref().host_state() + self.instance().as_ref().host_state() } /// Return the memory index for the given `VMMemoryDefinition` in this instance. pub fn memory_index(&self, memory: &VMMemoryDefinition) -> LocalMemoryIndex { - self.instance().instance_ref().memory_index(memory) + self.instance().as_ref().memory_index(memory) } /// Grow memory in this instance by the specified amount of pages. @@ -1292,14 +1292,12 @@ impl InstanceHandle { where IntoPages: Into, { - self.instance() - .instance_ref() - .memory_grow(memory_index, delta) + self.instance().as_ref().memory_grow(memory_index, delta) } /// Return the table index for the given `VMTableDefinition` in this instance. pub fn table_index(&self, table: &VMTableDefinition) -> LocalTableIndex { - self.instance().instance_ref().table_index(table) + self.instance().as_ref().table_index(table) } /// Grow table in this instance by the specified amount of pages. @@ -1307,9 +1305,7 @@ impl InstanceHandle { /// Returns `None` if memory can't be grown by the specified amount /// of pages. pub fn table_grow(&self, table_index: LocalTableIndex, delta: u32) -> Option { - self.instance() - .instance_ref() - .table_grow(table_index, delta) + self.instance().as_ref().table_grow(table_index, delta) } /// Get table element reference. @@ -1320,7 +1316,7 @@ impl InstanceHandle { table_index: LocalTableIndex, index: u32, ) -> Option { - self.instance().instance_ref().table_get(table_index, index) + self.instance().as_ref().table_get(table_index, index) } /// Set table element reference. 
@@ -1332,14 +1328,12 @@ impl InstanceHandle { index: u32, val: VMCallerCheckedAnyfunc, ) -> Result<(), Trap> { - self.instance() - .instance_ref() - .table_set(table_index, index, val) + self.instance().as_ref().table_set(table_index, index, val) } /// Get a table defined locally within this module. pub fn get_local_table(&self, index: LocalTableIndex) -> &dyn Table { - self.instance().instance_ref().get_local_table(index) + self.instance().as_ref().get_local_table(index) } } @@ -1353,7 +1347,7 @@ cfg_if::cfg_if! { where H: 'static + Fn(libc::c_int, *const libc::siginfo_t, *const libc::c_void) -> bool, { - self.instance().instance_ref().signal_handler.set(Some(Box::new(handler))); + self.instance().as_ref().signal_handler.set(Some(Box::new(handler))); } } } else if #[cfg(target_os = "windows")] { @@ -1365,7 +1359,7 @@ cfg_if::cfg_if! { where H: 'static + Fn(winapi::um::winnt::PEXCEPTION_POINTERS) -> bool, { - self.instance().instance_ref().signal_handler.set(Some(Box::new(handler))); + self.instance().as_ref().signal_handler.set(Some(Box::new(handler))); } } } From 0e1ed94b5a0ad7cb496a992b89e479f8cf1c2773 Mon Sep 17 00:00:00 2001 From: Ivan Enderlin Date: Thu, 26 Nov 2020 12:02:24 +0100 Subject: [PATCH 13/27] fix(vm) Fix the pointer size when doing arithmetics. --- lib/vm/src/instance.rs | 10 ++++++++-- 1 file changed, 8 insertions(+), 2 deletions(-) diff --git a/lib/vm/src/instance.rs b/lib/vm/src/instance.rs index 1174158a0f4..a946fa533ca 100644 --- a/lib/vm/src/instance.rs +++ b/lib/vm/src/instance.rs @@ -1077,7 +1077,11 @@ impl InstanceHandle { let num_memories = usize::try_from(num_memories).unwrap(); let mut out = Vec::with_capacity(num_memories); - let base_ptr = instance_ptr.as_ptr().add(mem::size_of::()); + // We need to do some pointer arithmetic now. The unit is `u8`. + let ptr = instance_ptr.cast::().as_ptr(); + + // + let base_ptr = ptr.add(mem::size_of::()); for i in 0..num_memories { let mem_offset = offsets.vmctx_vmmemory_definition(LocalMemoryIndex::new(i)); @@ -1111,7 +1115,9 @@ impl InstanceHandle { let num_tables = usize::try_from(num_tables).unwrap(); let mut out = Vec::with_capacity(num_tables); - let base_ptr = instance_ptr.as_ptr().add(std::mem::size_of::()); + // We need to do some pointer arithmetic now. The unit is `u8`. + let ptr = instance_ptr.cast::().as_ptr(); + let base_ptr = ptr.add(std::mem::size_of::()); for i in 0..num_tables { let table_offset = offsets.vmctx_vmtable_definition(LocalTableIndex::new(i)); From 26d20e0c58c420d5c19682a4a0c6e18d87f2f237 Mon Sep 17 00:00:00 2001 From: Ivan Enderlin Date: Thu, 26 Nov 2020 12:05:25 +0100 Subject: [PATCH 14/27] feat(vm) Move `MAX_REFCOUNT` as an associated constant of `InstanceAllocator`. --- lib/vm/src/instance.rs | 14 +++++++------- 1 file changed, 7 insertions(+), 7 deletions(-) diff --git a/lib/vm/src/instance.rs b/lib/vm/src/instance.rs index a946fa533ca..a5d68254afd 100644 --- a/lib/vm/src/instance.rs +++ b/lib/vm/src/instance.rs @@ -726,6 +726,12 @@ pub struct InstanceAllocator { } impl InstanceAllocator { + /// A soft limit on the amount of references that may be made to an `InstanceAllocator`. + /// + /// Going above this limit will make the program to panic at exactly + /// `MAX_REFCOUNT` references. + const MAX_REFCOUNT: usize = std::usize::MAX - 1; + /// Create a new `InstanceAllocator`. It allocates nothing. It /// fills nothing. The `Instance` must be already valid and /// filled. 
`self_ptr` and `self_layout` must be the pointer and @@ -803,12 +809,6 @@ impl InstanceAllocator { unsafe impl Send for InstanceAllocator {} unsafe impl Sync for InstanceAllocator {} -/// A soft limit on the amount of references that may be made to an `InstanceAllocator`. -/// -/// Going above this limit will make the program to panic at exactly -/// `MAX_REFCOUNT` references. -const MAX_REFCOUNT: usize = std::usize::MAX - 1; - impl Clone for InstanceAllocator { /// Makes a clone of `InstanceAllocator`. /// @@ -818,7 +818,7 @@ impl Clone for InstanceAllocator { fn clone(&self) -> Self { let old_size = self.strong.fetch_add(1, atomic::Ordering::Relaxed); - if old_size > MAX_REFCOUNT { + if old_size > Self::MAX_REFCOUNT { panic!("Too much references of `InstanceAllocator`"); } From df0e30bbeb177fe556634730069030eaad6f8672 Mon Sep 17 00:00:00 2001 From: Ivan Enderlin Date: Thu, 26 Nov 2020 12:06:02 +0100 Subject: [PATCH 15/27] fix(vm) Fix a typo in a panic message. --- lib/vm/src/instance.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/lib/vm/src/instance.rs b/lib/vm/src/instance.rs index a5d68254afd..df0f80003c6 100644 --- a/lib/vm/src/instance.rs +++ b/lib/vm/src/instance.rs @@ -819,7 +819,7 @@ impl Clone for InstanceAllocator { let old_size = self.strong.fetch_add(1, atomic::Ordering::Relaxed); if old_size > Self::MAX_REFCOUNT { - panic!("Too much references of `InstanceAllocator`"); + panic!("Too many references of `InstanceAllocator`"); } Self { From cae4bc699f9e64cc75fe449916479ebeb9eb5ad8 Mon Sep 17 00:00:00 2001 From: Ivan Enderlin Date: Thu, 26 Nov 2020 13:20:59 +0100 Subject: [PATCH 16/27] fix(api) Fix `new_native_with_unsafe_mutable_env`. --- lib/api/src/externals/function.rs | 1 + 1 file changed, 1 insertion(+) diff --git a/lib/api/src/externals/function.rs b/lib/api/src/externals/function.rs index df775618bd7..da88a60d065 100644 --- a/lib/api/src/externals/function.rs +++ b/lib/api/src/externals/function.rs @@ -303,6 +303,7 @@ impl Function { vmctx, signature, call_trampoline: None, + instance_allocator: None, }, } } From 081d3e3edc9cbbf4117715163c90c3c476ca7331 Mon Sep 17 00:00:00 2001 From: Ivan Enderlin Date: Thu, 26 Nov 2020 13:48:30 +0100 Subject: [PATCH 17/27] fix(vm) Remove debugging statements. --- lib/api/src/instance.rs | 6 ------ lib/vm/src/instance.rs | 12 ------------ 2 files changed, 18 deletions(-) diff --git a/lib/api/src/instance.rs b/lib/api/src/instance.rs index 521fcea7ebd..21986eea8e1 100644 --- a/lib/api/src/instance.rs +++ b/lib/api/src/instance.rs @@ -115,9 +115,3 @@ impl fmt::Debug for Instance { .finish() } } - -impl Drop for Instance { - fn drop(&mut self) { - println!("Dropping `wasmer::Instance`"); - } -} diff --git a/lib/vm/src/instance.rs b/lib/vm/src/instance.rs index df0f80003c6..64985777990 100644 --- a/lib/vm/src/instance.rs +++ b/lib/vm/src/instance.rs @@ -838,12 +838,6 @@ impl PartialEq for InstanceAllocator { } } -impl Drop for Instance { - fn drop(&mut self) { - println!("Dropping `wasmer_vm::Instance`"); - } -} - impl Drop for InstanceAllocator { /// Drop the `InstanceAllocator`. /// @@ -851,19 +845,13 @@ impl Drop for InstanceAllocator { /// 1, then the `Self.instance` will be deallocated with /// `Self::deallocate_instance`. fn drop(&mut self) { - println!("Trying to drop `wasmer_vm::InstanceAllocator`..."); - // Because `fetch_sub` is already atomic, we do not need to // synchronize with other threads unless we are going to // delete the object. 
if self.strong.fetch_sub(1, atomic::Ordering::Release) != 1 { - println!("... not now"); - return; } - println!("Yep, dropping it."); - // This fence is needed to prevent reordering of use of the data and // deletion of the data. Because it is marked `Release`, the decreasing // of the reference count synchronizes with this `Acquire` fence. This From e94f9944034939241922230c92622af0295e5a61 Mon Sep 17 00:00:00 2001 From: Ivan Enderlin Date: Thu, 26 Nov 2020 13:53:16 +0100 Subject: [PATCH 18/27] doc(vm) Add more documentation. --- lib/vm/src/instance.rs | 26 +++++++++++++++++++++++++- 1 file changed, 25 insertions(+), 1 deletion(-) diff --git a/lib/vm/src/instance.rs b/lib/vm/src/instance.rs index 64985777990..fa43af6f8e6 100644 --- a/lib/vm/src/instance.rs +++ b/lib/vm/src/instance.rs @@ -816,8 +816,32 @@ impl Clone for InstanceAllocator { /// `instance` pointer, increasing the strong reference count. #[inline] fn clone(&self) -> Self { + // Using a relaxed ordering is alright here, as knowledge of + // the original reference prevents other threads from + // erroneously deleting the object. + // + // As explained in the [Boost documentation][1]: + // + // > Increasing the reference counter can always be done with + // > `memory_order_relaxed`: New references to an object can + // > only be formed from an existing reference, and passing an + // > existing reference from one thread to another must already + // > provide any required synchronization. + // + // [1]: https://www.boost.org/doc/libs/1_55_0/doc/html/atomic/usage_examples.html let old_size = self.strong.fetch_add(1, atomic::Ordering::Relaxed); + // However we need to guard against massive refcounts in case + // someone is `mem::forget`ing `InstanceAllocator`. If we + // don't do this the count can overflow and users will + // use-after free. We racily saturate to `isize::MAX` on the + // assumption that there aren't ~2 billion threads + // incrementing the reference count at once. This branch will + // never be taken in any realistic program. + // + // We abort because such a program is incredibly degenerate, + // and we don't care to support it. + if old_size > Self::MAX_REFCOUNT { panic!("Too many references of `InstanceAllocator`"); } @@ -859,7 +883,7 @@ impl Drop for InstanceAllocator { // count, which happens before this fence, which happens before the // deletion of the data. // - // As explained in the [Boost documentation][1], + // As explained in the [Boost documentation][1]: // // > It is important to enforce any possible access to the object in one // > thread (through an existing reference) to *happen before* deleting From 70455a98a0d14ae4b2b37f6ac1ec08bd6739ebb6 Mon Sep 17 00:00:00 2001 From: Ivan Enderlin Date: Thu, 26 Nov 2020 13:58:00 +0100 Subject: [PATCH 19/27] doc(vm) Give more details in the API documentation. --- lib/vm/src/instance.rs | 12 ++++++++---- 1 file changed, 8 insertions(+), 4 deletions(-) diff --git a/lib/vm/src/instance.rs b/lib/vm/src/instance.rs index fa43af6f8e6..90df30ccbd8 100644 --- a/lib/vm/src/instance.rs +++ b/lib/vm/src/instance.rs @@ -1075,8 +1075,10 @@ impl InstanceHandle { /// memory in the VM. /// /// # Safety - /// - `instance_ptr` must point to enough memory that all of the offsets in - /// `offsets` point to valid locations in memory. + /// - `instance_ptr` must point to enough memory that all of the + /// offsets in `offsets` point to valid locations in memory, + /// i.e. `instance_ptr` must have been allocated by + /// `InstanceHandle::allocate_instance`. 
pub unsafe fn memory_definition_locations( instance_ptr: NonNull, offsets: &VMOffsets, @@ -1113,8 +1115,10 @@ impl InstanceHandle { /// memory in the VM. /// /// # Safety - /// - `instance_ptr` must point to enough memory that all of the offsets in - /// `offsets` point to valid locations in memory. + /// - `instance_ptr` must point to enough memory that all of the + /// offsets in `offsets` point to valid locations in memory, + /// i.e. `instance_ptr` must have been allocated by + /// `InstanceHandle::allocate_instance`. pub unsafe fn table_definition_locations( instance_ptr: NonNull, offsets: &VMOffsets, From c649beceffaf3bc55f7826b5908a35cd40b1d550 Mon Sep 17 00:00:00 2001 From: Ivan Enderlin Date: Thu, 26 Nov 2020 14:03:45 +0100 Subject: [PATCH 20/27] doc(vm) Remove a useless part of the documentation. --- lib/vm/src/instance.rs | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/lib/vm/src/instance.rs b/lib/vm/src/instance.rs index 90df30ccbd8..f4afcf9180d 100644 --- a/lib/vm/src/instance.rs +++ b/lib/vm/src/instance.rs @@ -870,8 +870,7 @@ impl Drop for InstanceAllocator { /// `Self::deallocate_instance`. fn drop(&mut self) { // Because `fetch_sub` is already atomic, we do not need to - // synchronize with other threads unless we are going to - // delete the object. + // synchronize with other thread. if self.strong.fetch_sub(1, atomic::Ordering::Release) != 1 { return; } From 8e17a4ce4182ede157e55c561fb6bf62fad78cd0 Mon Sep 17 00:00:00 2001 From: Ivan Enderlin Date: Thu, 26 Nov 2020 14:23:31 +0100 Subject: [PATCH 21/27] fixup --- lib/api/src/externals/function.rs | 6 ------ 1 file changed, 6 deletions(-) diff --git a/lib/api/src/externals/function.rs b/lib/api/src/externals/function.rs index da88a60d065..def361e1078 100644 --- a/lib/api/src/externals/function.rs +++ b/lib/api/src/externals/function.rs @@ -58,12 +58,6 @@ pub struct Function { pub(crate) exported: ExportFunction, } -impl Drop for Function { - fn drop(&mut self) { - println!("Dropping `wasmer::Function`"); - } -} - impl Function { /// Creates a new host `Function` (dynamic) with the provided signature. /// From 0ec3b70b2267079e4f3ac84fd24b8cf465b0e6d8 Mon Sep 17 00:00:00 2001 From: Ivan Enderlin Date: Thu, 26 Nov 2020 15:09:04 +0100 Subject: [PATCH 22/27] feat(vm) Change how the layout of `Instance` is calculated. MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit The old code is working because the alignment of `Instance` is 16, and its size is 400. Even with a really large `VMContext`, it will still fit in an alignment of 8. But the way it is calculated is wrong. The size is correctly calculated, but the alignment misses `VMContext`. Quoting our own documentation: > It is important to remind that Instance is dynamically-sized based > on `VMOffsets`: The `Instance.vmctx` field represents a > dynamically-sized array that extends beyond the nominal end of the > type. So first, let's compute the layout of the `Instance.vmctx` “array” with `Layout::array`. Second, let's compute the layout of `Instance` itself with `Layout::new`. And third, let's compute the entire layout by using `Layout::extend` + `Layout::pad_to_align`. 
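For readers unfamiliar with the `Layout` API, a minimal, standalone sketch of the same technique follows (a fixed-size header followed by a dynamically-sized byte tail). The `Header` type, field choices, and the trailing length are made up purely for illustration and are not part of this patch:

    // Sketch only: compute the layout of a header type plus a trailing
    // byte array, using the same `Layout::array` + `Layout::extend` +
    // `Layout::pad_to_align` combination described above.
    use std::alloc::Layout;

    // Hypothetical fixed-size header standing in for `Instance`.
    struct Header {
        a: u64,
        b: u32,
    }

    fn layout_with_trailing_bytes(trailing_len: usize) -> Layout {
        // Layout of the dynamically-sized tail (the `vmctx` "array" in
        // the real code).
        let trailing = Layout::array::<u8>(trailing_len)
            .expect("Failed to create a layout for the trailing bytes");

        // Layout of the header itself, extended by the tail. `extend`
        // also returns the offset of the tail, unused here.
        let (layout, _tail_offset) = Layout::new::<Header>()
            .extend(trailing)
            .expect("Failed to extend the header layout with the tail");

        // `extend` can leave the total size unpadded; `pad_to_align`
        // rounds it up to a multiple of the alignment, as `alloc` expects.
        layout.pad_to_align()
    }

    fn main() {
        let layout = layout_with_trailing_bytes(24);

        // The alignment comes from the header; the tail of `u8`s does
        // not change it.
        assert_eq!(layout.align(), std::mem::align_of::<Header>());
        println!("size = {}, align = {}", layout.size(), layout.align());
    }

This mirrors the reasoning above: the size is still header-plus-tail, but the alignment is now derived from the whole composed layout rather than from the header type alone.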
--- lib/vm/src/instance.rs | 18 ++++++++++-------- 1 file changed, 10 insertions(+), 8 deletions(-) diff --git a/lib/vm/src/instance.rs b/lib/vm/src/instance.rs index f4afcf9180d..15f024bc889 100644 --- a/lib/vm/src/instance.rs +++ b/lib/vm/src/instance.rs @@ -747,15 +747,17 @@ impl InstanceAllocator { /// Calculate the appropriate layout for `Instance`. fn instance_layout(offsets: &VMOffsets) -> Layout { - let size = mem::size_of::() - .checked_add( - usize::try_from(offsets.size_of_vmctx()) - .expect("Failed to convert the size of `vmctx` to a `usize`"), - ) - .expect("Failed to compute the size of `Instance`"); - let align = mem::align_of::(); + let vmctx_size = usize::try_from(offsets.size_of_vmctx()) + .expect("Failed to convert the size of `vmctx` to a `usize`"); + + let instance_vmctx_layout = + Layout::array::(vmctx_size).expect("Failed to create a layout for `VMContext`"); + + let (instance_layout, _offset) = Layout::new::() + .extend(instance_vmctx_layout) + .expect("Failed to extend to `Instance` layout to include `VMContext`"); - Layout::from_size_align(size, align).unwrap() + instance_layout.pad_to_align() } /// Allocate `Instance` (it is an uninitialized pointer). From 5a00f2686706a1ec3f5f551029d8bac3c586918e Mon Sep 17 00:00:00 2001 From: Ivan Enderlin Date: Thu, 26 Nov 2020 15:39:41 +0100 Subject: [PATCH 23/27] feat(vm) More functions can be constant functions. --- lib/vm/src/vmoffsets.rs | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/lib/vm/src/vmoffsets.rs b/lib/vm/src/vmoffsets.rs index b18408165d3..3ca2073cb34 100644 --- a/lib/vm/src/vmoffsets.rs +++ b/lib/vm/src/vmoffsets.rs @@ -95,20 +95,20 @@ impl VMOffsets { impl VMOffsets { /// The offset of the `body` field. #[allow(clippy::erasing_op)] - pub fn vmfunction_import_body(&self) -> u8 { + pub const fn vmfunction_import_body(&self) -> u8 { 0 * self.pointer_size } /// The offset of the `vmctx` field. #[allow(clippy::identity_op)] - pub fn vmfunction_import_vmctx(&self) -> u8 { + pub const fn vmfunction_import_vmctx(&self) -> u8 { 1 * self.pointer_size } /// Return the size of [`VMFunctionImport`]. /// /// [`VMFunctionImport`]: crate::vmcontext::VMFunctionImport - pub fn size_of_vmfunction_import(&self) -> u8 { + pub const fn size_of_vmfunction_import(&self) -> u8 { 2 * self.pointer_size } } @@ -119,20 +119,20 @@ impl VMOffsets { impl VMOffsets { /// The offset of the `address` field. #[allow(clippy::erasing_op)] - pub fn vmdynamicfunction_import_context_address(&self) -> u8 { + pub const fn vmdynamicfunction_import_context_address(&self) -> u8 { 0 * self.pointer_size } /// The offset of the `ctx` field. #[allow(clippy::identity_op)] - pub fn vmdynamicfunction_import_context_ctx(&self) -> u8 { + pub const fn vmdynamicfunction_import_context_ctx(&self) -> u8 { 1 * self.pointer_size } /// Return the size of [`VMDynamicFunctionContext`]. /// /// [`VMDynamicFunctionContext`]: crate::vmcontext::VMDynamicFunctionContext - pub fn size_of_vmdynamicfunction_import_context(&self) -> u8 { + pub const fn size_of_vmdynamicfunction_import_context(&self) -> u8 { 2 * self.pointer_size } } From dc2f5cf36759189009347a1e93a47479e4407b02 Mon Sep 17 00:00:00 2001 From: Ivan Enderlin Date: Thu, 26 Nov 2020 16:04:06 +0100 Subject: [PATCH 24/27] doc(changelog) Add #1837. 
---
 CHANGELOG.md | 1 +
 1 file changed, 1 insertion(+)

diff --git a/CHANGELOG.md b/CHANGELOG.md
index 2819a3b6781..acbb69b3c74 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -9,6 +9,7 @@
 
 ### Added
 
+- [#1837](https://github.com/wasmerio/wasmer/pull/1837) It is now possible to use exports of an `Instance` even after the `Instance` has been freed
 - [#1831](https://github.com/wasmerio/wasmer/pull/1831) Added support for Apple Silicon chips (`arm64-apple-darwin`)
 - [#1649](https://github.com/wasmerio/wasmer/pull/1649) Add outline of migration to 1.0.0 docs.

From 286426ac65b7ec2e62fdcc51faa9febe497cad84 Mon Sep 17 00:00:00 2001
From: Ivan Enderlin
Date: Thu, 26 Nov 2020 16:21:39 +0100
Subject: [PATCH 25/27] doc(vm) Update documentation of exports for the `instance_allocator` field.

---
 lib/vm/src/export.rs | 37 ++++++++++++++++++++++++++-----------
 1 file changed, 26 insertions(+), 11 deletions(-)

diff --git a/lib/vm/src/export.rs b/lib/vm/src/export.rs
index ffd5b5da08b..9a4532d8402 100644
--- a/lib/vm/src/export.rs
+++ b/lib/vm/src/export.rs
@@ -30,17 +30,26 @@ pub enum Export {
 pub struct ExportFunction {
     /// The address of the native-code function.
     pub address: *const VMFunctionBody,
+
     /// Pointer to the containing `VMContext`.
     pub vmctx: VMFunctionEnvironment,
+
     /// The function type, used for compatibility checking.
     pub signature: FunctionType,
-    /// The function kind (specifies the calling convention for the function).
+
+    /// The function kind (specifies the calling convention for the
+    /// function).
     pub kind: VMFunctionKind,
-    /// Address of the function call trampoline owned by the same VMContext that owns the VMFunctionBody.
-    /// May be None when the function is a host-function (FunctionType == Dynamic or vmctx == nullptr).
+
+    /// Address of the function call trampoline owned by the same
+    /// VMContext that owns the VMFunctionBody.
+    ///
+    /// May be `None` when the function is a host function (`FunctionType`
+    /// == `Dynamic` or `vmctx` == `nullptr`).
     pub call_trampoline: Option<VMTrampoline>,
-    /// A reference to the instance, to ensure it's not deallocated
-    /// before the function.
+
+    /// A “reference” to the instance through the
+    /// `InstanceAllocator`. `None` if it is a host function.
     pub instance_allocator: Option<InstanceAllocator>,
 }
 
@@ -63,8 +72,9 @@ impl From<ExportFunction> for Export {
 pub struct ExportTable {
     /// Pointer to the containing `Table`.
     pub from: Arc<dyn Table>,
-    /// A reference to the instance, to ensure it's not deallocated
-    /// before the table.
+
+    /// A “reference” to the instance through the
+    /// `InstanceAllocator`. `None` if it is a host table.
     pub instance_allocator: Option<InstanceAllocator>,
 }
 
@@ -73,6 +83,7 @@ pub struct ExportTable {
 /// correct use of the raw table from multiple threads via `definition` requires `unsafe`
 /// and is the responsibilty of the user of this type.
 unsafe impl Send for ExportTable {}
+
 /// # Safety
 /// This is correct because the values directly in `definition` should be considered immutable
 /// and the type is both `Send` and `Clone` (thus marking it `Sync` adds no new behavior, it
@@ -107,8 +118,9 @@ impl From<ExportTable> for Export {
 pub struct ExportMemory {
     /// Pointer to the containing `Memory`.
     pub from: Arc<dyn Memory>,
-    /// A reference to the instance, to ensure it's not deallocated
-    /// before the memory.
+
+    /// A “reference” to the instance through the
+    /// `InstanceAllocator`. `None` if it is a host memory.
     pub instance_allocator: Option<InstanceAllocator>,
 }
 
@@ -117,6 +129,7 @@ pub struct ExportMemory {
 /// correct use of the raw memory from multiple threads via `definition` requires `unsafe`
 /// and is the responsibilty of the user of this type.
 unsafe impl Send for ExportMemory {}
+
 /// # Safety
 /// This is correct because the values directly in `definition` should be considered immutable
 /// and the type is both `Send` and `Clone` (thus marking it `Sync` adds no new behavior, it
@@ -151,8 +164,9 @@ impl From<ExportMemory> for Export {
 pub struct ExportGlobal {
     /// The global declaration, used for compatibility checking.
     pub from: Arc<Global>,
-    /// A reference to the instance, to ensure it's not deallocated
-    /// before the global.
+
+    /// A “reference” to the instance through the
+    /// `InstanceAllocator`. `None` if it is a host global.
     pub instance_allocator: Option<InstanceAllocator>,
 }
 
@@ -161,6 +175,7 @@ pub struct ExportGlobal {
 /// correct use of the raw global from multiple threads via `definition` requires `unsafe`
 /// and is the responsibilty of the user of this type.
 unsafe impl Send for ExportGlobal {}
+
 /// # Safety
 /// This is correct because the values directly in `definition` should be considered immutable
 /// from the perspective of users of this type and the type is both `Send` and `Clone` (thus

From 7621991a9ab2414ebf558a0ed958a8d50ce46c97 Mon Sep 17 00:00:00 2001
From: Ivan Enderlin
Date: Fri, 27 Nov 2020 10:25:50 +0100
Subject: [PATCH 26/27] test(api) Test that exports work after the instance has been freed.

---
 lib/api/tests/instance.rs | 39 +++++++++++++++++++++++++++++++++++++++
 1 file changed, 39 insertions(+)
 create mode 100644 lib/api/tests/instance.rs

diff --git a/lib/api/tests/instance.rs b/lib/api/tests/instance.rs
new file mode 100644
index 00000000000..69733877424
--- /dev/null
+++ b/lib/api/tests/instance.rs
@@ -0,0 +1,39 @@
+use anyhow::Result;
+use wasmer::*;
+
+#[test]
+fn exports_work_after_multiple_instances_have_been_freed() -> Result<()> {
+    let store = Store::default();
+    let module = Module::new(
+        &store,
+        "
+    (module
+      (type $sum_t (func (param i32 i32) (result i32)))
+      (func $sum_f (type $sum_t) (param $x i32) (param $y i32) (result i32)
+        local.get $x
+        local.get $y
+        i32.add)
+      (export \"sum\" (func $sum_f)))
+",
+    )?;
+
+    let import_object = ImportObject::new();
+    let instance = Instance::new(&module, &import_object)?;
+    let instance2 = instance.clone();
+    let instance3 = instance.clone();
+
+    // The function is cloned to “break” the connection with `instance`.
+    let sum = instance.exports.get_function("sum")?.clone();
+
+    drop(instance);
+    drop(instance2);
+    drop(instance3);
+
+    // All instances have been dropped, but `sum` continues to work!
+    assert_eq!(
+        sum.call(&[Value::I32(1), Value::I32(2)])?.into_vec(),
+        vec![Value::I32(3)],
+    );
+
+    Ok(())
+}

From 2f9da8bca4e3582c4ec7ae3925e4476e7aa25507 Mon Sep 17 00:00:00 2001
From: Ivan Enderlin
Date: Tue, 1 Dec 2020 09:24:28 +0100
Subject: [PATCH 27/27] feat(vm) Mark `InstanceAllocator::new` as unsafe.

---
 lib/vm/src/instance.rs | 12 +++++++++++-
 1 file changed, 11 insertions(+), 1 deletion(-)

diff --git a/lib/vm/src/instance.rs b/lib/vm/src/instance.rs
index 15f024bc889..fdf0bb9ebc9 100644
--- a/lib/vm/src/instance.rs
+++ b/lib/vm/src/instance.rs
@@ -737,7 +737,14 @@ impl InstanceAllocator {
     /// filled. `self_ptr` and `self_layout` must be the pointer and
     /// the layout returned by `Self::allocate_self` used to build
     /// `Self`.
-    fn new(instance: NonNull<Instance>, instance_layout: Layout) -> Self {
+    ///
+    /// # Safety
+    ///
+    /// `instance` must be a non-null, non-dangling, properly aligned,
+    /// and correctly initialized pointer to `Instance`. See
+    /// `InstanceHandle::new` for an example of how to correctly use
+    /// this API.
+    unsafe fn new(instance: NonNull<Instance>, instance_layout: Layout) -> Self {
         Self {
             strong: Arc::new(atomic::AtomicUsize::new(1)),
             instance_layout,
@@ -1008,6 +1015,9 @@ impl InstanceHandle {
         // `instance_ptr` is passed to `InstanceAllocator`, which
         // makes it the only “owner” (it doesn't own the value,
         // it's just the semantics we define).
+        //
+        // SAFETY: `instance_ptr` fulfills all the requirements of
+        // `InstanceAllocator::new`.
         let instance_allocator = InstanceAllocator::new(instance_ptr, instance_layout);
 
         Self {