diff --git a/benches/call.rs b/benches/call.rs index 6834a6891ad7..601b81bbfed9 100644 --- a/benches/call.rs +++ b/benches/call.rs @@ -189,7 +189,7 @@ fn bench_host_to_wasm( let mut space = vec![ValRaw::i32(0); params.len().max(results.len())]; b.iter(|| unsafe { for (i, param) in params.iter().enumerate() { - space[i] = param.to_raw(&mut *store); + space[i] = param.to_raw(&mut *store).unwrap(); } untyped .call_unchecked(&mut *store, space.as_mut_ptr(), space.len()) @@ -348,7 +348,7 @@ fn wasm_to_host(c: &mut Criterion) { Val::I64(0) => {} _ => unreachable!(), } - space[0] = Val::F32(0).to_raw(&mut caller); + space[0] = Val::F32(0).to_raw(&mut caller).unwrap(); Ok(()) }) .unwrap(); diff --git a/crates/c-api/include/wasmtime.h b/crates/c-api/include/wasmtime.h index 6ba3e76b8f03..e408dd877905 100644 --- a/crates/c-api/include/wasmtime.h +++ b/crates/c-api/include/wasmtime.h @@ -152,9 +152,7 @@ * provided access to it. For example in a host function created with * #wasmtime_func_new you can use #wasmtime_context_t in the host function * callback. This is because an argument, a #wasmtime_caller_t, provides access - * to #wasmtime_context_t. On the other hand a destructor passed to - * #wasmtime_externref_new, however, cannot use a #wasmtime_context_t because - * it was not provided access to one. Doing so may lead to memory unsafety. + * to #wasmtime_context_t. * * ### Stores * diff --git a/crates/c-api/include/wasmtime/val.h b/crates/c-api/include/wasmtime/val.h index 61c82873be50..4c047b4955ef 100644 --- a/crates/c-api/include/wasmtime/val.h +++ b/crates/c-api/include/wasmtime/val.h @@ -14,81 +14,6 @@ extern "C" { #endif -/** - * \typedef wasmtime_externref_t - * \brief Convenience alias for #wasmtime_externref - * - * \struct wasmtime_externref - * \brief A host-defined un-forgeable reference to pass into WebAssembly. - * - * This structure represents an `externref` that can be passed to WebAssembly. 
- * It cannot be forged by WebAssembly itself and is guaranteed to have been - * created by the host. - */ -typedef struct wasmtime_externref wasmtime_externref_t; - -/** - * \brief Create a new `externref` value. - * - * Creates a new `externref` value wrapping the provided data, returning the - * pointer to the externref. - * - * \param data the host-specific data to wrap - * \param finalizer an optional finalizer for `data` - * - * When the reference is reclaimed, the wrapped data is cleaned up with the - * provided `finalizer`. - * - * The returned value must be deleted with #wasmtime_externref_delete - */ -WASM_API_EXTERN wasmtime_externref_t * -wasmtime_externref_new(void *data, void (*finalizer)(void *)); - -/** - * \brief Get an `externref`'s wrapped data - * - * Returns the original `data` passed to #wasmtime_externref_new. It is required - * that `data` is not `NULL`. - */ -WASM_API_EXTERN void *wasmtime_externref_data(wasmtime_externref_t *data); - -/** - * \brief Creates a shallow copy of the `externref` argument, returning a - * separately owned pointer (increases the reference count). - */ -WASM_API_EXTERN wasmtime_externref_t * -wasmtime_externref_clone(wasmtime_externref_t *ref); - -/** - * \brief Decrements the reference count of the `ref`, deleting it if it's the - * last reference. - */ -WASM_API_EXTERN void wasmtime_externref_delete(wasmtime_externref_t *ref); - -/** - * \brief Converts a raw `externref` value coming from #wasmtime_val_raw_t into - * a #wasmtime_externref_t. - * - * Note that the returned #wasmtime_externref_t is an owned value that must be - * deleted via #wasmtime_externref_delete by the caller if it is non-null. - */ -WASM_API_EXTERN wasmtime_externref_t * -wasmtime_externref_from_raw(wasmtime_context_t *context, void *raw); - -/** - * \brief Converts a #wasmtime_externref_t to a raw value suitable for storing - * into a #wasmtime_val_raw_t. 
- * - * Note that the returned underlying value is not tracked by Wasmtime's garbage - * collector until it enters WebAssembly. This means that a GC may release the - * context's reference to the raw value, making the raw value invalid within the - * context of the store. Do not perform a GC between calling this function and - * passing it to WebAssembly. - */ -WASM_API_EXTERN void * -wasmtime_externref_to_raw(wasmtime_context_t *context, - const wasmtime_externref_t *ref); - /// \brief Discriminant stored in #wasmtime_val::kind typedef uint8_t wasmtime_valkind_t; /// \brief Value of #wasmtime_valkind_t meaning that #wasmtime_val_t is an i32 @@ -104,9 +29,6 @@ typedef uint8_t wasmtime_valkind_t; /// \brief Value of #wasmtime_valkind_t meaning that #wasmtime_val_t is a /// funcref #define WASMTIME_FUNCREF 5 -/// \brief Value of #wasmtime_valkind_t meaning that #wasmtime_val_t is an -/// externref -#define WASMTIME_EXTERNREF 6 /// \brief A 128-bit value representing the WebAssembly `v128` type. Bytes are /// stored in little-endian order. @@ -136,11 +58,6 @@ typedef union wasmtime_valunion { /// If this value represents a `ref.null func` value then the `store_id` field /// is set to zero. wasmtime_func_t funcref; - /// Field used if #wasmtime_val_t::kind is #WASMTIME_EXTERNREF - /// - /// If this value represents a `ref.null extern` value then this pointer will - /// be `NULL`. - wasmtime_externref_t *externref; /// Field used if #wasmtime_val_t::kind is #WASMTIME_V128 wasmtime_v128 v128; } wasmtime_valunion_t; @@ -186,14 +103,6 @@ typedef union wasmtime_val_raw { /// /// Note that this field is always stored in a little-endian format. void *funcref; - /// Field for when this val is a WebAssembly `externref` value. - /// - /// If this is set to 0 then it's a null externref, otherwise this must be - /// passed to `wasmtime_externref_from_raw` to determine the - /// `wasmtime_externref_t`. - /// - /// Note that this field is always stored in a little-endian format. 
- void *externref; } wasmtime_val_raw_t; /** @@ -203,11 +112,9 @@ typedef union wasmtime_val_raw { * \union wasmtime_val * \brief Container for different kinds of wasm values. * - * Note that this structure may contain an owned value, namely - * #wasmtime_externref_t, depending on the context in which this is used. APIs - * which consume a #wasmtime_val_t do not take ownership, but APIs that return - * #wasmtime_val_t require that #wasmtime_val_delete is called to deallocate - * the value. + * APIs which consume a #wasmtime_val_t do not take ownership, but APIs that + * return #wasmtime_val_t require that #wasmtime_val_delete is called to + * deallocate the value. */ typedef struct wasmtime_val { /// Discriminant of which field of #of is valid. @@ -222,12 +129,14 @@ typedef struct wasmtime_val { * Note that this only deletes the contents, not the memory that `val` points to * itself (which is owned by the caller). */ -WASM_API_EXTERN void wasmtime_val_delete(wasmtime_val_t *val); +WASM_API_EXTERN void wasmtime_val_delete(wasmtime_context_t *context, + wasmtime_val_t *val); /** * \brief Copies `src` into `dst`. 
*/ -WASM_API_EXTERN void wasmtime_val_copy(wasmtime_val_t *dst, +WASM_API_EXTERN void wasmtime_val_copy(wasmtime_context_t *context, + wasmtime_val_t *dst, const wasmtime_val_t *src); #ifdef __cplusplus diff --git a/crates/c-api/src/async.rs b/crates/c-api/src/async.rs index 99ee3387b499..a5952297e4d1 100644 --- a/crates/c-api/src/async.rs +++ b/crates/c-api/src/async.rs @@ -113,7 +113,12 @@ async fn invoke_c_async_callback<'a>( let mut hostcall_val_storage = mem::take(&mut caller.data_mut().hostcall_val_storage); debug_assert!(hostcall_val_storage.is_empty()); hostcall_val_storage.reserve(params.len() + results.len()); - hostcall_val_storage.extend(params.iter().cloned().map(|p| wasmtime_val_t::from_val(p))); + hostcall_val_storage.extend( + params + .iter() + .cloned() + .map(|p| wasmtime_val_t::from_val(&mut caller, p)), + ); hostcall_val_storage.extend((0..results.len()).map(|_| wasmtime_val_t { kind: WASMTIME_I32, of: wasmtime_val_union { i32: 0 }, @@ -151,7 +156,7 @@ async fn invoke_c_async_callback<'a>( // Translate the `wasmtime_val_t` results into the `results` space for (i, result) in out_results.iter().enumerate() { unsafe { - results[i] = result.to_val(); + results[i] = result.to_val(&mut caller.caller); } } // Move our `vals` storage back into the store now that we no longer @@ -229,7 +234,7 @@ async fn do_func_call_async( match result { Ok(()) => { for (slot, val) in results.iter_mut().zip(wt_results.iter()) { - crate::initialize(slot, wasmtime_val_t::from_val(val.clone())); + crate::initialize(slot, wasmtime_val_t::from_val(&mut store, val.clone())); } params.truncate(0); store.data_mut().wasm_val_storage = params; @@ -240,7 +245,7 @@ async fn do_func_call_async( #[no_mangle] pub unsafe extern "C" fn wasmtime_func_call_async<'a>( - store: CStoreContextMut<'a>, + mut store: CStoreContextMut<'a>, func: &'a Func, args: *const wasmtime_val_t, nargs: usize, @@ -251,10 +256,16 @@ pub unsafe extern "C" fn wasmtime_func_call_async<'a>( ) -> Box> { let args = 
crate::slice_from_raw_parts(args, nargs) .iter() - .map(|i| i.to_val()); + .map(|i| i.to_val(&mut store)) + .collect::>(); let results = crate::slice_from_raw_parts_mut(results, nresults); let fut = Box::pin(do_func_call_async( - store, func, args, results, trap_ret, err_ret, + store, + func, + args.into_iter(), + results, + trap_ret, + err_ret, )); Box::new(wasmtime_call_future_t { underlying: fut }) } diff --git a/crates/c-api/src/func.rs b/crates/c-api/src/func.rs index 4d02707085c0..52d6c526abcf 100644 --- a/crates/c-api/src/func.rs +++ b/crates/c-api/src/func.rs @@ -249,7 +249,12 @@ pub(crate) unsafe fn c_callback_to_rust_fn( let mut vals = mem::take(&mut caller.data_mut().hostcall_val_storage); debug_assert!(vals.is_empty()); vals.reserve(params.len() + results.len()); - vals.extend(params.iter().cloned().map(|p| wasmtime_val_t::from_val(p))); + vals.extend( + params + .iter() + .cloned() + .map(|p| wasmtime_val_t::from_val(&mut caller, p)), + ); vals.extend((0..results.len()).map(|_| wasmtime_val_t { kind: crate::WASMTIME_I32, of: wasmtime_val_union { i32: 0 }, @@ -272,7 +277,7 @@ pub(crate) unsafe fn c_callback_to_rust_fn( // Translate the `wasmtime_val_t` results into the `results` space for (i, result) in out_results.iter().enumerate() { - results[i] = result.to_val(); + results[i] = result.to_val(&mut caller.caller); } // Move our `vals` storage back into the store now that we no longer @@ -330,7 +335,7 @@ pub unsafe extern "C" fn wasmtime_func_call( &mut params, crate::slice_from_raw_parts(args, nargs) .iter() - .map(|i| i.to_val()), + .map(|i| i.to_val(&mut store)), nresults, ); @@ -345,7 +350,7 @@ pub unsafe extern "C" fn wasmtime_func_call( Ok(Ok(())) => { let results = crate::slice_from_raw_parts_mut(results, nresults); for (slot, val) in results.iter_mut().zip(wt_results.iter()) { - crate::initialize(slot, wasmtime_val_t::from_val(val.clone())); + crate::initialize(slot, wasmtime_val_t::from_val(&mut store, val.clone())); } params.truncate(0); 
store.data_mut().wasm_val_storage = params; diff --git a/crates/c-api/src/global.rs b/crates/c-api/src/global.rs index 653855b52894..8fcc392e2170 100644 --- a/crates/c-api/src/global.rs +++ b/crates/c-api/src/global.rs @@ -79,12 +79,13 @@ pub unsafe extern "C" fn wasm_global_set(g: &mut wasm_global_t, val: &wasm_val_t #[no_mangle] pub unsafe extern "C" fn wasmtime_global_new( - store: CStoreContextMut<'_>, + mut store: CStoreContextMut<'_>, gt: &wasm_globaltype_t, val: &wasmtime_val_t, ret: &mut Global, ) -> Option> { - let global = Global::new(store, gt.ty().ty.clone(), val.to_val()); + let val = val.to_val(&mut store); + let global = Global::new(store, gt.ty().ty.clone(), val); handle_result(global, |global| { *ret = global; }) @@ -100,18 +101,20 @@ pub extern "C" fn wasmtime_global_type( #[no_mangle] pub extern "C" fn wasmtime_global_get( - store: CStoreContextMut<'_>, + mut store: CStoreContextMut<'_>, global: &Global, val: &mut MaybeUninit, ) { - crate::initialize(val, wasmtime_val_t::from_val(global.get(store))) + let gval = global.get(&mut store); + crate::initialize(val, wasmtime_val_t::from_val(store, gval)) } #[no_mangle] pub unsafe extern "C" fn wasmtime_global_set( - store: CStoreContextMut<'_>, + mut store: CStoreContextMut<'_>, global: &Global, val: &wasmtime_val_t, ) -> Option> { - handle_result(global.set(store, val.to_val()), |()| {}) + let val = val.to_val(&mut store); + handle_result(global.set(store, val), |()| {}) } diff --git a/crates/c-api/src/ref.rs b/crates/c-api/src/ref.rs index 2e90682c1061..19085ae4b58e 100644 --- a/crates/c-api/src/ref.rs +++ b/crates/c-api/src/ref.rs @@ -41,14 +41,9 @@ pub extern "C" fn wasm_ref_copy(r: Option<&wasm_ref_t>) -> Option, b: Option<&wasm_ref_t>) -> bool { - match (a.map(|a| &a.r), b.map(|b| &b.r)) { - (Some(Ref::Extern(Some(a))), Some(Ref::Extern(Some(b)))) => a.ptr_eq(b), - (None, None) => true, - // Note: we don't support equality for `Func`, so we always return - // `false` for `funcref`s. 
- _ => false, - } +pub extern "C" fn wasm_ref_same(_a: Option<&wasm_ref_t>, _b: Option<&wasm_ref_t>) -> bool { + // We need a store to determine whether these are the same reference or not. + abort("wasm_ref_same") } #[no_mangle] diff --git a/crates/c-api/src/table.rs b/crates/c-api/src/table.rs index 9852d13354ac..f4ffb8cc1120 100644 --- a/crates/c-api/src/table.rs +++ b/crates/c-api/src/table.rs @@ -115,13 +115,13 @@ pub extern "C" fn wasm_table_as_extern_const(t: &wasm_table_t) -> &wasm_extern_t #[no_mangle] pub unsafe extern "C" fn wasmtime_table_new( - store: CStoreContextMut<'_>, + mut store: CStoreContextMut<'_>, tt: &wasm_tabletype_t, init: &wasmtime_val_t, out: &mut Table, ) -> Option> { handle_result( - init.to_val() + init.to_val(&mut store) .ref_() .ok_or_else(|| anyhow!("wasmtime_table_new init value is not a reference")) .and_then(|init| Table::new(store, tt.ty().ty.clone(), init)), @@ -139,14 +139,14 @@ pub unsafe extern "C" fn wasmtime_table_type( #[no_mangle] pub extern "C" fn wasmtime_table_get( - store: CStoreContextMut<'_>, + mut store: CStoreContextMut<'_>, table: &Table, index: u32, ret: &mut MaybeUninit, ) -> bool { - match table.get(store, index) { + match table.get(&mut store, index) { Some(r) => { - crate::initialize(ret, wasmtime_val_t::from_val(r.into())); + crate::initialize(ret, wasmtime_val_t::from_val(store, r.into())); true } None => false, @@ -155,13 +155,13 @@ pub extern "C" fn wasmtime_table_get( #[no_mangle] pub unsafe extern "C" fn wasmtime_table_set( - store: CStoreContextMut<'_>, + mut store: CStoreContextMut<'_>, table: &Table, index: u32, val: &wasmtime_val_t, ) -> Option> { handle_result( - val.to_val() + val.to_val(&mut store) .ref_() .ok_or_else(|| anyhow!("wasmtime_table_set value is not a reference")) .and_then(|val| table.set(store, index, val)), @@ -176,14 +176,14 @@ pub extern "C" fn wasmtime_table_size(store: CStoreContext<'_>, table: &Table) - #[no_mangle] pub unsafe extern "C" fn wasmtime_table_grow( - store: 
CStoreContextMut<'_>, + mut store: CStoreContextMut<'_>, table: &Table, delta: u32, val: &wasmtime_val_t, prev_size: &mut u32, ) -> Option> { handle_result( - val.to_val() + val.to_val(&mut store) .ref_() .ok_or_else(|| anyhow!("wasmtime_table_grow value is not a reference")) .and_then(|val| table.grow(store, delta, val)), diff --git a/crates/c-api/src/val.rs b/crates/c-api/src/val.rs index 9c2ac5a0d319..d0bd355c5260 100644 --- a/crates/c-api/src/val.rs +++ b/crates/c-api/src/val.rs @@ -3,10 +3,9 @@ use crate::{ from_valtype, into_valtype, wasm_ref_t, wasm_valkind_t, wasmtime_valkind_t, CStoreContextMut, WASM_I32, }; -use std::ffi::c_void; use std::mem::{self, ManuallyDrop, MaybeUninit}; use std::ptr; -use wasmtime::{ExternRef, Func, HeapType, Ref, Val, ValType}; +use wasmtime::{AsContextMut, Func, HeapType, Ref, Val, ValType}; #[repr(C)] pub struct wasm_val_t { @@ -85,16 +84,7 @@ impl wasm_val_t { kind: from_valtype(&ValType::F64), of: wasm_val_union { u64: f }, }, - Val::ExternRef(r) => wasm_val_t { - kind: from_valtype(&ValType::EXTERNREF), - of: wasm_val_union { - ref_: r.map_or(ptr::null_mut(), |r| { - Box::into_raw(Box::new(wasm_ref_t { - r: Ref::Extern(Some(r)), - })) - }), - }, - }, + Val::ExternRef(_) => crate::abort("externref"), Val::FuncRef(f) => wasm_val_t { kind: from_valtype(&ValType::FUNCREF), of: wasm_val_union { @@ -160,7 +150,6 @@ pub union wasmtime_val_union { pub f32: u32, pub f64: u64, pub funcref: wasmtime_func_t, - pub externref: ManuallyDrop>, pub v128: [u8; 16], } @@ -172,7 +161,10 @@ pub struct wasmtime_func_t { } impl wasmtime_val_t { - pub fn from_val(val: Val) -> wasmtime_val_t { + pub fn from_val(cx: impl AsContextMut, val: Val) -> wasmtime_val_t { + // TODO: Needed for when we re-add externref support. 
+ let _ = cx; + match val { Val::I32(i) => wasmtime_val_t { kind: crate::WASMTIME_I32, @@ -190,12 +182,7 @@ impl wasmtime_val_t { kind: crate::WASMTIME_F64, of: wasmtime_val_union { f64: i }, }, - Val::ExternRef(e) => wasmtime_val_t { - kind: crate::WASMTIME_EXTERNREF, - of: wasmtime_val_union { - externref: ManuallyDrop::new(e), - }, - }, + Val::ExternRef(_) => crate::abort("externref"), Val::FuncRef(func) => wasmtime_val_t { kind: crate::WASMTIME_FUNCREF, of: wasmtime_val_union { @@ -217,7 +204,10 @@ impl wasmtime_val_t { } } - pub unsafe fn to_val(&self) -> Val { + pub unsafe fn to_val(&self, cx: impl AsContextMut) -> Val { + // TODO: needed for when we re-add externref support. + let _ = cx; + match self.kind { crate::WASMTIME_I32 => Val::I32(self.of.i32), crate::WASMTIME_I64 => Val::I64(self.of.i64), @@ -233,7 +223,6 @@ impl wasmtime_val_t { Some(mem::transmute::(self.of.funcref)) }) } - crate::WASMTIME_EXTERNREF => Val::ExternRef((*self.of.externref).clone()), other => panic!("unknown wasmtime_valkind_t: {}", other), } } @@ -241,67 +230,27 @@ impl wasmtime_val_t { impl Drop for wasmtime_val_t { fn drop(&mut self) { - if self.kind == crate::WASMTIME_EXTERNREF { - unsafe { - ManuallyDrop::drop(&mut self.of.externref); - } - } + // TODO: this drop impl is needed for when we re-add externref support. } } #[no_mangle] -pub unsafe extern "C" fn wasmtime_val_delete(val: &mut ManuallyDrop) { +pub unsafe extern "C" fn wasmtime_val_delete( + cx: CStoreContextMut<'_>, + val: &mut ManuallyDrop, +) { + // TODO: needed for when we re-add externref support. 
+ let _ = cx; + ManuallyDrop::drop(val) } #[no_mangle] pub unsafe extern "C" fn wasmtime_val_copy( + mut cx: CStoreContextMut<'_>, dst: &mut MaybeUninit, src: &wasmtime_val_t, ) { - crate::initialize(dst, wasmtime_val_t::from_val(src.to_val())) -} - -#[no_mangle] -pub extern "C" fn wasmtime_externref_new( - data: *mut c_void, - finalizer: Option, -) -> ExternRef { - ExternRef::new(crate::ForeignData { data, finalizer }) -} - -#[no_mangle] -pub extern "C" fn wasmtime_externref_data(externref: ManuallyDrop) -> *mut c_void { - externref - .data() - .downcast_ref::() - .unwrap() - .data -} - -#[no_mangle] -pub extern "C" fn wasmtime_externref_clone(externref: ManuallyDrop) -> ExternRef { - (*externref).clone() -} - -#[no_mangle] -pub extern "C" fn wasmtime_externref_delete(_val: Option) {} - -#[no_mangle] -pub unsafe extern "C" fn wasmtime_externref_to_raw( - cx: CStoreContextMut<'_>, - val: Option>, -) -> *mut c_void { - match val { - Some(ptr) => ptr.to_raw(cx), - None => ptr::null_mut(), - } -} - -#[no_mangle] -pub unsafe extern "C" fn wasmtime_externref_from_raw( - _cx: CStoreContextMut<'_>, - val: *mut c_void, -) -> Option { - ExternRef::from_raw(val) + let val = src.to_val(&mut cx); + crate::initialize(dst, wasmtime_val_t::from_val(cx, val)) } diff --git a/crates/cranelift/src/func_environ.rs b/crates/cranelift/src/func_environ.rs index 57b965c3a15e..b0b0cb5acd14 100644 --- a/crates/cranelift/src/func_environ.rs +++ b/crates/cranelift/src/func_environ.rs @@ -361,20 +361,16 @@ impl<'module_environment> FuncEnvironment<'module_environment> { delta: i64, ) -> ir::Value { debug_assert!(delta == -1 || delta == 1); - let pointer_type = self.pointer_type(); - - // If this changes that's ok, the `atomic_rmw` below just needs to be - // preceded with an add instruction of `externref` and the offset. 
- assert_eq!(self.offsets.vm_extern_data_ref_count(), 0); - let delta = builder.ins().iconst(pointer_type, delta); - builder.ins().atomic_rmw( - pointer_type, - ir::MemFlags::trusted(), - ir::AtomicRmwOp::Add, - externref, - delta, - ) + let offset = i32::try_from(self.offsets.vm_extern_data_ref_count()).unwrap(); + let count = builder + .ins() + .load(pointer_type, ir::MemFlags::trusted(), externref, offset); + let new_count = builder.ins().iadd_imm(count, delta); + builder + .ins() + .store(ir::MemFlags::trusted(), new_count, externref, offset); + new_count } fn get_global_location( @@ -1643,9 +1639,8 @@ impl<'module_environment> cranelift_wasm::FuncEnvironment for FuncEnvironment<'m ); builder.switch_to_block(dec_ref_count_block); - let prev_ref_count = self.mutate_externref_ref_count(builder, current_elem, -1); - let one = builder.ins().iconst(pointer_type, 1); - let cond = builder.ins().icmp(IntCC::Equal, one, prev_ref_count); + let new_ref_count = self.mutate_externref_ref_count(builder, current_elem, -1); + let cond = builder.ins().icmp_imm(IntCC::Equal, new_ref_count, 0); builder .ins() .brif(cond, drop_block, &[], continue_block, &[]); diff --git a/crates/fuzzing/src/oracles.rs b/crates/fuzzing/src/oracles.rs index 93aa49fdd645..e5543e3ef2dd 100644 --- a/crates/fuzzing/src/oracles.rs +++ b/crates/fuzzing/src/oracles.rs @@ -638,11 +638,11 @@ pub fn table_ops( caller.gc(); } - let a = ExternRef::new(CountDrops(num_dropped.clone())); - let b = ExternRef::new(CountDrops(num_dropped.clone())); - let c = ExternRef::new(CountDrops(num_dropped.clone())); + let a = ExternRef::new(&mut caller, CountDrops(num_dropped.clone())); + let b = ExternRef::new(&mut caller, CountDrops(num_dropped.clone())); + let c = ExternRef::new(&mut caller, CountDrops(num_dropped.clone())); - log::info!("table_ops: make_refs() -> ({:p}, {:p}, {:p})", a, b, c); + log::info!("table_ops: make_refs() -> ({:?}, {:?}, {:?})", a, b, c); expected_drops.fetch_add(3, SeqCst); results[0] = 
Some(a).into(); @@ -656,39 +656,30 @@ pub fn table_ops( linker .func_wrap("", "take_refs", { let expected_drops = expected_drops.clone(); - move |a: Option, b: Option, c: Option| { - log::info!( - "table_ops: take_refs({}, {}, {})", - a.as_ref().map_or_else( - || format!("{:p}", std::ptr::null::<()>()), - |r| format!("{:p}", *r) - ), - b.as_ref().map_or_else( - || format!("{:p}", std::ptr::null::<()>()), - |r| format!("{:p}", *r) - ), - c.as_ref().map_or_else( - || format!("{:p}", std::ptr::null::<()>()), - |r| format!("{:p}", *r) - ), - ); + move |caller: Caller<'_, StoreLimits>, + a: Option>, + b: Option>, + c: Option>| + -> Result<()> { + log::info!("table_ops: take_refs({a:?}, {b:?}, {c:?})",); // Do the assertion on each ref's inner data, even though it // all points to the same atomic, so that if we happen to // run into a use-after-free bug with one of these refs we // are more likely to trigger a segfault. if let Some(a) = a { - let a = a.data().downcast_ref::().unwrap(); + let a = a.data(&caller)?.downcast_ref::().unwrap(); assert!(a.0.load(SeqCst) <= expected_drops.load(SeqCst)); } if let Some(b) = b { - let b = b.data().downcast_ref::().unwrap(); + let b = b.data(&caller)?.downcast_ref::().unwrap(); assert!(b.0.load(SeqCst) <= expected_drops.load(SeqCst)); } if let Some(c) = c { - let c = c.data().downcast_ref::().unwrap(); + let c = c.data(&caller)?.downcast_ref::().unwrap(); assert!(c.0.load(SeqCst) <= expected_drops.load(SeqCst)); } + Ok(()) } }) .unwrap(); @@ -704,12 +695,15 @@ pub fn table_ops( let func = Func::new(&mut store, func_ty, { let num_dropped = num_dropped.clone(); let expected_drops = expected_drops.clone(); - move |_caller, _params, results| { + move |mut caller, _params, results| { log::info!("table_ops: make_refs"); expected_drops.fetch_add(3, SeqCst); - results[0] = Some(ExternRef::new(CountDrops(num_dropped.clone()))).into(); - results[1] = Some(ExternRef::new(CountDrops(num_dropped.clone()))).into(); - results[2] = 
Some(ExternRef::new(CountDrops(num_dropped.clone()))).into(); + results[0] = + Some(ExternRef::new(&mut caller, CountDrops(num_dropped.clone()))).into(); + results[1] = + Some(ExternRef::new(&mut caller, CountDrops(num_dropped.clone()))).into(); + results[2] = + Some(ExternRef::new(&mut caller, CountDrops(num_dropped.clone()))).into(); Ok(()) } }); @@ -718,23 +712,31 @@ pub fn table_ops( let instance = linker.instantiate(&mut store, &module).unwrap(); let run = instance.get_func(&mut store, "run").unwrap(); - let args: Vec<_> = (0..ops.num_params) - .map(|_| Val::ExternRef(Some(ExternRef::new(CountDrops(num_dropped.clone()))))) - .collect(); - - // The generated function should always return a trap. The only two - // valid traps are table-out-of-bounds which happens through `table.get` - // and `table.set` generated or an out-of-fuel trap. Otherwise any other - // error is unexpected and should fail fuzzing. - let trap = run - .call(&mut store, &args, &mut []) - .unwrap_err() - .downcast::() - .unwrap(); - - match trap { - Trap::TableOutOfBounds | Trap::OutOfFuel => {} - _ => panic!("unexpected trap: {trap}"), + { + let mut scope = RootScope::new(&mut store); + let args: Vec<_> = (0..ops.num_params) + .map(|_| { + Val::ExternRef(Some(ExternRef::new( + &mut scope, + CountDrops(num_dropped.clone()), + ))) + }) + .collect(); + + // The generated function should always return a trap. The only two + // valid traps are table-out-of-bounds which happens through `table.get` + // and `table.set` generated or an out-of-fuel trap. Otherwise any other + // error is unexpected and should fail fuzzing. + let trap = run + .call(&mut scope, &args, &mut []) + .unwrap_err() + .downcast::() + .unwrap(); + + match trap { + Trap::TableOutOfBounds | Trap::OutOfFuel => {} + _ => panic!("unexpected trap: {trap}"), + } } // Do a final GC after running the Wasm. 
diff --git a/crates/runtime/src/externref.rs b/crates/runtime/src/externref.rs deleted file mode 100644 index 547a87410b7b..000000000000 --- a/crates/runtime/src/externref.rs +++ /dev/null @@ -1,24 +0,0 @@ -#[cfg(feature = "gc")] -mod gc; -#[cfg(feature = "gc")] -pub use gc::*; - -#[cfg(not(feature = "gc"))] -mod no_gc; -#[cfg(not(feature = "gc"))] -pub use no_gc::*; - -use wasmtime_environ::StackMap; - -/// Used by the runtime to lookup information about a module given a -/// program counter value. -pub trait ModuleInfoLookup { - /// Lookup the module information from a program counter value. - fn lookup(&self, pc: usize) -> Option<&dyn ModuleInfo>; -} - -/// Used by the runtime to query module information. -pub trait ModuleInfo { - /// Lookup the stack map at a program counter value. - fn lookup_stack_map(&self, pc: usize) -> Option<&StackMap>; -} diff --git a/crates/runtime/src/gc.rs b/crates/runtime/src/gc.rs new file mode 100644 index 000000000000..fdae15995bab --- /dev/null +++ b/crates/runtime/src/gc.rs @@ -0,0 +1,82 @@ +#[cfg(feature = "gc")] +mod enabled; +#[cfg(feature = "gc")] +pub use enabled::*; + +#[cfg(not(feature = "gc"))] +mod disabled; +#[cfg(not(feature = "gc"))] +pub use disabled::*; + +use crate::SendSyncPtr; +use std::ptr::NonNull; +use wasmtime_environ::StackMap; + +/// Used by the runtime to lookup information about a module given a +/// program counter value. +pub trait ModuleInfoLookup { + /// Lookup the module information from a program counter value. + fn lookup(&self, pc: usize) -> Option<&dyn ModuleInfo>; +} + +/// Used by the runtime to query module information. +pub trait ModuleInfo { + /// Lookup the stack map at a program counter value. + fn lookup_stack_map(&self, pc: usize) -> Option<&StackMap>; +} + +/// A raw, unrooted GC pointer. +/// +/// We know that the referent is some kind of GC object, but we don't know +/// exactly which kind. 
+#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)] +#[repr(transparent)] +pub struct VMGcRef(SendSyncPtr); + +impl std::fmt::Pointer for VMGcRef { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + self.as_non_null().fmt(f) + } +} + +impl VMGcRef { + /// The only type of valid `VMGcRef` is currently `VMExternRef`. + /// + /// Assert on this anywhere you are making that assumption, so that we know + /// all the places to update when it no longer holds true. + pub const ONLY_EXTERN_REF_IMPLEMENTED_YET: bool = true; + + /// Create a new `VMGcRef`. + /// + /// Returns `None` for null pointers. + /// + /// # Safety + /// + /// The given pointer must point to a valid GC-managed object. + pub unsafe fn from_ptr(raw: *mut u8) -> Option { + let raw = NonNull::new(raw)?; + Some(Self::from_non_null(raw)) + } + + /// Create a new `VMGcRef`. + /// + /// # Safety + /// + /// The given pointer must point to a valid GC-managed object. + pub unsafe fn from_non_null(raw: NonNull) -> Self { + VMGcRef(SendSyncPtr::new(raw)) + } + + /// Get this GC reference as a pointer. + /// + /// Note that the returned pointer does not point to a valid GC object when + /// `self.is_i31()`. + pub fn as_ptr(&self) -> *mut u8 { + self.0.as_ptr() + } + + /// Get this GC reference as a non-null pointer. + pub fn as_non_null(&self) -> NonNull { + self.0.as_non_null() + } +} diff --git a/crates/runtime/src/externref/no_gc.rs b/crates/runtime/src/gc/disabled.rs similarity index 100% rename from crates/runtime/src/externref/no_gc.rs rename to crates/runtime/src/gc/disabled.rs diff --git a/crates/runtime/src/externref/gc.rs b/crates/runtime/src/gc/enabled.rs similarity index 81% rename from crates/runtime/src/externref/gc.rs rename to crates/runtime/src/gc/enabled.rs index 6574799f386e..9d6df21edf74 100644 --- a/crates/runtime/src/externref/gc.rs +++ b/crates/runtime/src/gc/enabled.rs @@ -99,22 +99,18 @@ //! 
Examination of Deferred Reference Counting and Cycle Detection* by Quinane: //! -use crate::{Backtrace, ModuleInfoLookup, SendSyncPtr, VMRuntimeLimits}; +use crate::{Backtrace, ModuleInfoLookup, SendSyncPtr, VMGcRef, VMRuntimeLimits}; use std::alloc::Layout; use std::any::Any; -use std::cell::UnsafeCell; +use std::cell::{Cell, UnsafeCell}; use std::cmp; use std::collections::HashSet; use std::hash::{Hash, Hasher}; use std::mem; -use std::ops::Deref; use std::ptr::{self, NonNull}; -use std::sync::atomic::{self, AtomicUsize, Ordering}; /// An external reference to some opaque data. /// -/// `VMExternRef`s dereference to their underlying opaque data as `dyn Any`. -/// /// Unlike the `externref` in the Wasm spec, `VMExternRef`s are non-nullable, /// and always point to a valid value. You may use `Option` to /// represent nullable references, and `Option` is guaranteed to @@ -146,15 +142,15 @@ use std::sync::atomic::{self, AtomicUsize, Ordering}; /// let file = std::fs::File::create("some/file/path")?; /// /// // Wrap the file up as an `VMExternRef` that can be passed to Wasm. -/// let extern_ref_to_file = VMExternRef::new(file); +/// let extern_ref_to_file = unsafe { VMExternRef::new(file) }; /// -/// // `VMExternRef`s dereference to `dyn Any`, so you can use `Any` methods to -/// // perform runtime type checks and downcasts. +/// // Get the underlying `dyn Any` via the `data` method, so you can use `Any` +/// // methods to perform runtime type checks and downcasts. 
/// -/// assert!(extern_ref_to_file.is::()); -/// assert!(!extern_ref_to_file.is::()); +/// assert!(extern_ref_to_file.data().is::()); +/// assert!(!extern_ref_to_file.data().is::()); /// -/// if let Some(mut file) = extern_ref_to_file.downcast_ref::() { +/// if let Some(mut file) = extern_ref_to_file.data().downcast_ref::() { /// use std::io::Write; /// writeln!(&mut file, "Hello, `VMExternRef`!")?; /// } @@ -163,11 +159,37 @@ use std::sync::atomic::{self, AtomicUsize, Ordering}; /// ``` #[derive(Debug)] #[repr(transparent)] -pub struct VMExternRef(SendSyncPtr); +pub struct VMExternRef(VMGcRef); -impl std::fmt::Pointer for VMExternRef { - fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { - std::fmt::Pointer::fmt(&self.0, f) +impl VMGcRef { + /// Get this GC reference as a `VMExternRef`. + pub fn as_extern_ref(&self) -> &VMExternRef { + assert!(Self::ONLY_EXTERN_REF_IMPLEMENTED_YET); + assert_eq!( + std::mem::size_of::(), + std::mem::size_of::() + ); + assert_eq!( + std::mem::align_of::(), + std::mem::align_of::() + ); + let ptr = self as *const VMGcRef; + unsafe { &*(ptr.cast::()) } + } + + /// Get this GC reference as a mutable `VMExternRef`. + pub fn as_extern_ref_mut(&mut self) -> &mut VMExternRef { + assert!(Self::ONLY_EXTERN_REF_IMPLEMENTED_YET); + assert_eq!( + std::mem::size_of::(), + std::mem::size_of::() + ); + assert_eq!( + std::mem::align_of::(), + std::mem::align_of::() + ); + let ptr = self as *mut VMGcRef; + unsafe { &mut *(ptr.cast::()) } } } @@ -180,19 +202,30 @@ pub(crate) struct VMExternData { // /// The reference count for this `VMExternData` and value. When it reaches /// zero, we can safely destroy the value and free this heap - /// allocation. This is an `UnsafeCell`, rather than plain `Cell`, because - /// it can be modified by compiled Wasm code. + /// allocation. This field can be modified by compiled Wasm code. 
/// /// Note: this field's offset must be kept in sync with /// `wasmtime_environ::VMOffsets::vm_extern_data_ref_count()` which is /// currently always zero. - ref_count: AtomicUsize, + ref_count: Cell, /// Always points to the implicit, dynamically-sized `value` member that /// precedes this `VMExternData`. value_ptr: SendSyncPtr, } +// It is up to `VMExternRef` users to uphold that instances are used in a way +// that is `Send`/`Sync`. This is part of the unsafe contract of +// `VMExternRef::new`. +unsafe impl Send for VMExternRef {} +unsafe impl Sync for VMExternRef {} + +impl std::fmt::Pointer for VMExternRef { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + std::fmt::Pointer::fmt(&self.0, f) + } +} + impl Clone for VMExternRef { #[inline] fn clone(&self) -> VMExternRef { @@ -206,17 +239,15 @@ impl Drop for VMExternRef { fn drop(&mut self) { let data = self.extern_data(); - // Note that the memory orderings here also match the standard library - // itself. Documentation is more available in the implementation of - // `Arc`, but the general idea is that this is a special pattern allowed - // by the C standard with atomic orderings where we "release" for all - // the decrements and only the final decrementer performs an acquire - // fence. This properly ensures that the final thread, which actually - // destroys the data, sees all the updates from all other threads. - if data.ref_count.fetch_sub(1, Ordering::Release) != 1 { + debug_assert!(data.ref_count.get() > 0); + data.ref_count.set(data.ref_count.get() - 1); + log::trace!( + "Decrementing ref count of externref @ 0x{data:p} -> {}", + data.ref_count.get() + ); + if data.ref_count.get() > 0 { return; } - atomic::fence(Ordering::Acquire); unsafe { VMExternData::drop_and_dealloc(self.0); @@ -254,15 +285,19 @@ impl VMExternData { } /// Drop the inner value and then free this `VMExternData` heap allocation. 
- pub(crate) unsafe fn drop_and_dealloc(mut data: SendSyncPtr) { - log::trace!("Dropping externref data @ {:p}", data); + /// + /// The given `gc_ref` must point to a valid `VMExternData`. + pub(crate) unsafe fn drop_and_dealloc(gc_ref: VMGcRef) { + log::trace!("Dropping externref data @ {gc_ref:#p}"); + assert!(VMGcRef::ONLY_EXTERN_REF_IMPLEMENTED_YET); + let mut data = gc_ref.0.cast::(); // Note: we introduce a block scope so that we drop the live // reference to the data before we free the heap allocation it // resides within after this block. let (alloc_ptr, layout) = { let data = data.as_mut(); - debug_assert_eq!(data.ref_count.load(Ordering::SeqCst), 0); + debug_assert_eq!(data.ref_count.get(), 0); // Same thing, but for the dropping the reference to `value` before // we drop it itself. @@ -283,14 +318,12 @@ impl VMExternData { #[inline] fn increment_ref_count(&self) { - // This is only using during cloning operations, and like the standard - // library we use `Relaxed` here. The rationale is better documented in - // libstd's implementation of `Arc`, but the general gist is that we're - // creating a new pointer for our own thread, so there's no need to have - // any synchronization with orderings. The synchronization with other - // threads with respect to orderings happens when the pointer is sent to - // another thread. - self.ref_count.fetch_add(1, Ordering::Relaxed); + self.ref_count.set(self.ref_count.get() + 1); + log::trace!( + "Incrementing ref count of externref @ 0x{:p} -> {}", + self as *const _, + self.ref_count.get(), + ); } } @@ -303,7 +336,12 @@ fn round_up_to_align(n: usize, align: usize) -> Option { impl VMExternRef { /// Wrap the given value inside an `VMExternRef`. - pub fn new(value: T) -> VMExternRef + /// + /// # Safety + /// + /// Callers are responsible for ensuring that the `VMExternRef` is used in a + /// thread-safe way, because the internal reference counting is not atomic. 
+ pub unsafe fn new<T>(value: T) -> VMExternRef where T: 'static + Any + Send + Sync, { @@ -311,7 +349,12 @@ impl VMExternRef { } /// Construct a new `VMExternRef` in place by invoking `make_value`. - pub fn new_with<T>(make_value: impl FnOnce() -> T) -> VMExternRef + /// + /// # Safety + /// + /// Callers are responsible for ensuring that the `VMExternRef` is used in a + /// thread-safe way, because the internal reference counting is not atomic. + pub unsafe fn new_with<T>(make_value: impl FnOnce() -> T) -> VMExternRef where T: 'static + Any + Send + Sync, { @@ -327,36 +370,103 @@ impl VMExternRef { let value_ptr = alloc_ptr.cast::<T>(); ptr::write(value_ptr.as_ptr(), make_value()); - let extern_data_ptr = - alloc_ptr.cast::<u8>().as_ptr().add(footer_offset) as *mut VMExternData; + let extern_data_ptr = alloc_ptr + .cast::<u8>() + .as_ptr() + .add(footer_offset) + .cast::<VMExternData>(); ptr::write( extern_data_ptr, VMExternData { - ref_count: AtomicUsize::new(1), + ref_count: Cell::new(1), // Cast from `*mut T` to `*mut dyn Any` here. value_ptr: SendSyncPtr::new(NonNull::new_unchecked(value_ptr.as_ptr())), }, ); log::trace!("New externref data @ {:p}", extern_data_ptr); - VMExternRef(NonNull::new_unchecked(extern_data_ptr).into()) + VMExternRef(VMGcRef::from_non_null(NonNull::new_unchecked( + extern_data_ptr.cast::<u8>(), + ))) } } - /// Turn this `VMExternRef` into a raw, untyped pointer. + /// Turn this `VMExternRef` into its underlying GC reference. + /// + /// Unlike `into_gc_ref`, this does not consume and forget `self`. It is *not* + /// safe to use `from_gc_ref` on pointers returned from this method; only use + /// `clone_from_gc_ref`! + /// + /// Nor does this method increment the reference count. You must ensure that + /// `self` (or some other clone of `self`) stays alive until + /// `clone_from_gc_ref` is called. + #[inline] + pub fn as_gc_ref(&self) -> &VMGcRef { + &self.0 + } + + /// Consume this `VMExternRef` into a raw, untyped pointer.
+ /// + /// # Safety + /// + /// This method forgets self, so it is possible to create a leak of the + /// underlying reference counted data if not used carefully. + /// + /// Use `from_gc_ref` to recreate the `VMExternRef`. + pub unsafe fn into_gc_ref(self) -> VMGcRef { + let gc_ref = self.0; + std::mem::forget(self); + gc_ref + } + + /// Recreate a `VMExternRef` from a pointer returned from a previous call to + /// `{as,into}_gc_ref`. + /// + /// # Safety + /// + /// The given `gc_ref` must point to a valid `VMExternData`. + /// + /// Unlike `clone_from_gc_ref`, this does not increment the reference count + /// of the underlying data. It is not safe to continue to use the pointer + /// passed to this function. + #[inline] + pub unsafe fn from_gc_ref(gc_ref: VMGcRef) -> Self { + assert!(VMGcRef::ONLY_EXTERN_REF_IMPLEMENTED_YET); + VMExternRef(gc_ref) + } + + /// Recreate a `VMExternRef` from a pointer returned from a previous call to + /// `{as,into}_gc_ref`. + /// + /// # Safety + /// + /// Wildly unsafe to use with anything other than the result of a previous + /// `{as,into}_gc_ref` call! + /// + /// Additionally, it is your responsibility to ensure that this raw + /// `VMExternRef` is still valid, or else you could have use-after-free + /// bugs. + #[inline] + pub unsafe fn clone_from_gc_ref(gc_ref: VMGcRef) -> Self { + assert!(VMGcRef::ONLY_EXTERN_REF_IMPLEMENTED_YET); + let x = VMExternRef(gc_ref); + x.extern_data().increment_ref_count(); + x + } + + /// Get a raw pointer for this `VMExternRef`. /// /// Unlike `into_raw`, this does not consume and forget `self`. It is *not* /// safe to use `from_raw` on pointers returned from this method; only use /// `clone_from_raw`! /// - /// Nor does this method increment the reference count. You must ensure - /// that `self` (or some other clone of `self`) stays alive until - /// `clone_from_raw` is called. + /// Nor does this method increment the reference count. 
You must ensure that + /// `self` (or some other clone of `self`) stays alive until + /// `clone_from_raw` is called. #[inline] pub fn as_raw(&self) -> *mut u8 { - let ptr = self.0.as_ptr().cast::(); - ptr + self.0.as_ptr() } /// Consume this `VMExternRef` into a raw, untyped pointer. @@ -368,53 +478,55 @@ impl VMExternRef { /// /// Use `from_raw` to recreate the `VMExternRef`. pub unsafe fn into_raw(self) -> *mut u8 { - let ptr = self.0.as_ptr().cast::(); + let raw = self.0; std::mem::forget(self); - ptr + raw.as_ptr() } /// Recreate a `VMExternRef` from a pointer returned from a previous call to - /// `as_raw`. + /// `{as,into}_raw`. /// /// # Safety /// - /// Unlike `clone_from_raw`, this does not increment the reference count of the - /// underlying data. It is not safe to continue to use the pointer passed to this - /// function. + /// The given `raw` must point to a valid `VMExternData`. + /// + /// Unlike `clone_from_raw`, this does not increment the reference count of + /// the underlying data. It is not safe to continue to use the pointer + /// passed to this function. #[inline] - pub unsafe fn from_raw(ptr: *mut u8) -> Option { - Some(VMExternRef(NonNull::new(ptr)?.cast().into())) + pub unsafe fn from_raw(raw: *mut u8) -> Option { + VMGcRef::from_ptr(raw).map(|r| Self::from_gc_ref(r)) } /// Recreate a `VMExternRef` from a pointer returned from a previous call to - /// `as_raw`. + /// `{as,into}_raw`. /// /// # Safety /// /// Wildly unsafe to use with anything other than the result of a previous - /// `as_raw` call! + /// `{as,into}_raw` call! /// /// Additionally, it is your responsibility to ensure that this raw - /// `VMExternRef`'s reference count has not dropped to zero. Failure to do - /// so will result in use after free! + /// `VMExternRef` is still valid, or else you could have use-after-free + /// bugs. 
#[inline] - pub unsafe fn clone_from_raw(ptr: *mut u8) -> Option { - let x = VMExternRef(NonNull::new(ptr)?.cast::().into()); - x.extern_data().increment_ref_count(); - Some(x) + pub unsafe fn clone_from_raw(raw: *mut u8) -> Option { + VMGcRef::from_ptr(raw).map(|r| Self::clone_from_gc_ref(r)) } /// Get the strong reference count for this `VMExternRef`. - /// - /// Note that this loads with a `SeqCst` ordering to synchronize with other - /// threads. pub fn strong_count(&self) -> usize { - self.extern_data().ref_count.load(Ordering::SeqCst) + self.extern_data().ref_count.get() } #[inline] fn extern_data(&self) -> &VMExternData { - unsafe { self.0.as_ref() } + unsafe { self.0.as_non_null().cast().as_ref() } + } + + #[inline] + fn extern_data_mut(&mut self) -> &mut VMExternData { + unsafe { self.0.as_non_null().cast().as_mut() } } } @@ -457,14 +569,21 @@ impl VMExternRef { let b = b.0.as_ptr() as usize; a.cmp(&b) } -} - -impl Deref for VMExternRef { - type Target = dyn Any; - fn deref(&self) -> &dyn Any { + /// Get the underlying host data. + pub fn data(&self) -> &(dyn Any + Send + Sync) { unsafe { self.extern_data().value_ptr.as_ref() } } + + /// Get a mutable borrow of the underlying host data. + /// + /// # Safety + /// + /// It is up to the caller to ensure that there are not any other active + /// borrows of the data. 
+ pub unsafe fn data_mut(&mut self) -> &mut (dyn Any + Send + Sync) { + unsafe { self.extern_data_mut().value_ptr.as_mut() } + } } /// A wrapper around a `VMExternRef` that implements `Eq` and `Hash` with @@ -708,9 +827,9 @@ impl VMExternRefActivationsTable { fn insert_precise_stack_root( precise_stack_roots: &mut HashSet, - root: NonNull, + root: VMGcRef, ) { - let root = unsafe { VMExternRef::clone_from_raw(root.as_ptr().cast()).unwrap() }; + let root = root.as_extern_ref().clone(); log::trace!("Found externref on stack: {:p}", root); precise_stack_roots.insert(VMExternRefWithTraits(root)); } @@ -872,7 +991,7 @@ pub unsafe fn gc( let mut activations_table_set: DebugOnly> = Default::default(); if cfg!(debug_assertions) { externref_activations_table.elements(|elem| { - activations_table_set.insert(elem.as_raw() as *mut VMExternData); + activations_table_set.insert(*elem.as_gc_ref()); }); } @@ -920,22 +1039,21 @@ pub unsafe fn gc( continue; } - let stack_slot = stack_slot as *const *mut VMExternData; + let stack_slot = stack_slot as *const *mut u8; let r = std::ptr::read(stack_slot); log::trace!("Stack slot @ {:p} = {:p}", stack_slot, r); - debug_assert!( - r.is_null() || activations_table_set.contains(&r), - "every on-stack externref inside a Wasm frame should \ - have an entry in the VMExternRefActivationsTable; \ - {:?} is not in the table", - r - ); + if let Some(gc_ref) = VMGcRef::from_ptr(r) { + debug_assert!( + activations_table_set.contains(&gc_ref), + "every on-stack externref inside a Wasm frame should \ + have an entry in the VMExternRefActivationsTable; \ + {gc_ref:?} is not in the table", + ); - if let Some(r) = NonNull::new(r) { VMExternRefActivationsTable::insert_precise_stack_root( &mut externref_activations_table.precise_stack_roots, - r, + gc_ref, ); } } @@ -975,7 +1093,7 @@ mod tests { let s: *mut (dyn Any + Send + Sync) = s as _; let extern_data = VMExternData { - ref_count: AtomicUsize::new(0), + ref_count: Cell::new(0), value_ptr: 
NonNull::new(s).unwrap().into(), }; diff --git a/crates/runtime/src/instance.rs b/crates/runtime/src/instance.rs index 9ad701432c04..1dbaa1b7b9c5 100644 --- a/crates/runtime/src/instance.rs +++ b/crates/runtime/src/instance.rs @@ -3,7 +3,7 @@ //! `InstanceHandle` is a reference-counting handle for an `Instance`. use crate::export::Export; -use crate::externref::VMExternRefActivationsTable; +use crate::gc::VMExternRefActivationsTable; use crate::memory::{Memory, RuntimeMemoryCreator}; use crate::table::{Table, TableElement, TableElementType}; use crate::vmcontext::{ diff --git a/crates/runtime/src/lib.rs b/crates/runtime/src/lib.rs index 46ec141ff7ed..ba92324b8845 100644 --- a/crates/runtime/src/lib.rs +++ b/crates/runtime/src/lib.rs @@ -14,7 +14,7 @@ mod arch; #[cfg(feature = "component-model")] pub mod component; mod export; -mod externref; +mod gc; mod imports; mod instance; mod memory; @@ -38,7 +38,7 @@ pub use wasmtime_jit_debug::gdb_jit_int::GdbJitImageRegistration; pub use crate::arch::{get_stack_pointer, V128Abi}; pub use crate::export::*; -pub use crate::externref::*; +pub use crate::gc::*; pub use crate::imports::Imports; pub use crate::instance::{ Instance, InstanceAllocationRequest, InstanceAllocator, InstanceAllocatorImpl, InstanceHandle, diff --git a/crates/runtime/src/libcalls.rs b/crates/runtime/src/libcalls.rs index 1e2d203a6b85..dce1e25f544f 100644 --- a/crates/runtime/src/libcalls.rs +++ b/crates/runtime/src/libcalls.rs @@ -54,7 +54,7 @@ //! } //! ``` -use crate::externref::VMExternRef; +use crate::gc::VMExternRef; use crate::table::{Table, TableElementType}; use crate::vmcontext::VMFuncRef; use crate::{Instance, TrapReason}; @@ -382,9 +382,11 @@ unsafe fn table_get_lazy_init_func_ref( // Drop a `VMExternRef`. 
#[cfg(feature = "gc")] unsafe fn drop_externref(_instance: &mut Instance, externref: *mut u8) { - let externref = externref as *mut crate::externref::VMExternData; - let externref = std::ptr::NonNull::new(externref).unwrap().into(); - crate::externref::VMExternData::drop_and_dealloc(externref); + use crate::VMGcRef; + + let non_null = std::ptr::NonNull::new(externref).unwrap(); + let gc_ref = VMGcRef::from_non_null(non_null); + crate::gc::VMExternData::drop_and_dealloc(gc_ref); } // Do a GC and insert the given `externref` into the diff --git a/crates/runtime/src/send_sync_ptr.rs b/crates/runtime/src/send_sync_ptr.rs index 8ab8d1aaf827..7dcea5815454 100644 --- a/crates/runtime/src/send_sync_ptr.rs +++ b/crates/runtime/src/send_sync_ptr.rs @@ -1,4 +1,5 @@ use std::fmt; +use std::hash::Hash; use std::ptr::NonNull; /// A helper type in Wasmtime to store a raw pointer to `T` while automatically @@ -42,6 +43,12 @@ impl SendSyncPtr { pub fn as_non_null(&self) -> NonNull { self.0 } + + /// Cast this to a pointer to a `U`. 
+ #[inline] + pub fn cast(&self) -> SendSyncPtr { + SendSyncPtr(self.0.cast::()) + } } impl SendSyncPtr<[T]> { @@ -91,3 +98,9 @@ impl PartialEq for SendSyncPtr { } impl Eq for SendSyncPtr {} + +impl Hash for SendSyncPtr { + fn hash(&self, state: &mut H) { + self.as_ptr().hash(state); + } +} diff --git a/crates/runtime/src/table.rs b/crates/runtime/src/table.rs index b0767073956b..0731280c1b2e 100644 --- a/crates/runtime/src/table.rs +++ b/crates/runtime/src/table.rs @@ -4,7 +4,7 @@ #![cfg_attr(feature = "gc", allow(irrefutable_let_patterns))] -use crate::externref::VMExternRef; +use crate::gc::VMExternRef; use crate::vmcontext::{VMFuncRef, VMTableDefinition}; use crate::{SendSyncPtr, Store}; use anyhow::{bail, format_err, Error, Result}; diff --git a/crates/runtime/src/vmcontext.rs b/crates/runtime/src/vmcontext.rs index 5b03966c11f0..c7c857e80eb1 100644 --- a/crates/runtime/src/vmcontext.rs +++ b/crates/runtime/src/vmcontext.rs @@ -3,7 +3,7 @@ mod vm_host_func_context; -use crate::externref::VMExternRef; +use crate::gc::VMExternRef; use sptr::Strict; use std::cell::UnsafeCell; use std::ffi::c_void; @@ -422,7 +422,7 @@ mod test_vmglobal_definition { #[test] #[cfg(feature = "gc")] fn check_vmglobal_can_contain_externref() { - use crate::externref::VMExternRef; + use crate::gc::VMExternRef; assert!(size_of::() <= size_of::()); } } diff --git a/crates/slab/src/lib.rs b/crates/slab/src/lib.rs index f54adbce57ed..5d840bf720e8 100644 --- a/crates/slab/src/lib.rs +++ b/crates/slab/src/lib.rs @@ -411,6 +411,26 @@ impl Slab { } } + /// Deallocate the value associated with the given `id`. + /// + /// If `id` comes from a different `Slab` instance, this method may panic, + /// do nothing, or deallocate an arbitrary value. + #[inline] + pub fn dealloc(&mut self, id: Id) { + match self + .entries + .get_mut(id.0.index()) + .expect("id from a different slab") + { + Entry::Free { .. 
} => panic!("attempt to deallocate an entry that is already vacant"), + e @ Entry::Occupied(_) => { + let next_free = std::mem::replace(&mut self.free, Some(id.0)); + *e = Entry::Free { next_free }; + self.len -= 1; + } + } + } + /// Iterate over all values currently allocated within this `Slab`. /// /// Yields pairs of an `Id` and the `Id`'s associated value. @@ -428,23 +448,41 @@ impl Slab { }) } - /// Deallocate the value associated with the given `id`. + /// Mutably iterate over all values currently allocated within this `Slab`. /// - /// If `id` comes from a different `Slab` instance, this method may panic, - /// do nothing, or deallocate an arbitrary value. + /// Yields pairs of an `Id` and the `Id`'s associated value. + /// + /// Iteration order is undefined. #[inline] - pub fn dealloc(&mut self, id: Id) { - match self - .entries - .get_mut(id.0.index()) - .expect("id from a different slab") - { - Entry::Free { .. } => panic!("attempt to deallocate an entry that is already vacant"), - e @ Entry::Occupied(_) => { - let next_free = std::mem::replace(&mut self.free, Some(id.0)); - *e = Entry::Free { next_free }; - self.len -= 1; - } - } + pub fn iter_mut(&mut self) -> impl Iterator + '_ { + assert!(self.entries.len() <= Self::MAX_CAPACITY); + self.entries + .iter_mut() + .enumerate() + .filter_map(|(i, e)| match e { + Entry::Occupied(x) => Some((Id(EntryIndex::new(i)), x)), + Entry::Free { .. } => None, + }) + } + + /// Iterate over and remove all entries in this slab. + /// + /// The slab will be empty after calling this method. + /// + /// Yields pairs of an `Id` and the `Id`'s associated value. + /// + /// Iteration order is undefined. + #[inline] + pub fn drain(&mut self) -> impl Iterator + '_ { + assert!(self.entries.len() <= Self::MAX_CAPACITY); + self.len = 0; + self.free = None; + self.entries + .drain(..) + .enumerate() + .filter_map(|(i, e)| match e { + Entry::Occupied(x) => Some((Id(EntryIndex::new(i)), x)), + Entry::Free { .. 
} => None, + }) } } diff --git a/crates/wasmtime/src/runtime.rs b/crates/wasmtime/src/runtime.rs index db012948fbbb..69ed3c91b2a1 100644 --- a/crates/wasmtime/src/runtime.rs +++ b/crates/wasmtime/src/runtime.rs @@ -7,6 +7,7 @@ pub(crate) mod code; pub(crate) mod code_memory; pub(crate) mod debug; pub(crate) mod externals; +pub(crate) mod gc; pub(crate) mod instance; pub(crate) mod instantiate; pub(crate) mod limits; @@ -14,7 +15,6 @@ pub(crate) mod linker; pub(crate) mod memory; pub(crate) mod module; pub(crate) mod profiling; -pub(crate) mod r#ref; pub(crate) mod resources; pub(crate) mod store; pub(crate) mod trampoline; @@ -49,13 +49,13 @@ cfg_if::cfg_if! { pub use code_memory::CodeMemory; pub use externals::*; pub use func::*; +pub use gc::*; pub use instance::{Instance, InstancePre}; pub use instantiate::CompiledModule; pub use limits::*; pub use linker::*; pub use memory::*; pub use module::{Module, ModuleExport}; -pub use r#ref::ExternRef; pub use resources::*; #[cfg(feature = "async")] pub use store::CallHookHandler; diff --git a/crates/wasmtime/src/runtime/externals/global.rs b/crates/wasmtime/src/runtime/externals/global.rs index d191e3f071b7..18f02840b662 100644 --- a/crates/wasmtime/src/runtime/externals/global.rs +++ b/crates/wasmtime/src/runtime/externals/global.rs @@ -1,5 +1,5 @@ use crate::{ - store::{StoreData, StoreOpaque, Stored}, + store::{AutoAssertNoGc, StoreData, StoreOpaque, Stored}, trampoline::generate_global_export, AsContext, AsContextMut, ExternRef, Func, GlobalType, HeapType, Mutability, Ref, Val, ValType, }; @@ -77,7 +77,7 @@ impl Global { "type mismatch: initial value provided does not match the type of this global", )?; unsafe { - let wasmtime_export = generate_global_export(store, ty, val); + let wasmtime_export = generate_global_export(store, ty, val)?; Ok(Global::from_wasmtime_global(wasmtime_export, store)) } } @@ -88,7 +88,10 @@ impl Global { /// /// Panics if `store` does not own this global. 
pub fn ty(&self, store: impl AsContext) -> GlobalType { - let store = store.as_context(); + self._ty(store.as_context().0) + } + + pub(crate) fn _ty(&self, store: &StoreOpaque) -> GlobalType { let ty = &store[self.0].global; GlobalType::from_wasmtime_global(store.engine(), &ty) } @@ -101,8 +104,9 @@ impl Global { pub fn get(&self, mut store: impl AsContextMut) -> Val { unsafe { let store = store.as_context_mut(); + let mut store = AutoAssertNoGc::new(store.0); let definition = &*store[self.0].definition; - match self.ty(&store).content() { + match self._ty(&store).content() { ValType::I32 => Val::from(*definition.as_i32()), ValType::I64 => Val::from(*definition.as_i64()), ValType::F32 => Val::F32(*definition.as_u32()), @@ -111,7 +115,7 @@ impl Global { ValType::Ref(ref_ty) => { let reference = match ref_ty.heap_type() { HeapType::Func | HeapType::Concrete(_) => { - Ref::Func(Func::from_raw(store, definition.as_func_ref().cast())) + Ref::Func(Func::_from_raw(&mut store, definition.as_func_ref().cast())) } HeapType::NoFunc => Ref::Func(None), @@ -120,7 +124,7 @@ impl Global { definition .as_externref() .clone() - .map(|inner| ExternRef::from_vm_extern_ref(inner)), + .map(|inner| ExternRef::from_vm_extern_ref(&mut store, inner)), ), }; debug_assert!( @@ -145,12 +149,12 @@ impl Global { /// /// Panics if `store` does not own this global. 
pub fn set(&self, mut store: impl AsContextMut, val: Val) -> Result<()> { - let store = store.as_context_mut().0; - let global_ty = self.ty(&store); + let mut store = AutoAssertNoGc::new(store.as_context_mut().0); + let global_ty = self._ty(&store); if global_ty.mutability() != Mutability::Var { bail!("immutable global cannot be set"); } - val.ensure_matches_ty(store, global_ty.content()) + val.ensure_matches_ty(&store, global_ty.content()) .context("type mismatch: attempt to set global to value of wrong type")?; unsafe { let definition = &mut *store[self.0].definition; @@ -161,16 +165,18 @@ impl Global { Val::F64(f) => *definition.as_u64_mut() = f, Val::V128(i) => *definition.as_u128_mut() = i.into(), Val::FuncRef(f) => { - *definition.as_func_ref_mut() = - f.map_or(ptr::null_mut(), |f| f.vm_func_ref(store).as_ptr().cast()); + *definition.as_func_ref_mut() = f.map_or(ptr::null_mut(), |f| { + f.vm_func_ref(&mut store).as_ptr().cast() + }); } Val::ExternRef(e) => { + let new = match e { + None => None, + Some(e) => Some(e.try_to_vm_extern_ref(&mut store)?), + }; // Take care to invoke the `Drop` implementation of the - // existing `ExternRef` so that it doesn't leak. - let old = mem::replace( - definition.as_externref_mut(), - e.map(|e| e.into_vm_extern_ref()), - ); + // existing `VMExternRef` so that it doesn't leak. 
+ let old = mem::replace(definition.as_externref_mut(), new); drop(old); } } diff --git a/crates/wasmtime/src/runtime/externals/table.rs b/crates/wasmtime/src/runtime/externals/table.rs index 247e39e98413..626a39268df9 100644 --- a/crates/wasmtime/src/runtime/externals/table.rs +++ b/crates/wasmtime/src/runtime/externals/table.rs @@ -1,4 +1,4 @@ -use crate::store::{StoreData, StoreOpaque, Stored}; +use crate::store::{AutoAssertNoGc, StoreData, StoreOpaque, Stored}; use crate::trampoline::generate_table_export; use crate::{AsContext, AsContextMut, ExternRef, Func, Ref, TableType}; use anyhow::{anyhow, bail, Context, Result}; @@ -146,12 +146,12 @@ impl Table { /// /// Panics if `store` does not own this table. pub fn get(&self, mut store: impl AsContextMut, index: u32) -> Option { - let store = store.as_context_mut().0; - let table = self.wasmtime_table(store, std::iter::once(index)); + let mut store = AutoAssertNoGc::new(store.as_context_mut().0); + let table = self.wasmtime_table(&mut store, std::iter::once(index)); unsafe { match (*table).get(index)? { runtime::TableElement::FuncRef(f) => { - let func = Func::from_vm_func_ref(store, f); + let func = Func::from_vm_func_ref(&mut store, f); Some(func.into()) } @@ -163,7 +163,7 @@ impl Table { #[cfg_attr(not(feature = "gc"), allow(unreachable_code, unused_variables))] runtime::TableElement::ExternRef(Some(x)) => { - let x = ExternRef::from_vm_extern_ref(x); + let x = ExternRef::from_vm_extern_ref(&mut store, x); Some(x.into()) } } @@ -399,7 +399,8 @@ mod tests { // That said, they really point to the same table. 
assert!(t1.get(&mut store, 0).unwrap().unwrap_extern().is_none()); assert!(t2.get(&mut store, 0).unwrap().unwrap_extern().is_none()); - t1.set(&mut store, 0, Ref::Extern(Some(ExternRef::new(42))))?; + let e = ExternRef::new(&mut store, 42); + t1.set(&mut store, 0, e.into())?; assert!(t1.get(&mut store, 0).unwrap().unwrap_extern().is_some()); assert!(t2.get(&mut store, 0).unwrap().unwrap_extern().is_some()); diff --git a/crates/wasmtime/src/runtime/func.rs b/crates/wasmtime/src/runtime/func.rs index dd539c819e7f..4bdaa4506092 100644 --- a/crates/wasmtime/src/runtime/func.rs +++ b/crates/wasmtime/src/runtime/func.rs @@ -1,5 +1,5 @@ use crate::runtime::Uninhabited; -use crate::store::{StoreData, StoreOpaque, Stored}; +use crate::store::{AutoAssertNoGc, StoreData, StoreOpaque, Stored}; use crate::type_registry::RegisteredType; use crate::{ AsContext, AsContextMut, CallHook, Engine, Extern, FuncType, Instance, Module, Ref, @@ -1055,7 +1055,11 @@ impl Func { /// caller must guarantee that `raw` is owned by the `store` provided and is /// valid within the `store`. pub unsafe fn from_raw(mut store: impl AsContextMut, raw: *mut c_void) -> Option { - Func::from_vm_func_ref(store.as_context_mut().0, raw.cast()) + Self::_from_raw(store.as_context_mut().0, raw) + } + + pub(crate) unsafe fn _from_raw(store: &mut StoreOpaque, raw: *mut c_void) -> Option { + Func::from_vm_func_ref(store, raw.cast()) } /// Extracts the raw value of this `Func`, which is owned by `store`. 
@@ -1181,7 +1185,7 @@ impl Func { values_vec.resize_with(values_vec_size, || ValRaw::i32(0)); for (arg, slot) in params.iter().cloned().zip(&mut values_vec) { unsafe { - *slot = arg.to_raw(&mut *store); + *slot = arg.to_raw(&mut *store)?; } } @@ -1330,7 +1334,7 @@ impl Func { ret.ensure_matches_ty(caller.store.0, &ty) .context("function attempted to return an incompatible value")?; unsafe { - values_vec[i] = ret.to_raw(&mut caller.store); + values_vec[i] = ret.to_raw(&mut caller.store)?; } } @@ -1639,7 +1643,7 @@ fn exit_wasm(store: &mut StoreContextMut<'_, T>, prev_stack: Option) { pub unsafe trait WasmRet { // Same as `WasmTy::Abi`. #[doc(hidden)] - type Abi: Copy; + type Abi: 'static + Copy; #[doc(hidden)] type Retptr: Copy; @@ -1656,7 +1660,7 @@ pub unsafe trait WasmRet { #[doc(hidden)] unsafe fn into_abi_for_ret( self, - store: &mut StoreOpaque, + store: &mut AutoAssertNoGc<'_>, ptr: Self::Retptr, ) -> Result; @@ -1689,8 +1693,12 @@ where ::compatible_with_store(self, store) } - unsafe fn into_abi_for_ret(self, store: &mut StoreOpaque, _retptr: ()) -> Result { - Ok(::into_abi(self, store)) + unsafe fn into_abi_for_ret( + self, + store: &mut AutoAssertNoGc<'_>, + _retptr: (), + ) -> Result { + ::into_abi(self, store) } fn func_type(engine: &Engine, params: impl Iterator) -> FuncType { @@ -1727,7 +1735,7 @@ where unsafe fn into_abi_for_ret( self, - store: &mut StoreOpaque, + store: &mut AutoAssertNoGc<'_>, retptr: Self::Retptr, ) -> Result { self.and_then(|val| val.into_abi_for_ret(store, retptr)) @@ -1769,9 +1777,13 @@ macro_rules! 
impl_wasm_host_results { } #[inline] - unsafe fn into_abi_for_ret(self, _store: &mut StoreOpaque, ptr: Self::Retptr) -> Result { + unsafe fn into_abi_for_ret( + self, + _store: &mut AutoAssertNoGc<'_>, + ptr: Self::Retptr, + ) -> Result { let ($($t,)*) = self; - let abi = ($($t.into_abi(_store),)*); + let abi = ($($t.into_abi(_store)?,)*); Ok(<($($t::Abi,)*) as HostAbi>::into_abi(abi, ptr)) } @@ -1952,14 +1964,30 @@ pub struct Caller<'a, T> { } impl Caller<'_, T> { - unsafe fn with(caller: *mut VMContext, f: impl FnOnce(Caller<'_, T>) -> R) -> R { + unsafe fn with(caller: *mut VMContext, f: F) -> R + where + // The closure must be valid for any `Caller` it is given; it doesn't + // get to choose the `Caller`'s lifetime. + F: for<'a> FnOnce(Caller<'a, T>) -> R, + // And the return value must not borrow from the caller/store. + R: 'static, + { assert!(!caller.is_null()); wasmtime_runtime::Instance::from_vmctx(caller, |instance| { let store = StoreContextMut::from_raw(instance.store()); - f(Caller { + let gc_lifo_scope = store.0.gc_roots().enter_lifo_scope(); + + let ret = f(Caller { store, caller: &instance, - }) + }); + + // Safe to recreate a mutable borrow of the store because `ret` + // cannot be borrowing from the store. + let store = StoreContextMut::::from_raw(instance.store()); + store.0.gc_roots_mut().exit_lifo_scope(gc_lifo_scope); + + ret }) } @@ -2157,7 +2185,12 @@ macro_rules! impl_into_func { if let Err(trap) = caller.store.0.call_hook(CallHook::CallingHost) { return R::fallible_from_error(trap); } - $(let $args = $args::from_abi($args, caller.store.0);)* + + let mut store = AutoAssertNoGc::new(caller.store.0); + $(let $args = $args::from_abi($args, &mut store);)* + let _ = &mut store; + drop(store); + let r = func( caller.sub_caller(), $( $args, )* @@ -2185,7 +2218,8 @@ macro_rules! 
impl_into_func { if !ret.compatible_with_store(caller.store.0) { CallResult::Trap(anyhow::anyhow!("host function attempted to return cross-`Store` value to Wasm")) } else { - match ret.into_abi_for_ret(caller.store.0, retptr) { + let mut store = AutoAssertNoGc::new(&mut **caller.store.0); + match ret.into_abi_for_ret(&mut store, retptr) { Ok(val) => CallResult::Ok(val), Err(trap) => CallResult::Trap(trap.into()), } diff --git a/crates/wasmtime/src/runtime/func/typed.rs b/crates/wasmtime/src/runtime/func/typed.rs index 4cf926061292..198176d99530 100644 --- a/crates/wasmtime/src/runtime/func/typed.rs +++ b/crates/wasmtime/src/runtime/func/typed.rs @@ -1,8 +1,8 @@ use super::{invoke_wasm_and_catch_traps, HostAbi}; use crate::store::{AutoAssertNoGc, StoreOpaque}; use crate::{ - AsContext, AsContextMut, Engine, ExternRef, Func, FuncType, HeapType, NoFunc, RefType, - StoreContextMut, ValRaw, ValType, + AsContext, AsContextMut, Engine, ExternRef, Func, FuncType, HeapType, ManuallyRooted, NoFunc, + RefType, RootSet, Rooted, StoreContextMut, ValRaw, ValType, }; use anyhow::{bail, Context, Result}; use std::marker; @@ -174,11 +174,7 @@ where // values to cross each other. let params = { - // GC is not safe here, since we move refs into the activations - // table but don't hold a strong reference onto them until we enter - // the Wasm frame and they get referenced from the stack maps. - let mut store = AutoAssertNoGc::new(&mut **store.as_context_mut().0); - + let mut store = AutoAssertNoGc::new(store.0); params.into_abi(&mut store, ty)? }; @@ -197,10 +193,13 @@ where ptr::write(ret.as_mut_ptr(), result); *returned = true }); + let (_, ret, _, returned) = captures; debug_assert_eq!(result.is_ok(), returned); result?; - Ok(Results::from_abi(store.0, ret.assume_init())) + + let mut store = AutoAssertNoGc::new(store.0); + Ok(Results::from_abi(&mut store, ret.assume_init())) } /// Purely a debug-mode assertion, not actually used in release builds. 
@@ -232,7 +231,7 @@ pub unsafe trait WasmTy: Send { // The raw ABI type that values of this type can be converted to and passed // to Wasm, or given from Wasm and converted back from. #[doc(hidden)] - type Abi: Copy; + type Abi: 'static + Copy; // Do a "static" (aka at time of `func.typed::()`) ahead-of-time type // check for this type at the given position. You probably don't need to @@ -326,12 +325,39 @@ pub unsafe trait WasmTy: Send { unsafe fn abi_into_raw(abi: Self::Abi, raw: *mut ValRaw); // Convert `self` into `Self::Abi`. + // + // NB: We _must not_ trigger a GC when passing refs from host code into Wasm + // (e.g. returned from a host function or passed as arguments to a Wasm + // function). After insertion into the activations table, the reference is + // no longer rooted. If multiple references are being sent from the host + // into Wasm and we allowed GCs during insertion, then the following events + // could happen: + // + // * Reference A is inserted into the activations table. This does not + // trigger a GC, but does fill the table to capacity. + // + // * The caller's reference to A is removed. Now the only reference to A is + // from the activations table. + // + // * Reference B is inserted into the activations table. Because the table + // is at capacity, a GC is triggered. + // + // * A is reclaimed because the only reference keeping it alive was the + // activation table's reference (it isn't inside any Wasm frames on the + // stack yet, so stack scanning and stack maps don't increment its + // reference count). + // + // * We transfer control to Wasm, giving it A and B. Wasm uses A. That's a + // use-after-free bug. + // + // In conclusion, to prevent uses-after-free bugs, we cannot GC while + // converting types into their raw ABI forms. #[doc(hidden)] - fn into_abi(self, store: &mut StoreOpaque) -> Self::Abi; + fn into_abi(self, store: &mut AutoAssertNoGc<'_>) -> Result; // Convert back from `Self::Abi` into `Self`. 
#[doc(hidden)] - unsafe fn from_abi(abi: Self::Abi, store: &mut StoreOpaque) -> Self; + unsafe fn from_abi(abi: Self::Abi, store: &mut AutoAssertNoGc<'_>) -> Self; } macro_rules! integers { @@ -363,11 +389,12 @@ macro_rules! integers { *raw = ValRaw::$primitive(abi); } #[inline] - fn into_abi(self, _store: &mut StoreOpaque) -> Self::Abi { - self + fn into_abi(self, _store: &mut AutoAssertNoGc<'_>) -> Result + { + Ok(self) } #[inline] - unsafe fn from_abi(abi: Self::Abi, _store: &mut StoreOpaque) -> Self { + unsafe fn from_abi(abi: Self::Abi, _store: &mut AutoAssertNoGc<'_>) -> Self { abi } } @@ -410,11 +437,12 @@ macro_rules! floats { *raw = ValRaw::$float(abi.to_bits()); } #[inline] - fn into_abi(self, _store: &mut StoreOpaque) -> Self::Abi { - self + fn into_abi(self, _store: &mut AutoAssertNoGc<'_>) -> Result + { + Ok(self) } #[inline] - unsafe fn from_abi(abi: Self::Abi, _store: &mut StoreOpaque) -> Self { + unsafe fn from_abi(abi: Self::Abi, _store: &mut AutoAssertNoGc<'_>) -> Self { abi } } @@ -427,7 +455,7 @@ floats! { } #[cfg(feature = "gc")] -unsafe impl WasmTy for ExternRef { +unsafe impl WasmTy for Rooted { type Abi = NonNull; #[inline] @@ -436,8 +464,8 @@ unsafe impl WasmTy for ExternRef { } #[inline] - fn compatible_with_store(&self, _store: &StoreOpaque) -> bool { - true + fn compatible_with_store(&self, store: &StoreOpaque) -> bool { + self.comes_from_same_store(store) } #[inline] @@ -463,55 +491,26 @@ unsafe impl WasmTy for ExternRef { } #[inline] - fn into_abi(self, store: &mut StoreOpaque) -> Self::Abi { - let inner = self.into_vm_extern_ref(); + fn into_abi(self, store: &mut AutoAssertNoGc<'_>) -> Result { + let inner = self.try_to_vm_extern_ref(store)?; let abi = inner.as_raw(); unsafe { - // NB: We _must not_ trigger a GC when passing refs from host - // code into Wasm (e.g. returned from a host function or passed - // as arguments to a Wasm function). After insertion into the - // table, this reference is no longer rooted. 
If multiple - // references are being sent from the host into Wasm and we - // allowed GCs during insertion, then the following events could - // happen: - // - // * Reference A is inserted into the activations - // table. This does not trigger a GC, but does fill the table - // to capacity. - // - // * The caller's reference to A is removed. Now the only - // reference to A is from the activations table. - // - // * Reference B is inserted into the activations table. Because - // the table is at capacity, a GC is triggered. - // - // * A is reclaimed because the only reference keeping it alive - // was the activation table's reference (it isn't inside any - // Wasm frames on the stack yet, so stack scanning and stack - // maps don't increment its reference count). - // - // * We transfer control to Wasm, giving it A and B. Wasm uses - // A. That's a use after free. - // - // In conclusion, to prevent uses after free, we cannot GC - // during this insertion. - let mut store = AutoAssertNoGc::new(store); store.insert_vmexternref_without_gc(inner); debug_assert!(!abi.is_null()); - NonNull::new_unchecked(abi) + Ok(NonNull::new_unchecked(abi)) } } #[inline] - unsafe fn from_abi(abi: Self::Abi, _store: &mut StoreOpaque) -> Self { + unsafe fn from_abi(abi: Self::Abi, store: &mut AutoAssertNoGc<'_>) -> Self { let inner = wasmtime_runtime::VMExternRef::clone_from_raw(abi.as_ptr()).unwrap(); - ExternRef::from_vm_extern_ref(inner) + ExternRef::from_vm_extern_ref(store, inner) } } #[cfg(feature = "gc")] -unsafe impl WasmTy for Option { +unsafe impl WasmTy for Option> { type Abi = *mut u8; #[inline] @@ -520,8 +519,8 @@ unsafe impl WasmTy for Option { } #[inline] - fn compatible_with_store(&self, _store: &StoreOpaque) -> bool { - true + fn compatible_with_store(&self, store: &StoreOpaque) -> bool { + self.map_or(true, |x| x.comes_from_same_store(store)) } #[inline] @@ -545,18 +544,137 @@ unsafe impl WasmTy for Option { } #[inline] - fn into_abi(self, store: &mut StoreOpaque) -> 
Self::Abi { - if let Some(x) = self { - ::into_abi(x, store).as_ptr() + fn into_abi(self, store: &mut AutoAssertNoGc<'_>) -> Result { + Ok(if let Some(x) = self { + as WasmTy>::into_abi(x, store)?.as_ptr() } else { ptr::null_mut() + }) + } + + #[inline] + unsafe fn from_abi(abi: Self::Abi, store: &mut AutoAssertNoGc<'_>) -> Self { + let inner = wasmtime_runtime::VMExternRef::clone_from_raw(abi)?; + Some(ExternRef::from_vm_extern_ref(store, inner)) + } +} + +#[cfg(feature = "gc")] +unsafe impl WasmTy for ManuallyRooted { + type Abi = NonNull; + + #[inline] + fn valtype() -> ValType { + ValType::Ref(RefType::new(false, HeapType::Extern)) + } + + #[inline] + fn compatible_with_store(&self, store: &StoreOpaque) -> bool { + self.comes_from_same_store(store) + } + + #[inline] + fn dynamic_concrete_type_check(&self, _: &StoreOpaque, _: bool, _: &FuncType) -> Result<()> { + unreachable!() + } + + #[inline] + fn is_externref(&self) -> bool { + true + } + + #[inline] + unsafe fn abi_from_raw(raw: *mut ValRaw) -> Self::Abi { + let p = (*raw).get_externref().cast::(); + debug_assert!(!p.is_null()); + NonNull::new_unchecked(p) + } + + #[inline] + unsafe fn abi_into_raw(abi: NonNull, raw: *mut ValRaw) { + *raw = ValRaw::externref(abi.cast::().as_ptr()); + } + + #[inline] + fn into_abi(self, store: &mut AutoAssertNoGc<'_>) -> Result { + let inner = self.try_to_vm_extern_ref(store)?; + let abi = inner.as_raw(); + unsafe { + store.insert_vmexternref_without_gc(inner); + self._unroot(&mut *store); + + debug_assert!(!abi.is_null()); + Ok(NonNull::new_unchecked(abi)) } } #[inline] - unsafe fn from_abi(abi: Self::Abi, _store: &mut StoreOpaque) -> Self { + unsafe fn from_abi(abi: Self::Abi, store: &mut AutoAssertNoGc<'_>) -> Self { + let inner = wasmtime_runtime::VMExternRef::clone_from_raw(abi.as_ptr()).unwrap(); + RootSet::with_lifo_scope(store, |store| { + let rooted = ExternRef::from_vm_extern_ref(store, inner); + rooted + ._to_manually_rooted(store) + .expect("rooted is in scope") 
+ }) + } +} + +#[cfg(feature = "gc")] +unsafe impl WasmTy for Option> { + type Abi = *mut u8; + + #[inline] + fn valtype() -> ValType { + ValType::EXTERNREF + } + + #[inline] + fn compatible_with_store(&self, store: &StoreOpaque) -> bool { + self.as_ref() + .map_or(true, |x| x.comes_from_same_store(store)) + } + + #[inline] + fn dynamic_concrete_type_check(&self, _: &StoreOpaque, _: bool, _: &FuncType) -> Result<()> { + unreachable!() + } + + #[inline] + fn is_externref(&self) -> bool { + true + } + + #[inline] + unsafe fn abi_from_raw(raw: *mut ValRaw) -> *mut u8 { + (*raw).get_externref() as *mut u8 + } + + #[inline] + unsafe fn abi_into_raw(abi: *mut u8, raw: *mut ValRaw) { + *raw = ValRaw::externref(abi.cast()); + } + + #[inline] + fn into_abi(self, store: &mut AutoAssertNoGc<'_>) -> Result { + Ok(if let Some(x) = self { + as WasmTy>::into_abi(x, store)?.as_ptr() + } else { + ptr::null_mut() + }) + } + + #[inline] + unsafe fn from_abi(abi: Self::Abi, store: &mut AutoAssertNoGc<'_>) -> Self { let inner = wasmtime_runtime::VMExternRef::clone_from_raw(abi)?; - Some(ExternRef::from_vm_extern_ref(inner)) + RootSet::with_lifo_scope(store, |store| { + let rooted = ExternRef::from_vm_extern_ref(store, inner); + Some( + rooted + ._to_manually_rooted(store) + .expect("rooted is in scope"), + ) + }) } } @@ -594,12 +712,12 @@ unsafe impl WasmTy for NoFunc { } #[inline] - fn into_abi(self, _store: &mut StoreOpaque) -> Self::Abi { + fn into_abi(self, _store: &mut AutoAssertNoGc<'_>) -> Result { unreachable!("NoFunc is uninhabited") } #[inline] - unsafe fn from_abi(_abi: Self::Abi, _store: &mut StoreOpaque) -> Self { + unsafe fn from_abi(_abi: Self::Abi, _store: &mut AutoAssertNoGc<'_>) -> Self { unreachable!("NoFunc is uninhabited") } } @@ -648,12 +766,12 @@ unsafe impl WasmTy for Option { } #[inline] - fn into_abi(self, _store: &mut StoreOpaque) -> Self::Abi { - ptr::null_mut() + fn into_abi(self, _store: &mut AutoAssertNoGc<'_>) -> Result { + Ok(ptr::null_mut()) } 
#[inline] - unsafe fn from_abi(_abi: Self::Abi, _store: &mut StoreOpaque) -> Self { + unsafe fn from_abi(_abi: Self::Abi, _store: &mut AutoAssertNoGc<'_>) -> Self { None } } @@ -700,12 +818,12 @@ unsafe impl WasmTy for Func { } #[inline] - fn into_abi(self, store: &mut StoreOpaque) -> Self::Abi { - self.vm_func_ref(store) + fn into_abi(self, store: &mut AutoAssertNoGc<'_>) -> Result { + Ok(self.vm_func_ref(store)) } #[inline] - unsafe fn from_abi(abi: Self::Abi, store: &mut StoreOpaque) -> Self { + unsafe fn from_abi(abi: Self::Abi, store: &mut AutoAssertNoGc<'_>) -> Self { Func::from_vm_func_ref(store, abi.as_ptr()).unwrap() } } @@ -759,16 +877,16 @@ unsafe impl WasmTy for Option { } #[inline] - fn into_abi(self, store: &mut StoreOpaque) -> Self::Abi { - if let Some(f) = self { + fn into_abi(self, store: &mut AutoAssertNoGc<'_>) -> Result { + Ok(if let Some(f) = self { f.vm_func_ref(store).as_ptr() } else { ptr::null_mut() - } + }) } #[inline] - unsafe fn from_abi(abi: Self::Abi, store: &mut StoreOpaque) -> Self { + unsafe fn from_abi(abi: Self::Abi, store: &mut AutoAssertNoGc<'_>) -> Self { Func::from_vm_func_ref(store, abi) } } @@ -793,7 +911,7 @@ pub unsafe trait WasmParams: Send { fn externrefs_count(&self) -> usize; #[doc(hidden)] - fn into_abi(self, store: &mut StoreOpaque, func_ty: &FuncType) -> Result; + fn into_abi(self, store: &mut AutoAssertNoGc<'_>, func_ty: &FuncType) -> Result; #[doc(hidden)] unsafe fn invoke( @@ -826,7 +944,7 @@ where } #[inline] - fn into_abi(self, store: &mut StoreOpaque, func_ty: &FuncType) -> Result { + fn into_abi(self, store: &mut AutoAssertNoGc<'_>, func_ty: &FuncType) -> Result { <(T,) as WasmParams>::into_abi((self,), store, func_ty) } @@ -884,7 +1002,7 @@ macro_rules! impl_wasm_params { #[inline] fn into_abi( self, - _store: &mut StoreOpaque, + _store: &mut AutoAssertNoGc<'_>, _func_ty: &FuncType, ) -> Result { let ($($t,)*) = self; @@ -903,7 +1021,7 @@ macro_rules! 
impl_wasm_params { } } - let $t = $t.into_abi(_store); + let $t = $t.into_abi(_store)?; _i += 1; )* @@ -950,7 +1068,7 @@ pub unsafe trait WasmResults: WasmParams { type ResultAbi: HostAbi; #[doc(hidden)] - unsafe fn from_abi(store: &mut StoreOpaque, abi: Self::ResultAbi) -> Self; + unsafe fn from_abi(store: &mut AutoAssertNoGc<'_>, abi: Self::ResultAbi) -> Self; } // Forwards from a bare type `T` to the 1-tuple type `(T,)` @@ -960,7 +1078,7 @@ where { type ResultAbi = <(T,) as WasmResults>::ResultAbi; - unsafe fn from_abi(store: &mut StoreOpaque, abi: Self::ResultAbi) -> Self { + unsafe fn from_abi(store: &mut AutoAssertNoGc<'_>, abi: Self::ResultAbi) -> Self { <(T,) as WasmResults>::from_abi(store, abi).0 } } @@ -974,7 +1092,7 @@ macro_rules! impl_wasm_results { type ResultAbi = ($($t::Abi,)*); #[inline] - unsafe fn from_abi(store: &mut StoreOpaque, abi: Self::ResultAbi) -> Self { + unsafe fn from_abi(store: &mut AutoAssertNoGc<'_>, abi: Self::ResultAbi) -> Self { let ($($t,)*) = abi; ($($t::from_abi($t, store),)*) } diff --git a/crates/wasmtime/src/runtime/gc.rs b/crates/wasmtime/src/runtime/gc.rs new file mode 100644 index 000000000000..df622003ff8a --- /dev/null +++ b/crates/wasmtime/src/runtime/gc.rs @@ -0,0 +1,44 @@ +#[cfg(feature = "gc")] +mod enabled; +#[cfg(feature = "gc")] +pub use enabled::*; + +#[cfg(not(feature = "gc"))] +mod disabled; +#[cfg(not(feature = "gc"))] +pub use disabled::*; + +use std::ops::Deref; + +/// A common trait implemented by all garbage-collected reference types. +/// +/// This is a sealed trait, and may not be implemented for any types outside of +/// the `wasmtime` crate. +pub trait GcRef: GcRefImpl {} + +impl GcRef for T where T: GcRefImpl {} + +/// A trait implemented for GC references that are guaranteed to be rooted: +/// +/// * [`Rooted`][crate::Rooted] +/// * [`ManuallyRooted`][crate::ManuallyRooted] +/// +/// You can use this to abstract over the different kinds of rooted GC +/// references. 
Note that `Deref` is a supertrait for +/// `RootedGcRef`, so all rooted GC references deref to their underlying `T`, +/// allowing you to call its methods. +/// +/// This is a sealed trait, and may not be implemented for any types outside of +/// the `wasmtime` crate. +pub trait RootedGcRef: RootedGcRefImpl + Deref +where + T: GcRef, +{ +} + +impl RootedGcRef for U +where + T: GcRef, + U: RootedGcRefImpl + Deref, +{ +} diff --git a/crates/wasmtime/src/runtime/gc/disabled.rs b/crates/wasmtime/src/runtime/gc/disabled.rs new file mode 100644 index 000000000000..8d40332ab811 --- /dev/null +++ b/crates/wasmtime/src/runtime/gc/disabled.rs @@ -0,0 +1,262 @@ +//! The dummy `ExternRef` type used when the `gc` cargo feature is disabled. +//! +//! Providing a dummy type means that downstream users need to do fewer +//! `#[cfg(...)]`s versus if this type or its methods simply didn't exist. The +//! only methods that are left missing are constructors. + +#![allow(missing_docs)] + +use crate::runtime::Uninhabited; +use crate::{store::StoreOpaque, AsContext, AsContextMut, GcRef, Result, RootedGcRef}; +use std::any::Any; +use std::ffi::c_void; +use std::fmt::Debug; +use std::hash::Hash; +use std::ops::Deref; +use wasmtime_runtime::VMExternRef; + +mod sealed { + use super::*; + pub trait GcRefImpl {} + pub trait RootedGcRefImpl { + fn assert_unreachable(&self) -> U; + } +} +pub(crate) use sealed::*; + +/// Represents an opaque reference to any data within WebAssembly. +/// +/// Due to compilation configuration, this is an uninhabited type: enable the +/// `gc` cargo feature to properly use this type. 
+#[derive(Debug)] +pub struct ExternRef { + _inner: Uninhabited, +} + +impl GcRefImpl for ExternRef {} + +impl ExternRef { + pub(crate) fn from_vm_extern_ref(_store: &mut StoreOpaque, inner: VMExternRef) -> Rooted { + inner.assert_unreachable() + } + + pub(crate) fn into_vm_extern_ref(self) -> VMExternRef { + match self._inner {} + } + + pub(crate) fn try_to_vm_extern_ref(&self, _store: &mut StoreOpaque) -> Result { + match self._inner {} + } + + pub fn data<'a>(&self, _store: &'a impl AsContext) -> Result<&'a (dyn Any + Send + Sync)> { + match self._inner {} + } + + pub fn data_mut<'a>( + &self, + _store: &'a mut impl AsContextMut, + ) -> Result<&'a mut (dyn Any + Send + Sync)> { + match self._inner {} + } + + pub unsafe fn from_raw( + _store: impl AsContextMut, + raw: *mut c_void, + ) -> Option> { + assert!(raw.is_null()); + None + } + + pub unsafe fn to_raw(&self, _store: impl AsContextMut) -> Result<*mut c_void> { + match self._inner {} + } +} + +#[derive(Debug, Default)] +pub(crate) struct RootSet {} + +impl RootSet { + pub(crate) fn enter_lifo_scope(&self) -> usize { + usize::MAX + } + + pub(crate) fn exit_lifo_scope(&mut self, _scope: usize) {} + + pub(crate) fn with_lifo_scope( + store: &mut StoreOpaque, + f: impl FnOnce(&mut StoreOpaque) -> T, + ) -> T { + f(store) + } +} + +/// A scoped, rooted GC reference. +/// +/// This type is disabled because the `gc` cargo feature was not enabled at +/// compile time. 
+pub struct Rooted { + inner: Uninhabited, + _phantom: std::marker::PhantomData, +} + +impl Clone for Rooted { + fn clone(&self) -> Self { + match self.inner {} + } +} + +impl Copy for Rooted {} + +impl Debug for Rooted { + fn fmt(&self, _f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + match self.inner {} + } +} + +impl PartialEq for Rooted { + fn eq(&self, _other: &Self) -> bool { + match self.inner {} + } +} + +impl Eq for Rooted {} + +impl Hash for Rooted { + fn hash(&self, _state: &mut H) { + match self.inner {} + } +} + +impl RootedGcRefImpl for Rooted { + fn assert_unreachable(&self) -> U { + match self.inner {} + } +} + +impl Deref for Rooted { + type Target = T; + + fn deref(&self) -> &Self::Target { + match self.inner {} + } +} + +impl Rooted { + pub(crate) fn comes_from_same_store(&self, _store: &StoreOpaque) -> bool { + match self.inner {} + } + + pub fn to_manually_rooted(&self, _store: impl AsContextMut) -> Result> { + match self.inner {} + } + + pub fn rooted_eq(a: Self, _b: Self) -> bool { + match a.inner {} + } + + pub fn ref_eq( + _store: impl AsContext, + a: &impl RootedGcRef, + _b: &impl RootedGcRef, + ) -> Result { + a.assert_unreachable() + } +} + +/// Nested rooting scopes. +/// +/// This type has been disabled because the `gc` cargo feature was not enabled +/// at compile time. +pub struct RootScope +where + C: AsContextMut, +{ + inner: Uninhabited, + _phantom: std::marker::PhantomData, +} + +impl RootScope +where + C: AsContextMut, +{ + pub fn reserve(&mut self, _additional: usize) { + match self.inner {} + } +} + +impl AsContext for RootScope +where + T: AsContextMut, +{ + type Data = T::Data; + + fn as_context(&self) -> crate::StoreContext<'_, Self::Data> { + match self.inner {} + } +} + +impl AsContextMut for RootScope +where + T: AsContextMut, +{ + fn as_context_mut(&mut self) -> crate::StoreContextMut<'_, Self::Data> { + match self.inner {} + } +} + +/// A rooted reference to a garbage-collected `T` with arbitrary lifetime. 
+/// +/// This type has been disabled because the `gc` cargo feature was not enabled +/// at compile time. +pub struct ManuallyRooted +where + T: GcRef, +{ + inner: Uninhabited, + _phantom: std::marker::PhantomData, +} + +impl Debug for ManuallyRooted { + fn fmt(&self, _f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + match self.inner {} + } +} + +impl Deref for ManuallyRooted { + type Target = T; + + fn deref(&self) -> &Self::Target { + match self.inner {} + } +} + +impl ManuallyRooted +where + T: GcRef, +{ + pub(crate) fn comes_from_same_store(&self, _store: &StoreOpaque) -> bool { + match self.inner {} + } + + pub fn clone(&self, _store: impl AsContextMut) -> Self { + match self.inner {} + } + + pub fn unroot(self, _store: impl AsContextMut) { + match self.inner {} + } + + pub fn to_rooted(&self, _context: impl AsContextMut) -> Rooted { + match self.inner {} + } + + pub fn into_rooted(self, _context: impl AsContextMut) -> Rooted { + match self.inner {} + } +} + +impl RootedGcRefImpl for ManuallyRooted { + fn assert_unreachable(&self) -> U { + match self.inner {} + } +} diff --git a/crates/wasmtime/src/runtime/gc/enabled.rs b/crates/wasmtime/src/runtime/gc/enabled.rs new file mode 100644 index 000000000000..9e45166b01e0 --- /dev/null +++ b/crates/wasmtime/src/runtime/gc/enabled.rs @@ -0,0 +1,390 @@ +mod rooting; + +use anyhow::anyhow; +pub use rooting::*; + +use crate::store::AutoAssertNoGc; +use crate::{AsContextMut, Result, StoreContext, StoreContextMut}; +use std::any::Any; +use std::ffi::c_void; +use wasmtime_runtime::VMExternRef; + +/// An opaque, GC-managed reference to some host data that can be passed to +/// WebAssembly. +/// +/// The `ExternRef` type represents WebAssembly `externref` values. 
These are +/// opaque and unforgeable to Wasm: they cannot be faked and Wasm can't, for +/// example, cast the integer `0x12345678` into a reference, pretend it is a +/// valid `externref`, and trick the host into dereferencing it and segfaulting +/// or worse. Wasm can't do anything with the `externref`s other than put them +/// in tables, globals, and locals or pass them to other functions. +/// +/// You can use `ExternRef` to give access to host objects and control the +/// operations that Wasm can perform on them via what functions you allow Wasm +/// to import. +/// +/// Note that you can also use `Rooted` as a type parameter with +/// [`Func::typed`][crate::Func::typed]- and +/// [`Func::wrap`][crate::Func::wrap]-style APIs. +/// +/// # Example +/// +/// ``` +/// # use wasmtime::*; +/// # use std::borrow::Cow; +/// # fn _foo() -> Result<()> { +/// let engine = Engine::default(); +/// let mut store = Store::new(&engine, ()); +/// +/// // Define some APIs for working with host strings from Wasm via `externref`. +/// let mut linker = Linker::new(&engine); +/// linker.func_wrap( +/// "host-string", +/// "new", +/// |caller: Caller<'_, ()>| -> Rooted { ExternRef::new(caller, Cow::from("")) }, +/// )?; +/// linker.func_wrap( +/// "host-string", +/// "concat", +/// |mut caller: Caller<'_, ()>, a: Rooted, b: Rooted| -> Result> { +/// let mut s = a +/// .data(&caller)? +/// .downcast_ref::>() +/// .ok_or_else(|| Error::msg("externref was not a string"))? +/// .clone() +/// .into_owned(); +/// let b = b +/// .data(&caller)? +/// .downcast_ref::>() +/// .ok_or_else(|| Error::msg("externref was not a string"))?; +/// s.push_str(&b); +/// Ok(ExternRef::new(&mut caller, s)) +/// }, +/// )?; +/// +/// // Here is a Wasm module that uses those APIs. 
+/// let module = Module::new( +/// &engine, +/// r#" +/// (module +/// (import "host-string" "concat" (func $concat (param externref externref) +/// (result externref))) +/// (func (export "run") (param externref externref) (result externref) +/// local.get 0 +/// local.get 1 +/// call $concat +/// ) +/// ) +/// "#, +/// )?; +/// +/// // Create a couple `externref`s wrapping `Cow`s. +/// let hello = ExternRef::new(&mut store, Cow::from("Hello, ")); +/// let world = ExternRef::new(&mut store, Cow::from("World!")); +/// +/// // Instantiate the module and pass the `externref`s into it. +/// let instance = linker.instantiate(&mut store, &module)?; +/// let result = instance +/// .get_typed_func::<(Rooted, Rooted), Rooted>(&mut store, "run")? +/// .call(&mut store, (hello, world))?; +/// +/// // The module should have concatenated the strings together! +/// assert_eq!( +/// result.data(&store)?.downcast_ref::>().unwrap(), +/// "Hello, World!" +/// ); +/// # Ok(()) +/// # } +/// ``` +#[derive(Debug)] +#[repr(transparent)] +pub struct ExternRef { + inner: GcRootIndex, +} + +unsafe impl GcRefImpl for ExternRef { + fn transmute_ref(index: &GcRootIndex) -> &Self { + // Safety: `ExternRef` is a newtype of a `GcRootIndex`. + let me: &Self = unsafe { std::mem::transmute(index) }; + + // Assert we really are just a newtype of a `GcRootIndex`. + assert!(matches!( + me, + Self { + inner: GcRootIndex { .. }, + } + )); + + me + } +} + +impl ExternRef { + /// Creates a new instance of `ExternRef` wrapping the given value. + /// + /// The resulting value is automatically unrooted when the given `context`'s + /// scope is exited. See [`Rooted`][crate::Rooted]'s documentation for + /// more details. + /// + /// # Example + /// + /// ``` + /// # use wasmtime::*; + /// # fn _foo() -> Result<()> { + /// let mut store = Store::<()>::default(); + /// + /// { + /// let mut scope = RootScope::new(&mut store); + /// + /// // Create an `externref` wrapping a `str`. 
 + /// let externref = ExternRef::new(&mut scope, "hello!"); + /// + /// // Use `externref`... + /// } + /// + /// // The `externref` is automatically unrooted when we exit the scope. + /// # Ok(()) + /// # } + /// ``` + pub fn new(mut context: impl AsContextMut, value: T) -> Rooted + where + T: 'static + Any + Send + Sync, + { + // Safety: We provide `VMExternRef`'s invariants via the way that + // `ExternRef` methods take `impl AsContext[Mut]` methods. + let inner = unsafe { VMExternRef::new(value) }; + + let mut context = AutoAssertNoGc::new(context.as_context_mut().0); + + // Safety: we just created the `VMExternRef` and are associating it with + // this store. + unsafe { Self::from_vm_extern_ref(&mut context, inner) } + } + + /// Creates a new, manually-rooted instance of `ExternRef` wrapping the + /// given value. + /// + /// The resulting value must be manually unrooted, or else it will leak for + /// the entire duration of the store's lifetime. See + /// [`ManuallyRooted`][crate::ManuallyRooted]'s documentation for more + /// details. + /// + /// # Example + /// + /// ``` + /// # use wasmtime::*; + /// # fn _foo() -> Result<()> { + /// let mut store = Store::<()>::default(); + /// + /// // Create a manually-rooted `externref` wrapping a `str`. + /// let externref = ExternRef::new_manually_rooted(&mut store, "hello!"); + /// + /// // Use `externref` a bunch... + /// + /// // Don't forget to explicitly unroot the `externref` when done using it. + /// externref.unroot(&mut store); + /// # Ok(()) + /// # } + /// ``` + pub fn new_manually_rooted( + mut store: impl AsContextMut, + value: T, + ) -> ManuallyRooted + where + T: 'static + Any + Send + Sync, + { + let mut store = AutoAssertNoGc::new(store.as_context_mut().0); + + // Safety: We provide `VMExternRef`'s invariants via the way that + // `ExternRef` methods take `impl AsContext[Mut]` methods. 
+ let inner = unsafe { VMExternRef::new(value) }; + let inner = unsafe { inner.into_gc_ref() }; + + // Safety: `inner` is a GC reference pointing to an `externref` GC + // object. + unsafe { ManuallyRooted::new(&mut store, inner) } + } + + /// Create an `ExternRef` from an underlying `VMExternRef`. + /// + /// # Safety + /// + /// The underlying `VMExternRef` must belong to `store`. + pub(crate) unsafe fn from_vm_extern_ref( + store: &mut AutoAssertNoGc<'_>, + inner: VMExternRef, + ) -> Rooted { + // Safety: `inner` is a GC reference pointing to an `externref` GC + // object. + unsafe { Rooted::new(store, inner.into_gc_ref()) } + } + + pub(crate) fn to_vm_extern_ref(&self, store: &mut AutoAssertNoGc<'_>) -> Option { + let gc_ref = self.inner.get_gc_ref(store)?; + // Safety: Our underlying `gc_ref` is always pointing to an `externref`. + Some(unsafe { VMExternRef::clone_from_gc_ref(*gc_ref) }) + } + + pub(crate) fn try_to_vm_extern_ref( + &self, + store: &mut AutoAssertNoGc<'_>, + ) -> Result { + self.to_vm_extern_ref(store) + .ok_or_else(|| anyhow!("attempted to use an `externref` that was unrooted")) + } + + /// Get a shared borrow of the underlying data for this `ExternRef`. + /// + /// Returns an error if this `externref` GC reference has been unrooted (eg + /// if you attempt to use a `Rooted` after exiting the scope it + /// was rooted within). See the documentation for + /// [`Rooted`][crate::Rooted] for more details. + /// + /// # Example + /// + /// ``` + /// # use wasmtime::*; + /// # fn _foo() -> Result<()> { + /// let mut store = Store::<()>::default(); + /// + /// let externref = ExternRef::new(&mut store, "hello"); + /// + /// // Access the `externref`'s host data. + /// let data = externref.data(&store)?; + /// // Dowcast it to a `&str`. + /// let data = data.downcast_ref::<&str>().ok_or_else(|| Error::msg("not a str"))?; + /// // We should have got the data we created the `externref` with! 
+ /// assert_eq!(*data, "hello"); + /// # Ok(()) + /// # } + /// ``` + pub fn data<'a, T>( + &self, + store: impl Into>, + ) -> Result<&'a (dyn Any + Send + Sync)> + where + T: 'a, + { + let store = store.into().0; + + // Safety: we don't do anything that could cause a GC while handling + // this `gc_ref`. + // + // NB: We can't use AutoAssertNoGc` here because then the lifetime of + // `gc_ref.as_extern_ref()` would only be the lifetime of the `store` + // local, rather than `'a`. + let gc_ref = unsafe { self.inner.unchecked_try_gc_ref(store)? }; + + let externref = gc_ref.as_extern_ref(); + Ok(externref.data()) + } + + /// Get an exclusive borrow of the underlying data for this `ExternRef`. + /// + /// Returns an error if this `externref` GC reference has been unrooted (eg + /// if you attempt to use a `Rooted` after exiting the scope it + /// was rooted within). See the documentation for + /// [`Rooted`][crate::Rooted] for more details. + /// + /// # Example + /// + /// ``` + /// # use wasmtime::*; + /// # fn _foo() -> Result<()> { + /// let mut store = Store::<()>::default(); + /// + /// let externref = ExternRef::new::(&mut store, 0); + /// + /// // Access the `externref`'s host data. + /// let data = externref.data_mut(&mut store)?; + /// // Dowcast it to a `usize`. + /// let data = data.downcast_mut::().ok_or_else(|| Error::msg("not a usize"))?; + /// // We initialized to zero. + /// assert_eq!(*data, 0); + /// // And we can mutate the value! + /// *data += 10; + /// # Ok(()) + /// # } + /// ``` + pub fn data_mut<'a, T>( + &self, + store: impl Into>, + ) -> Result<&'a mut (dyn Any + Send + Sync)> + where + T: 'a, + { + let store = store.into(); + + // Safety: we don't do anything that could cause a GC while handling + // this `gc_ref`. + // + // NB: We can't use AutoAssertNoGc` here because then the lifetime of + // `gc_ref.as_extern_ref()` would only be the lifetime of the `store` + // local, rather than `'a`. 
+ let gc_ref = unsafe { self.inner.unchecked_try_gc_ref_mut(store.0)? }; + + let externref = gc_ref.as_extern_ref_mut(); + // Safety: We have a mutable borrow on the store, which prevents + // concurrent access to the underlying `VMExternRef`. + Ok(unsafe { externref.data_mut() }) + } + + /// Creates a new strongly-owned [`ExternRef`] from the raw value provided. + /// + /// This is intended to be used in conjunction with [`Func::new_unchecked`], + /// [`Func::call_unchecked`], and [`ValRaw`] with its `externref` field. + /// + /// This function assumes that `raw` is an externref value which is + /// currently rooted within the [`Store`]. + /// + /// # Unsafety + /// + /// This function is particularly `unsafe` because `raw` not only must be a + /// valid externref value produced prior by `to_raw` but it must also be + /// correctly rooted within the store. When arguments are provided to a + /// callback with [`Func::new_unchecked`], for example, or returned via + /// [`Func::call_unchecked`], if a GC is performed within the store then + /// floating externref values are not rooted and will be GC'd, meaning that + /// this function will no longer be safe to call with the values cleaned up. + /// This function must be invoked *before* possible GC operations can happen + /// (such as calling wasm). + /// + /// When in doubt try to not use this. Instead use the safe Rust APIs of + /// [`TypedFunc`] and friends. 
+ /// + /// [`Func::call_unchecked`]: crate::Func::call_unchecked + /// [`Func::new_unchecked`]: crate::Func::new_unchecked + /// [`Store`]: crate::Store + /// [`TypedFunc`]: crate::TypedFunc + /// [`ValRaw`]: crate::ValRaw + pub unsafe fn from_raw( + mut store: impl AsContextMut, + raw: *mut c_void, + ) -> Option> { + let mut store = AutoAssertNoGc::new(store.as_context_mut().0); + let raw = raw.cast::(); + let inner = VMExternRef::clone_from_raw(raw)?; + Some(Self::from_vm_extern_ref(&mut store, inner)) + } + + /// Converts this [`ExternRef`] to a raw value suitable to store within a + /// [`ValRaw`]. + /// + /// Returns an error if this `externref` has been unrooted. + /// + /// # Unsafety + /// + /// Produces a raw value which is only safe to pass into a store if a GC + /// doesn't happen between when the value is produce and when it's passed + /// into the store. + /// + /// [`ValRaw`]: crate::ValRaw + pub unsafe fn to_raw(&self, mut store: impl AsContextMut) -> Result<*mut c_void> { + let mut store = AutoAssertNoGc::new(store.as_context_mut().0); + let gc_ref = self.inner.try_gc_ref(&store)?; + let inner = VMExternRef::clone_from_gc_ref(*gc_ref); + let raw = inner.as_raw(); + store.insert_vmexternref_without_gc(inner); + Ok(raw.cast()) + } +} diff --git a/crates/wasmtime/src/runtime/gc/enabled/rooting.rs b/crates/wasmtime/src/runtime/gc/enabled/rooting.rs new file mode 100644 index 000000000000..24822d37bcf6 --- /dev/null +++ b/crates/wasmtime/src/runtime/gc/enabled/rooting.rs @@ -0,0 +1,1700 @@ +//! Garbage collection rooting APIs. +//! +//! Rooting prevents GC objects from being collected while they are actively +//! being used. +//! +//! ## Goals +//! +//! We have a few sometimes-conflicting goals with our GC rooting APIs: +//! +//! 1. Safety: It should never be possible to get a use-after-free bug because +//! the user misused the rooting APIs, the collector "mistakenly" determined +//! 
an object was unreachable and collected it, and then the user tried to
+//! access the object. This is our highest priority.
+//!
+//! 2. Moving GC: Our rooting APIs should support moving collectors (such as
+//! generational and compacting collectors) where an object might get
+//! relocated after a collection and we need to update the GC root's pointer
+//! to the moved object. This means we either need cooperation and internal
+//! mutability from individual GC roots as well as the ability to enumerate
+//! all GC roots on the native Rust stack, or we need a level of indirection.
+//!
+//! 3. Performance: Our rooting APIs should generally be as low-overhead as
+//! possible. They definitely shouldn't require synchronization and locking
+//! to create, access, and drop GC roots.
+//!
+//! 4. Ergonomics: Our rooting APIs should be, if not a pleasure, then at least
+//! not a burden for users. Additionally, the API's types should be `Sync`
+//! and `Send` so that they work well with async Rust.
+//!
+//! For example, goals (3) and (4) are in conflict when we think about how to
+//! support (2). Ideally, for ergonomics, a root would automatically unroot
+//! itself when dropped. But in the general case that requires holding a
+//! reference to the store's root set, and that root set needs to be held
+//! simultaneously by all GC roots, and they each need to mutate the set to
+//! unroot themselves. That implies `Rc<RefCell<...>>` or `Arc<Mutex<...>>`! The
+//! former makes the store and GC root types not `Send` and not `Sync`. The
+//! latter imposes synchronization and locking overhead. So we instead make GC
+//! roots indirect and require passing in a store context explicitly to unroot
+//! in the general case. This trades worse ergonomics for better performance and
+//! support for moving GC.
+//!
+//! ## Two Flavors of Rooting API
+//!
+//! Okay, with that out of the way, this module provides two flavors of rooting
+//! API.
One for the common, scoped lifetime case, and another for the rare case
+//! where we really need a GC root with an arbitrary, non-LIFO/non-scoped
+//! lifetime:
+//!
+//! 1. `RootScope` and `Rooted<T>`: These are used for temporarily rooting GC
+//! objects for the duration of a scope. The internal implementation takes
+//! advantage of the LIFO property inherent in scopes, making creating and
+//! dropping `Rooted<T>`s and `RootScope`s super fast and roughly equivalent
+//! to bump allocation.
+//!
+//! This type is vaguely similar to V8's [`HandleScope`].
+//!
+//! [`HandleScope`]: https://v8.github.io/api/head/classv8_1_1HandleScope.html
+//!
+//! Note that `Rooted<T>` can't be statically tied to its context scope via a
+//! lifetime parameter, unfortunately, as that would allow the creation of
+//! only one `Rooted<T>` at a time, since the `Rooted<T>` would take a borrow
+//! of the whole context.
+//!
+//! This supports the common use case for rooting and provides good
+//! ergonomics.
+//!
+//! 2. `ManuallyRooted<T>`: This is the fully general rooting API used for
+//! holding onto non-LIFO GC roots with arbitrary lifetimes. However, users
+//! must manually unroot them. Failure to manually unroot a
+//! `ManuallyRooted<T>` before it is dropped will result in the GC object
+//! (and everything it transitively references) leaking for the duration of
+//! the `Store`'s lifetime.
+//!
+//! This type is roughly similar to SpiderMonkey's [`PersistentRooted`],
+//! although they avoid the manual-unrooting with internal mutation and
+//! shared references. (Our constraints mean we can't do those things, as
+//! explained above.)
+//!
+//! [`PersistentRooted`]: http://devdoc.net/web/developer.mozilla.org/en-US/docs/Mozilla/Projects/SpiderMonkey/JSAPI_reference/JS::PersistentRooted.html
+//!
+//! At the end of the day, both `Rooted<T>` and `ManuallyRooted<T>` are just
+//! tagged indices into the store's `RootSet`. This indirection allows working
with Rust's borrowing discipline (we use `&mut Store` to represent mutable +//! access to the GC heap) while still allowing rooted references to be moved +//! around without tying up the whole store in borrows. Additionally, and +//! crucially, this indirection allows us to update the *actual* GC pointers in +//! the `RootSet` and support moving GCs (again, as mentioned above). +//! +//! ## Unrooted References +//! +//! We generally don't expose *unrooted* GC references in the Wasmtime API at +//! this time -- and I expect it will be a very long time before we do, but in +//! the limit we may want to let users define their own GC-managed types that +//! participate in GC tracing and all that -- so we don't have to worry about +//! failure to root an object causing use-after-free bugs or failing to update a +//! GC root pointer after a moving GC as long as users stick to our safe rooting +//! APIs. (The one exception is `ValRaw`, which does hold raw GC references. But +//! with `ValRaw` all bets are off and safety is 100% up to the user.) +//! +//! We do, however, have to worry about these things internally. So first of +//! all, try to avoid ever working with unrooted GC references if you +//! can. However, if you really must, consider also using an `AutoAssertNoGc` +//! across the block of code that is manipulating raw GC references. + +use crate::{ + store::{AutoAssertNoGc, StoreId, StoreOpaque}, + AsContext, AsContextMut, GcRef, Result, RootedGcRef, +}; +use anyhow::anyhow; +use std::{ + fmt::Debug, + hash::Hash, + ops::{Deref, DerefMut}, +}; +use wasmtime_runtime::{VMExternRef, VMGcRef}; +use wasmtime_slab::{Id as SlabId, Slab}; + +mod sealed { + use super::*; + + /// Sealed, `wasmtime`-internal trait for GC references. + /// + /// # Safety + /// + /// All types implementing this trait must: + /// + /// * Be a newtype of a `GcRootIndex` + /// + /// * Not implement `Copy` or `Clone` + /// + /// * Only have `&self` methods. 
+ pub unsafe trait GcRefImpl: Sized { + /// Transmute a `&GcRootIndex` into an `&Self`. + fn transmute_ref(index: &GcRootIndex) -> &Self; + } + + /// Sealed, `wasmtime`-internal trait for the common methods on rooted GC + /// references. + pub trait RootedGcRefImpl { + /// Get this rooted GC reference's raw `VMGcRef` out of the store's GC + /// root set. + /// + /// Returns `None` for objects that have since been unrooted (eg because + /// its associated `RootedScope` was dropped). + /// + /// Panics if this root is not associated with the given store. + fn get_gc_ref<'a>(&self, store: &'a StoreOpaque) -> Option<&'a VMGcRef>; + + /// Like `get_gc_ref` but for mutable references. + fn get_gc_ref_mut<'a>(&self, store: &'a mut StoreOpaque) -> Option<&'a mut VMGcRef>; + + /// Same as `get_gc_ref` but returns an error instead of `None` for + /// objects that have been unrooted. + fn try_gc_ref<'a>(&self, store: &'a StoreOpaque) -> Result<&'a VMGcRef> { + self.get_gc_ref(store).ok_or_else(|| { + anyhow!("attempted to use a garbage-collected object that has been unrooted") + }) + } + + /// Like `try_gc_ref` but for mutable references. + fn try_gc_ref_mut<'a>(&self, store: &'a mut StoreOpaque) -> Result<&'a mut VMGcRef> { + self.get_gc_ref_mut(store).ok_or_else(|| { + anyhow!("attempted to use a garbage-collected object that has been unrooted") + }) + } + } +} +pub(crate) use sealed::*; + +/// The index of a GC root inside a particular store's GC root set. +/// +/// Can be either a LIFO- or manually-rooted object, depending on the +/// `PackedIndex`. +/// +/// Every `T` such that `T: GcRef` must be a newtype over this `GcRootIndex`. +#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)] +// Just `pub` to avoid `warn(private_interfaces)` in public APIs, which we can't +// `allow(...)` on our MSRV yet. 
+#[doc(hidden)] +pub struct GcRootIndex { + store_id: StoreId, + generation: u32, + index: PackedIndex, +} + +impl GcRootIndex { + #[inline] + pub(crate) fn comes_from_same_store(&self, store: &StoreOpaque) -> bool { + self.store_id == store.id() + } + + /// Same as `RootedGcRefImpl::get_gc_ref` but doesn't check that the raw GC + /// ref is only used during the scope of an `AutoAssertNoGc`. + /// + /// # Safety + /// + /// You must not trigger a GC while holding onto the resulting raw + /// `VMGcRef`. + pub(crate) unsafe fn unchecked_get_gc_ref<'a>( + &self, + store: &'a StoreOpaque, + ) -> Option<&'a VMGcRef> { + assert!( + self.comes_from_same_store(store), + "object used with wrong store" + ); + if let Some(index) = self.index.as_lifo() { + let entry = store.gc_roots().lifo_roots.get(index)?; + if entry.generation == self.generation { + Some(&entry.gc_ref) + } else { + None + } + } else if let Some(id) = self.index.as_manual() { + let gc_ref = store.gc_roots().manually_rooted.get(id); + debug_assert!(gc_ref.is_some()); + gc_ref + } else { + unreachable!() + } + } + + /// Same as `RootedGcRefImpl::get_gc_ref` but not associated with any + /// particular `T: GcRef`. + pub(crate) fn get_gc_ref<'a>(&self, store: &'a AutoAssertNoGc<'_>) -> Option<&'a VMGcRef> { + unsafe { self.unchecked_get_gc_ref(store) } + } + + /// Same as `unchecked_get_gc_ref` but returns an error instead of `None` if + /// the GC reference has been unrooted. + /// + /// # Safety + /// + /// You must not trigger a GC while holding onto the resulting raw + /// `VMGcRef`. + pub(crate) unsafe fn unchecked_try_gc_ref<'a>( + &self, + store: &'a StoreOpaque, + ) -> Result<&'a VMGcRef> { + self.unchecked_get_gc_ref(store).ok_or_else(|| { + anyhow!("attempted to use a garbage-collected object that has been unrooted") + }) + } + + /// Same as `get_gc_ref` but returns an error instead of `None` if the GC + /// reference has been unrooted. 
+ pub(crate) fn try_gc_ref<'a>(&self, store: &'a AutoAssertNoGc<'_>) -> Result<&'a VMGcRef> { + self.get_gc_ref(store).ok_or_else(|| { + anyhow!("attempted to use a garbage-collected object that has been unrooted") + }) + } + + /// Same as `RootedGcRefImpl::get_gc_ref_mut` but doesn't check that the raw + /// GC ref is only used during the scope of an `AutoAssertNoGc`. + /// + /// # Safety + /// + /// You must not trigger a GC while holding onto the resulting raw + /// `VMGcRef`. + pub(crate) unsafe fn unchecked_get_gc_ref_mut<'a>( + &self, + store: &'a mut StoreOpaque, + ) -> Option<&'a mut VMGcRef> { + assert!( + self.comes_from_same_store(store), + "object used with wrong store" + ); + if let Some(index) = self.index.as_lifo() { + let entry = store.gc_roots_mut().lifo_roots.get_mut(index)?; + if entry.generation == self.generation { + Some(&mut entry.gc_ref) + } else { + None + } + } else if let Some(id) = self.index.as_manual() { + let gc_ref = store.gc_roots_mut().manually_rooted.get_mut(id); + debug_assert!(gc_ref.is_some()); + gc_ref + } else { + unreachable!() + } + } + + /// Same as `RootedGcRefImpl::get_gc_ref_mut` but not associated with any + /// particular `T: GcRef`. + #[allow(dead_code)] // not currently used, but added for consistency + pub(crate) fn get_gc_ref_mut<'a>( + &self, + store: &'a mut AutoAssertNoGc<'_>, + ) -> Option<&'a mut VMGcRef> { + unsafe { self.unchecked_get_gc_ref_mut(store) } + } + + /// Same as `unchecked_get_gc_ref_mut` but returns an error instead of + /// `None` if the GC reference has been unrooted. + /// + /// # Safety + /// + /// You must not trigger a GC while holding onto the resulting raw + /// `VMGcRef`. 
+ pub(crate) unsafe fn unchecked_try_gc_ref_mut<'a>(
+ &self,
+ store: &'a mut StoreOpaque,
+ ) -> Result<&'a mut VMGcRef> {
+ self.unchecked_get_gc_ref_mut(store).ok_or_else(|| {
+ anyhow!("attempted to use a garbage-collected object that has been unrooted")
+ })
+ }
+
+ /// Same as `get_gc_ref_mut` but returns an error instead of `None` if the
+ /// GC reference has been unrooted.
+ #[allow(dead_code)] // not currently used, but added for consistency
+ pub(crate) fn try_gc_ref_mut<'a>(
+ &self,
+ store: &'a mut AutoAssertNoGc<'_>,
+ ) -> Result<&'a mut VMGcRef> {
+ self.get_gc_ref_mut(store).ok_or_else(|| {
+ anyhow!("attempted to use a garbage-collected object that has been unrooted")
+ })
+ }
+}
+
+/// This is a bit-packed version of
+///
+/// ```ignore
+/// enum {
+/// Lifo(usize),
+/// Manual(SlabId),
+/// }
+/// ```
+///
+/// where the high bit is the discriminant and the lower 31 bits are the
+/// payload.
+#[derive(Clone, Copy, PartialEq, Eq, Hash)]
+struct PackedIndex(u32);
+
+impl Debug for PackedIndex {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ if let Some(index) = self.as_lifo() {
+ f.debug_tuple("PackedIndex::Lifo").field(&index).finish()
+ } else if let Some(id) = self.as_manual() {
+ f.debug_tuple("PackedIndex::Manual").field(&id).finish()
+ } else {
+ unreachable!()
+ }
+ }
+}
+
+impl PackedIndex {
+ const DISCRIMINANT_MASK: u32 = 0b1 << 31;
+ const LIFO_DISCRIMINANT: u32 = 0b0 << 31;
+ const MANUAL_DISCRIMINANT: u32 = 0b1 << 31;
+ const PAYLOAD_MASK: u32 = !Self::DISCRIMINANT_MASK;
+
+ fn new_lifo(index: usize) -> PackedIndex {
+ let index32 = u32::try_from(index).unwrap();
+ assert_eq!(index32 & Self::DISCRIMINANT_MASK, 0);
+ let packed = PackedIndex(Self::LIFO_DISCRIMINANT | index32);
+ debug_assert!(packed.is_lifo());
+ debug_assert_eq!(packed.as_lifo(), Some(index));
+ debug_assert!(!packed.is_manual());
+ debug_assert!(packed.as_manual().is_none());
+ packed
+ }
+
+ fn new_manual(id: SlabId) -> PackedIndex {
+
let raw = id.into_raw();
+ assert_eq!(raw & Self::DISCRIMINANT_MASK, 0);
+ let packed = PackedIndex(Self::MANUAL_DISCRIMINANT | raw);
+ debug_assert!(packed.is_manual());
+ debug_assert_eq!(packed.as_manual(), Some(id));
+ debug_assert!(!packed.is_lifo());
+ debug_assert!(packed.as_lifo().is_none());
+ packed
+ }
+
+ fn discriminant(&self) -> u32 {
+ self.0 & Self::DISCRIMINANT_MASK
+ }
+
+ fn is_lifo(&self) -> bool {
+ self.discriminant() == Self::LIFO_DISCRIMINANT
+ }
+
+ fn is_manual(&self) -> bool {
+ self.discriminant() == Self::MANUAL_DISCRIMINANT
+ }
+
+ fn payload(&self) -> u32 {
+ self.0 & Self::PAYLOAD_MASK
+ }
+
+ fn as_lifo(&self) -> Option<usize> {
+ if self.is_lifo() {
+ Some(usize::try_from(self.payload()).unwrap())
+ } else {
+ None
+ }
+ }
+
+ fn as_manual(&self) -> Option<SlabId> {
+ if self.is_manual() {
+ Some(SlabId::from_raw(self.payload()))
+ } else {
+ None
+ }
+ }
+}
+
+/// The set of all GC roots in a single store/heap.
+#[derive(Debug, Default)]
+pub(crate) struct RootSet {
+ /// GC roots with arbitrary lifetime that are manually rooted and unrooted,
+ /// for use with `ManuallyRooted<T>`.
+ manually_rooted: Slab<VMGcRef>,
+
+ /// Strictly LIFO-ordered GC roots, for use with `RootScope` and
+ /// `Rooted<T>`.
+ lifo_roots: Vec<LifoRoot>,
+
+ /// Generation counter for entries to prevent ABA bugs with `RootScope` and
+ /// `Rooted<T>`.
+ lifo_generation: u32,
+}
+
+impl Drop for RootSet {
+ fn drop(&mut self) {
+ // Drop our `ExternRef` roots. In the future this will be a series of
+ // calls to `wasmtime_runtime::GcRuntime::on_unroot` trait method hook
+ // or something like that. However, this is all unnecessary for
+ // non-reference-counting collectors.
+
+ self.exit_lifo_scope(0);
+
+ for (_id, gc_ref) in self.manually_rooted.drain() {
+ // (Inlined copy of `self.unroot_gc_ref(gc_ref)` to avoid borrowing
+ // `self` while `self.manually_rooted` is already borrowed).
+ // + // Safety: our mutable access to the root set means that no one else + // should have concurrent access to the `VMExternRef`, so + // decrementing the reference count here is safe. + unsafe { + assert!(VMGcRef::ONLY_EXTERN_REF_IMPLEMENTED_YET); + let _ = VMExternRef::from_gc_ref(gc_ref); + } + } + } +} + +#[derive(Debug)] +struct LifoRoot { + generation: u32, + gc_ref: VMGcRef, +} + +impl RootSet { + /// Enter a LIFO rooting scope. + /// + /// Returns an integer that should be passed unmodified to `exit_lifo_scope` + /// when the scope is finished. + /// + /// Calls to `{enter,exit}_lifo_scope` must happen in a strict LIFO order. + #[inline] + pub(crate) fn enter_lifo_scope(&self) -> usize { + let len = self.lifo_roots.len(); + log::debug!("Entering GC root set LIFO scope: {len}"); + len + } + + /// Exit a LIFO rooting scope. + /// + /// The `scope` argument must be the result of the corresponding + /// `enter_lifo_scope` call. + /// + /// Calls to `{enter,exit}_lifo_scope` must happen in a strict LIFO order. + #[inline] + pub(crate) fn exit_lifo_scope(&mut self, scope: usize) { + log::debug!("Exiting GC root set LIFO scope: {scope}"); + debug_assert!(self.lifo_roots.len() >= scope); + + // If we actually have roots to unroot, call an out-of-line slow path. + if self.lifo_roots.len() > scope { + self.exit_lifo_scope_slow(scope) + } + } + + #[inline(never)] + #[cold] + fn exit_lifo_scope_slow(&mut self, scope: usize) { + // In the case where we have a tracing GC, this should really be: + // + // self.lifo_roots.truncate(scope); + // + // In the meantime, without deferred reference-counting collector for + // `externref`, we need to drop these references. In the future this + // will be a `wasmtime_runtime::GcRuntime::on_unroot` trait method hook + // or something like that, but where we can skip this whole loop and + // just do the above truncate when the collector doesn't need it. + for root in self.lifo_roots.drain(scope..) 
{ + // (Inlined copy of `self.unroot_gc_ref(root.gc_ref)` to avoid + // borrowing `self` while `self.manually_rooted` is already + // borrowed). + // + // Safety: our mutable access to the root set means that no one else + // should have concurrent access to the `VMExternRef`, so + // decrementing the reference count here is safe. + unsafe { + assert!(VMGcRef::ONLY_EXTERN_REF_IMPLEMENTED_YET); + let _ = VMExternRef::from_gc_ref(root.gc_ref); + } + } + + self.lifo_generation += 1; + } + + pub(crate) fn with_lifo_scope(store: &mut S, f: impl FnOnce(&mut S) -> T) -> T + where + S: DerefMut, + { + let scope = store.gc_roots().enter_lifo_scope(); + let ret = f(store); + store.gc_roots_mut().exit_lifo_scope(scope); + ret + } + + /// Hook for when a `gc_ref` is being unrooted. + /// + /// In the future, when we support multiple GC implementations, this should + /// be optional. + /// + /// # Safety + /// + /// The `gc_ref` must be rooted in this root set and belong to this root + /// set's store. + unsafe fn unroot_gc_ref(&mut self, gc_ref: VMGcRef) { + // Safety: our mutable access to the root set means that no one else + // should have concurrent access to the `VMExternRef`, so + // decrementing the reference count here is safe. + unsafe { + assert!(VMGcRef::ONLY_EXTERN_REF_IMPLEMENTED_YET); + let _ = VMExternRef::from_gc_ref(gc_ref); + } + } +} + +/// Clone a GC raw root. +/// +/// In the future, this will be a method on a `wasmtime_runtime::GcRuntime` +/// trait or something like that. +/// +/// # Safety +/// +/// The given `gc_ref` must belong to the given store. +unsafe fn clone_root(_store: &mut StoreOpaque, gc_ref: VMGcRef) -> VMGcRef { + // Safety: `externref`s are the only GC objects at this moment. + assert!(VMGcRef::ONLY_EXTERN_REF_IMPLEMENTED_YET); + unsafe { + let externref = VMExternRef::clone_from_gc_ref(gc_ref); + externref.into_gc_ref() + } +} + +/// A scoped, rooted reference to a garbage-collected `T`. 
+///
+/// A `Rooted<T>` is a strong handle to a garbage-collected `T`, preventing its
+/// referent (and anything else transitively referenced) from being collected by
+/// the GC during the scope within which this `Rooted<T>` was created.
+///
+/// When the context exits this `Rooted<T>`'s scope, the underlying GC object is
+/// automatically unrooted and any further attempts to access the underlying
+/// object will return errors or otherwise fail.
+///
+/// `Rooted<T>` dereferences to its underlying `T`, allowing you to call `T`'s
+/// methods.
+///
+/// # Example
+///
+/// ```
+/// # use wasmtime::*;
+/// # fn _foo() -> Result<()> {
+/// let mut store = Store::<()>::default();
+///
+/// // Allocating a GC object returns a `Rooted<T>`.
+/// let hello: Rooted<ExternRef> = ExternRef::new(&mut store, "hello");
+///
+/// // Because `Rooted<T>` derefs to `T`, we can call `T` methods on a
+/// // `Rooted<T>`. For example, we can call the `ExternRef::data` method when we
+/// // have a `Rooted<ExternRef>`.
+/// let data = hello
+/// .data(&store)?
+/// .downcast_ref::<&str>()
+/// .ok_or_else(|| Error::msg("not a str"))?;
+/// assert_eq!(*data, "hello");
+///
+/// // A `Rooted<T>` roots its underlying GC object for the duration of the
+/// // scope of the store/caller/context that was passed to the method that created
+/// // it. If we only want to keep a GC reference rooted and alive temporarily, we
+/// // can introduce new scopes with `RootScope`.
+/// {
+/// let mut scope = RootScope::new(&mut store);
+///
+/// // This `Rooted<T>` is automatically unrooted after `scope` is dropped,
+/// // allowing the collector to reclaim its GC object in the next GC.
+/// let scoped_ref = ExternRef::new(&mut scope, "goodbye"); +/// } +/// +/// let module = Module::new(store.engine(), r#" +/// (module +/// (global (export "global") (mut externref) (ref.null extern)) +/// (table (export "table") 10 externref) +/// (func (export "func") (param externref) (result externref) +/// local.get 0 +/// ) +/// ) +/// "#)?; +/// let instance = Instance::new(&mut store, &module, &[])?; +/// +/// // GC references returned from calls into Wasm also return (optional, if the +/// // Wasm type is nullable) `Rooted`s. +/// let result: Option> = instance +/// .get_typed_func::>, Option>>(&mut store, "func")? +/// .call(&mut store, Some(hello))?; +/// +/// // Similarly, getting a GC reference from a Wasm instance's exported global +/// // or table yields a `Rooted`. +/// +/// let global = instance +/// .get_global(&mut store, "global") +/// .ok_or_else(|| Error::msg("missing `global` export"))?; +/// let global_val = global.get(&mut store); +/// let global_ref: Option<&Rooted<_>> = global_val +/// .externref() +/// .ok_or_else(|| Error::msg("not an externref"))?; +/// +/// let table = instance.get_table(&mut store, "table").unwrap(); +/// let table_elem = table +/// .get(&mut store, 3) +/// .ok_or_else(|| Error::msg("table out of bounds"))?; +/// let table_elem_ref: Option<&Rooted<_>> = table_elem +/// .as_extern() +/// .ok_or_else(|| Error::msg("not an externref"))?; +/// # Ok(()) +/// # } +/// ``` +/// +/// # Differences Between `Rooted` and `ManuallyRooted` +/// +/// While `Rooted` is automatically unrooted when its scope is exited, this +/// means that `Rooted` is only valid for strictly last-in-first-out (LIFO, +/// aka stack order) lifetimes. This is in contrast to +/// [`ManuallyRooted`][crate::ManuallyRooted], which supports rooting GC +/// objects for arbitrary lifetimes, but requires manual unrooting. 
+/// +/// | Type | Supported Lifetimes | Unrooting | +/// |----------------------------------------------|-----------------------------|-----------| +/// | [`Rooted`][crate::Rooted] | Strictly LIFO / stack order | Automatic | +/// | [`ManuallyRooted`][crate::ManuallyRooted] | Arbitrary | Manual | +/// +/// `Rooted` should suffice for most use cases, and provides better +/// ergonomics, but `ManuallyRooted` exists as a fully-general escape hatch. +/// +/// # Scopes +/// +/// Wasmtime automatically creates two kinds of scopes: +/// +/// 1. A [`Store`][crate::Store] is the outermost rooting scope. Creating a +/// `Root` directly inside a `Store` permanently roots the underlying +/// object, similar to dropping a +/// [`ManuallyRooted`][crate::ManuallyRooted] without unrooting it. +/// +/// 2. A [`Caller`][crate::Caller] provides a rooting scope for the duration of +/// a call from Wasm into a host function. Any objects rooted in a `Caller` +/// will be unrooted after the host function returns. Note that there can be +/// nested `Caller` scopes in the case where Wasm calls a host function, +/// creating the first `Caller` and its rooting scope , and then the host +/// function calls a Wasm function which then calls another host function, +/// creating a second `Caller` and a second rooting scope. This nesting can +/// be arbitrarily deep. +/// +/// Additionally, if you would like to define finer-grained rooting scopes, +/// Wasmtime provides the [`RootScope`][crate::RootScope] type. +/// +/// Scopes are always nested in a last-in-first-out (LIFO) order. An outer scope +/// is never exited (and the `Rooted`s defined within it are never +/// automatically unrooted) while an inner scope is still active. All inner +/// scopes are exited before their outer scopes. 
+/// +/// The following diagram illustrates various rooting scopes over time, how they +/// nest, and when their `Rooted`s are automatically unrooted: +/// +/// ```text +/// ----- new Store +/// | +/// | +/// | let a: Rooted = ...; +/// | +/// | +/// | ----- call into Wasm +/// | | +/// | | +/// | | ----- Wasm calls host function F +/// | | | +/// | | | +/// | | | let b: Rooted = ...; +/// | | | +/// | | | +/// | | | ----- F calls into Wasm +/// | | | | +/// | | | | +/// | | | | ----- Wasm call host function G +/// | | | | | +/// | | | | | +/// | | | | | let c: Rooted = ...; +/// | | | | | +/// | | | | | +/// | | | | ----- return to Wasm from host function G (unroots `c`) +/// | | | | +/// | | | | +/// | | | ----- Wasm returns to F +/// | | | +/// | | | +/// | | ----- return from host function F (unroots `b`) +/// | | +/// | | +/// | ----- return from Wasm +/// | +/// | +/// | ----- let scope1 = RootScope::new(...); +/// | | +/// | | +/// | | let d: Rooted = ...; +/// | | +/// | | +/// | | ----- let scope2 = RootScope::new(...); +/// | | | +/// | | | +/// | | | let e: Rooted = ...; +/// | | | +/// | | | +/// | | ----- drop `scope2` (unroots `e`) +/// | | +/// | | +/// | ----- drop `scope1` (unroots `d`) +/// | +/// | +/// ----- drop Store (unroots `a`) +/// ``` +/// +/// A `Rooted` can be used successfully as long as it is still rooted so, in +/// the above diagram, `d` is valid inside `scope2` because `scope2` is wholly +/// contained within the scope `d` was rooted within (`scope1`). +/// +/// See also the documentation for [`RootScope`][crate::RootScope]. 
+#[repr(transparent)] +pub struct Rooted { + inner: GcRootIndex, + _phantom: std::marker::PhantomData, +} + +impl Clone for Rooted { + fn clone(&self) -> Self { + Rooted { + inner: self.inner, + _phantom: std::marker::PhantomData, + } + } +} + +impl Copy for Rooted {} + +impl Debug for Rooted { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + let name = format!("Rooted<{}>", std::any::type_name::()); + f.debug_struct(&name).field("inner", &self.inner).finish() + } +} + +impl RootedGcRefImpl for Rooted { + fn get_gc_ref<'a>(&self, store: &'a StoreOpaque) -> Option<&'a VMGcRef> { + assert!( + self.comes_from_same_store(store), + "object used with wrong store" + ); + let index = self.inner.index.as_lifo().unwrap(); + let entry = store.gc_roots().lifo_roots.get(index)?; + if entry.generation == self.inner.generation { + Some(&entry.gc_ref) + } else { + None + } + } + + fn get_gc_ref_mut<'a>(&self, store: &'a mut StoreOpaque) -> Option<&'a mut VMGcRef> { + assert!( + self.comes_from_same_store(store), + "object used with wrong store" + ); + let index = self.inner.index.as_lifo().unwrap(); + let entry = store.gc_roots_mut().lifo_roots.get_mut(index)?; + if entry.generation == self.inner.generation { + Some(&mut entry.gc_ref) + } else { + None + } + } +} + +impl Deref for Rooted { + type Target = T; + + fn deref(&self) -> &Self::Target { + T::transmute_ref(&self.inner) + } +} + +impl Rooted { + /// Push the given `VMGcRef` onto our LIFO root set. + /// + /// # Safety + /// + /// `gc_ref` must be a valid GC reference pointing to an instance of the GC + /// type that `T` represents. + /// + /// `gc_ref` must belong to `store`'s heap. 
+ pub(crate) unsafe fn new(store: &mut AutoAssertNoGc<'_>, gc_ref: VMGcRef) -> Rooted { + let roots = store.gc_roots_mut(); + let generation = roots.lifo_generation; + let index = roots.lifo_roots.len(); + let index = PackedIndex::new_lifo(index); + + roots.lifo_roots.push(LifoRoot { generation, gc_ref }); + + Rooted { + inner: GcRootIndex { + store_id: store.id(), + generation, + index, + }, + _phantom: std::marker::PhantomData, + } + } + + #[inline] + pub(crate) fn comes_from_same_store(&self, store: &StoreOpaque) -> bool { + debug_assert!(self.inner.index.is_lifo()); + self.inner.comes_from_same_store(store) + } + + /// Create a [`ManuallyRooted`][crate::ManuallyRooted] holding onto the + /// same GC object as `self`. + /// + /// Returns `None` if `self` is used outside of its scope and has therefore + /// been unrooted. + /// + /// This does not unroot `self`, and `self` remains valid until its + /// associated scope is exited. + /// + /// # Panics + /// + /// Panics if this object is not associate with the given store. + /// + /// # Example + /// + /// ``` + /// # use wasmtime::*; + /// # fn _foo() -> Result<()> { + /// let mut store = Store::<()>::default(); + /// + /// let y: ManuallyRooted<_> = { + /// // Create a nested rooting scope. + /// let mut scope = RootScope::new(&mut store); + /// + /// // `x` is only rooted within this nested scope. + /// let x: Rooted<_> = ExternRef::new(&mut scope, "hello!"); + /// + /// // Extend `x`'s rooting past its scope's lifetime by converting it + /// // to a `ManuallyRooted`. + /// x.to_manually_rooted(&mut scope)? + /// }; + /// + /// // Now we can still access the reference outside the scope it was + /// // originally defined within. + /// let data = y.data(&store)?; + /// let data = data.downcast_ref::<&str>().unwrap(); + /// assert_eq!(*data, "hello!"); + /// + /// // But we have to manually unroot `y`. 
+ /// y.unroot(&mut store); + /// # Ok(()) + /// # } + /// ``` + pub fn to_manually_rooted(&self, mut store: impl AsContextMut) -> Result> { + self._to_manually_rooted(store.as_context_mut().0) + } + + pub(crate) fn _to_manually_rooted(&self, store: &mut StoreOpaque) -> Result> { + let mut store = AutoAssertNoGc::new(store); + let gc_ref = *self.try_gc_ref(&store)?; + + // Safety: `gc_ref` belongs to the store, asserted by `try_gc_ref`. + let gc_ref = unsafe { clone_root(&mut store, gc_ref) }; + + // Safety: `gc_ref` is a `T`, since we got it from `self`. + Ok(unsafe { ManuallyRooted::new(&mut store, gc_ref) }) + } + + /// Are these two `Rooted`s the same GC root? + /// + /// Note that this function can return `false` even when `a` and `b` are + /// rooting the same underlying GC object, but the object was rooted + /// multiple times (for example in different scopes). Use + /// [`Rooted::ref_eq`][crate::Rooted::ref_eq] to test whether these are + /// references to the same underlying GC object or not. + /// + /// # Example + /// + /// ``` + /// # use wasmtime::*; + /// # fn foo() -> Result<()> { + /// let mut store = Store::<()>::default(); + /// + /// let a = ExternRef::new(&mut store, "hello"); + /// let b = a; + /// + /// // `a` and `b` are the same GC root. + /// assert!(Rooted::rooted_eq(a, b)); + /// + /// { + /// let mut scope = RootScope::new(&mut store); + /// + /// // `c` is a different GC root, in a different scope, even though it + /// // is rooting the same object. + /// let c = a.to_manually_rooted(&mut scope)?.into_rooted(&mut scope); + /// assert!(!Rooted::rooted_eq(a, c)); + /// } + /// + /// let x = ExternRef::new(&mut store, "goodbye"); + /// + /// // `a` and `x` are different GC roots, rooting different objects. + /// assert!(!Rooted::rooted_eq(a, x)); + /// # Ok(()) + /// # } + /// ``` + pub fn rooted_eq(a: Self, b: Self) -> bool { + a.inner == b.inner + } + + /// Are these two GC roots referencing the same underlying GC object? 
+ /// + /// This function will return `true` even when `a` and `b` are different GC + /// roots (for example because they were rooted in different scopes) if they + /// are rooting the same underlying GC object. To only test whether they are + /// the same GC root, and not whether they are rooting the same GC object, + /// use [`Rooted::rooted_eq`][crate::Rooted::rooted_eq]. + /// + /// Returns an error if either `a` or `b` has been unrooted, for example + /// because the scope it was rooted within has been exited. + /// + /// Because this method takes any `impl RootedGcRef` arguments, it can be + /// used to compare, for example, a `Rooted` and a `ManuallyRooted`. + /// + /// # Panics + /// + /// Panics if either `a` or `b` is not associated with the given `store`. + /// + /// # Example + /// + /// ``` + /// # use wasmtime::*; + /// # fn foo() -> Result<()> { + /// let mut store = Store::<()>::default(); + /// + /// let a = ExternRef::new(&mut store, "hello"); + /// let b = a; + /// + /// // `a` and `b` are rooting the same object. + /// assert!(Rooted::ref_eq(&store, &a, &b)?); + /// + /// { + /// let mut scope = RootScope::new(&mut store); + /// + /// // `c` is a different GC root, in a different scope, but still + /// // rooting the same object. + /// let c = a.to_manually_rooted(&mut scope)?.into_rooted(&mut scope); + /// assert!(!Rooted::ref_eq(&scope, &a, &c)?); + /// } + /// + /// let x = ExternRef::new(&mut store, "goodbye"); + /// + /// // `a` and `x` are rooting different objects. + /// assert!(!Rooted::ref_eq(&store, &a, &x)?); + /// + /// // You can also compare `Rooted`s and `ManuallyRooted`s with this + /// // function. 
+ /// let d = a.to_manually_rooted(&mut store)?; + /// assert!(Rooted::ref_eq(&store, &a, &d)?); + /// + /// d.unroot(&mut store); + /// # Ok(()) + /// # } + /// ``` + pub fn ref_eq( + store: impl AsContext, + a: &impl RootedGcRef, + b: &impl RootedGcRef, + ) -> Result { + let store = store.as_context().0; + let a = a.try_gc_ref(store)?; + let b = b.try_gc_ref(store)?; + Ok(a == b) + } + + /// Hash this root. + /// + /// Note that, similar to `Rooted::rooted_eq`, this only operates on the + /// root and *not* the underlying GC reference. That means that two + /// different rootings of the same object will hash to different values + /// (modulo hash collisions). If this is undesirable, use the + /// [`ref_hash`][crate::Rooted::ref_hash] method instead. + pub fn rooted_hash(&self, state: &mut H) + where + H: std::hash::Hasher, + { + self.inner.hash(state); + } + + /// Hash the underlying rooted object reference. + /// + /// Note that, similar to `Rooted::ref_eq`, and operates on the underlying + /// rooted GC object reference, not the root. That means that two + /// *different* rootings of the same object will hash to the *same* + /// value. If this is undesirable, use the + /// [`rooted_hash`][crate::Rooted::rooted_hash] method instead. + pub fn ref_hash(&self, store: impl AsContext, state: &mut H) -> Result<()> + where + H: std::hash::Hasher, + { + let gc_ref = self.try_gc_ref(store.as_context().0)?; + gc_ref.hash(state); + Ok(()) + } +} + +/// Nested rooting scopes. +/// +/// `RootScope` allows the creation or nested rooting scopes for use with +/// [`Rooted`][crate::Rooted]. This allows for fine-grained control over how +/// long a set of [`Rooted`][crate::Rooted]s are strongly held alive, giving +/// gives you the tools necessary to avoid holding onto GC objects longer than +/// necessary. `Rooted`s created within a `RootScope` are automatically +/// unrooted when the `RootScope` is dropped. 
For more details on +/// [`Rooted`][crate::Rooted] lifetimes and their interaction with rooting +/// scopes, see [`Rooted`][crate::Rooted]'s documentation. +/// +/// A `RootScope` wraps a `C: AsContextMut` (that is, anything that +/// represents exclusive access to a [`Store`][crate::Store]) and in turn +/// implements [`AsContext`][crate::AsContext] and +/// [`AsContextMut`][crate::AsContextMut] in terms of its underlying +/// `C`. Therefore, `RootScope` can be used anywhere you would use the +/// underlying `C`, for example in the [`Global::get`][crate::Global::get] +/// method. Any `Rooted`s created by a method that a `RootScope` was +/// passed as context to are tied to the `RootScope`'s scope and +/// automatically unrooted when the scope is dropped. +/// +/// # Example +/// +/// ``` +/// # use wasmtime::*; +/// # fn _foo() -> Result<()> { +/// let mut store = Store::<()>::default(); +/// +/// let a: Rooted<_>; +/// let b: Rooted<_>; +/// let c: Rooted<_>; +/// +/// // Root `a` in the store's scope. It will be rooted for the duration of the +/// // store's lifetime. +/// a = ExternRef::new(&mut store, 42); +/// +/// // `a` is rooted, so we can access its data successfully. +/// assert!(a.data(&store).is_ok()); +/// +/// { +/// let mut scope1 = RootScope::new(&mut store); +/// +/// // Root `b` in `scope1`. +/// b = ExternRef::new(&mut scope1, 36); +/// +/// // Both `a` and `b` are rooted. +/// assert!(a.data(&scope1).is_ok()); +/// assert!(b.data(&scope1).is_ok()); +/// +/// { +/// let mut scope2 = RootScope::new(&mut scope1); +/// +/// // Root `c` in `scope2`. +/// c = ExternRef::new(&mut scope2, 36); +/// +/// // All of `a`, `b`, and `c` are rooted. +/// assert!(a.data(&scope2).is_ok()); +/// assert!(b.data(&scope2).is_ok()); +/// assert!(c.data(&scope2).is_ok()); +/// +/// // Drop `scope2`. +/// } +/// +/// // Now `a` and `b` are still rooted, but `c` was unrooted when we dropped +/// // `scope2`. 
+/// assert!(a.data(&scope1).is_ok()); +/// assert!(b.data(&scope1).is_ok()); +/// assert!(c.data(&scope1).is_err()); +/// +/// // Drop `scope1`. +/// } +/// +/// // And now only `a` is still rooted. Both `b` and `c` were unrooted when we +/// // dropped their respective rooting scopes. +/// assert!(a.data(&store).is_ok()); +/// assert!(b.data(&store).is_err()); +/// assert!(c.data(&store).is_err()); +/// # Ok(()) +/// # } +/// ``` +pub struct RootScope +where + C: AsContextMut, +{ + store: C, + initial_lifo_len: usize, +} + +impl Drop for RootScope +where + C: AsContextMut, +{ + fn drop(&mut self) { + let len = self.initial_lifo_len; + self.gc_roots().exit_lifo_scope(len); + } +} + +impl RootScope +where + C: AsContextMut, +{ + // NB: we MUST NOT expose a method like + // + // pub fn store(&mut self) -> &mut Store { ... } + // + // because callers could do treacherous things like + // + // let scope1 = RootScope::new(&mut store1); + // let scope2 = RootScope::new(&mut store2); + // std::mem::swap(scope1.store(), scope2.store()); + // + // and then we would start truncate the store's GC root set's LIFO roots to + // the wrong lengths. + // + // Instead, we just implement `AsContext[Mut]` for `RootScope`. + + /// Construct a new scope for rooting GC objects. + /// + /// # Example + /// + /// ``` + /// # use wasmtime::*; + /// let mut store = Store::<()>::default(); + /// + /// { + /// let mut scope = RootScope::new(&mut store); + /// + /// // Temporarily root GC objects in this nested rooting scope... + /// } + /// ``` + pub fn new(store: C) -> Self { + let initial_lifo_len = store.as_context().0.gc_roots().enter_lifo_scope(); + RootScope { + store, + initial_lifo_len, + } + } + + fn gc_roots(&mut self) -> &mut RootSet { + self.store.as_context_mut().0.gc_roots_mut() + } + + fn lifo_roots(&mut self) -> &mut Vec { + &mut self.gc_roots().lifo_roots + } + + /// Reserve enough capacity for `additional` GC roots in this scope. 
+ /// + /// # Example + /// + /// ``` + /// # use wasmtime::*; + /// let mut store = Store::<()>::default(); + /// + /// { + /// let mut scope = RootScope::new(&mut store); + /// + /// // Ensure we have enough storage pre-allocated to root five GC + /// // references inside this scope without any underlying reallocation. + /// scope.reserve(5); + /// + /// // ... + /// } + /// ``` + pub fn reserve(&mut self, additional: usize) { + self.lifo_roots().reserve(additional); + } +} + +impl AsContext for RootScope +where + T: AsContextMut, +{ + type Data = T::Data; + + fn as_context(&self) -> crate::StoreContext<'_, Self::Data> { + self.store.as_context() + } +} + +impl AsContextMut for RootScope +where + T: AsContextMut, +{ + fn as_context_mut(&mut self) -> crate::StoreContextMut<'_, Self::Data> { + self.store.as_context_mut() + } +} + +/// A rooted reference to a garbage-collected `T` with arbitrary lifetime. +/// +/// A `ManuallyRooted` is a strong handle to a garbage-collected `T`, +/// preventing its referent (and anything else transitively referenced) from +/// being collected by the GC until [`unroot`][crate::ManuallyRooted::unroot] is +/// explicitly called. +/// +/// The primary way to create a `ManuallyRooted` is to promote a temporary +/// `Rooted` into a `ManuallyRooted` via its +/// [`to_manually_rooted`][crate::Rooted::to_manually_rooted] method. +/// +/// `ManuallyRooted` dereferences to its underlying `T`, allowing you to call +/// `T`'s methods. +/// +/// # Example +/// +/// ``` +/// # use wasmtime::*; +/// # fn _foo() -> Result<()> { +/// let mut store = Store::>>::default(); +/// +/// // Create our `ManuallyRooted` in a nested scope to avoid rooting it for +/// // the duration of the store's lifetime. +/// let x = { +/// let mut scope = RootScope::new(&mut store); +/// let x = ExternRef::new(&mut scope, 1234); +/// x.to_manually_rooted(&mut scope)? +/// }; +/// +/// // Place `x` into our store. 
+/// *store.data_mut() = Some(x); +/// +/// // Do a bunch stuff that may or may not access, replace, or take `x`... +/// +/// // At any time, in any arbitrary scope, we can remove `x` from the store +/// // and unroot it: +/// if let Some(x) = store.data_mut().take() { +/// x.unroot(&mut store); +/// } +/// # Ok(()) +/// # } +/// ``` +/// +/// # Differences Between `ManuallyRooted` and `Rooted` +/// +/// While `ManuallyRooted` can have arbitrary lifetimes, it requires manual +/// unrooting. This is in contrast to [`Rooted`][crate::Rooted] which is +/// restricted to strictly last-in-first-out (LIFO, aka stack order) lifetimes, +/// but comes with automatic unrooting. +/// +/// | Type | Supported Lifetimes | Unrooting | +/// |----------------------------------------------|-----------------------------|-----------| +/// | [`Rooted`][crate::Rooted] | Strictly LIFO / stack order | Automatic | +/// | [`ManuallyRooted`][crate::ManuallyRooted] | Arbitrary | Manual | +/// +/// `Rooted` should suffice for most use cases, and provides better +/// ergonomics, but `ManuallyRooted` exists as a fully-general escape hatch. +/// +/// # Manual Unrooting +/// +/// Failure to explicitly call [`unroot`][crate::ManuallyRooted::unroot] (or +/// another method that consumes `self` and unroots the reference, such as +/// [`into_rooted`][crate::ManuallyRooted::into_rooted]) will leak the +/// underlying GC object, preventing it from being garbage collected until its +/// owning [`Store`][crate::Store] is dropped. That means all of the following +/// will result in permanently rooting the underlying GC object: +/// +/// * Implicitly dropping a `ManuallyRooted`: +/// +/// ```no_run +/// # use wasmtime::*; +/// # let get_manually_rooted = || -> ManuallyRooted { todo!() }; +/// { +/// let perma_root: ManuallyRooted<_> = get_manually_rooted(); +/// +/// // `perma_root` is implicitly dropped at the end of its scope, +/// // permanently rooting/leaking its referent. 
+/// } +/// ``` +/// +/// * Explicitly dropping a `ManuallyRooted`: `drop(my_manually_rooted)`. +/// +/// * Forgetting a `ManuallyRooted`: `std::mem::forget(my_manually_rooted)`. +/// +/// * Inserting a `ManuallyRooted` into a `std::sync::Arc` or `std::rc::Rc` +/// cycle. +/// +/// * Etc... +/// +/// Wasmtime does *not* assert that a `ManuallyRooted` is unrooted on `Drop`, +/// or otherwise raise a panic, log a warning, or etc... on failure to manually +/// unroot. Sometimes leaking is intentional and desirable, particularly when +/// dealing with short-lived [`Store`][crate::Store]s where unrooting would just +/// be busy work since the whole store is about to be dropped. +pub struct ManuallyRooted +where + T: GcRef, +{ + inner: GcRootIndex, + _phantom: std::marker::PhantomData, +} + +impl Debug for ManuallyRooted { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + let name = format!("ManuallyRooted<{}>", std::any::type_name::()); + f.debug_struct(&name).field("inner", &self.inner).finish() + } +} + +impl Deref for ManuallyRooted { + type Target = T; + + fn deref(&self) -> &Self::Target { + T::transmute_ref(&self.inner) + } +} + +impl ManuallyRooted +where + T: GcRef, +{ + /// Construct a new manually-rooted GC root. + /// + /// # Safety + /// + /// `gc_ref` must be a valid GC reference pointing to an instance of the GC + /// type that `T` represents. + /// + /// `gc_ref` must belong to `store`'s heap. + pub(crate) unsafe fn new(store: &mut AutoAssertNoGc<'_>, gc_ref: VMGcRef) -> Self { + let id = store.gc_roots_mut().manually_rooted.alloc(gc_ref); + ManuallyRooted { + inner: GcRootIndex { + store_id: store.id(), + generation: 0, + index: PackedIndex::new_manual(id), + }, + _phantom: std::marker::PhantomData, + } + } + + #[inline] + pub(crate) fn comes_from_same_store(&self, store: &StoreOpaque) -> bool { + debug_assert!(self.inner.index.is_manual()); + self.inner.comes_from_same_store(store) + } + + /// Clone this `ManuallyRooted`. 
+ /// + /// Does not consume or unroot `self`: both `self` and the new + /// `ManuallyRooted` return value will need to be manually unrooted. + /// + /// # Panics + /// + /// Panics if `self` is not associated with the given `store`. + /// + /// # Example + /// + /// ``` + /// # use wasmtime::*; + /// # fn _foo() -> Result<()> { + /// let mut store = Store::>>::default(); + /// + /// // Create our `ManuallyRooted` in a nested scope to avoid rooting it for + /// // the duration of the store's lifetime. + /// let x = { + /// let mut scope = RootScope::new(&mut store); + /// let x = ExternRef::new(&mut scope, 1234); + /// x.to_manually_rooted(&mut scope)? + /// }; + /// + /// // Push five clones of `x` into our store. + /// for _ in 0..5 { + /// let x_clone = x.clone(&mut store); + /// store.data_mut().push(x_clone); + /// } + /// # Ok(()) + /// # } + /// ``` + pub fn clone(&self, mut store: impl AsContextMut) -> Self { + self._clone(store.as_context_mut().0) + } + + pub(crate) fn _clone(&self, store: &mut StoreOpaque) -> Self { + let mut store = AutoAssertNoGc::new(store); + let gc_ref = *self + .get_gc_ref(&store) + .expect("ManuallyRooted always has a gc ref"); + // Safety: `gc_ref` belongs to this store, asserted by `get_gc_ref`. + let gc_ref = unsafe { clone_root(&mut store, gc_ref) }; + unsafe { Self::new(&mut store, gc_ref) } + } + + /// Unroot this GC object. + /// + /// Failure to call this method will result in the GC object, and anything + /// it transitively references, being kept alive (aka "leaking") for the + /// entirety of the store's lifetime. + /// + /// See the type-level docs for example usage. 
+ pub fn unroot(self, mut store: impl AsContextMut) { + self._unroot(store.as_context_mut().0) + } + + pub(crate) fn _unroot(self, store: &mut StoreOpaque) { + assert!( + self.comes_from_same_store(store), + "object used with wrong store" + ); + + let gc_ref = *self.get_gc_ref(store).unwrap(); + + let id = self.inner.index.as_manual().unwrap(); + let roots = store.gc_roots_mut(); + roots.manually_rooted.dealloc(id); + + // Safety: this `gc_ref` belongs to this store, asserted by + // `get_gc_ref`. + unsafe { + roots.unroot_gc_ref(gc_ref); + } + } + + /// Clone this `ManuallyRooted` into a `Rooted`. + /// + /// This operation does not consume or unroot this `ManuallyRooted`. + /// + /// The underlying GC object is re-rooted in the given context's scope. The + /// resulting `Rooted` is only valid during the given context's + /// scope. See the [`Rooted`][crate::Rooted] documentation for more + /// details on rooting scopes. + /// + /// This operation does not consume or unroot this `ManuallyRooted`. + /// + /// # Panics + /// + /// Panics if this object is not associated with the given context's store. + /// + /// # Example + /// + /// ``` + /// # use wasmtime::*; + /// # fn _foo() -> Result<()> { + /// let mut store = Store::<()>::default(); + /// + /// let root1: Rooted<_>; + /// + /// let manual = { + /// let mut scope = RootScope::new(&mut store); + /// root1 = ExternRef::new(&mut scope, 1234); + /// root1.to_manually_rooted(&mut scope)? + /// }; + /// + /// // `root1` is no longer accessible because it was unrooted when `scope` + /// // was dropped. + /// assert!(root1.data(&store).is_err()); + /// + /// // But we can re-root `manual` into this scope. + /// let root2 = manual.to_rooted(&mut store); + /// assert!(root2.data(&store).is_ok()); + /// + /// // And we also still have access to `manual` and we still have to + /// // manually unroot it. 
+ /// assert!(manual.data(&store).is_ok()); + /// manual.unroot(&mut store); + /// # Ok(()) + /// # } + /// ``` + pub fn to_rooted(&self, mut context: impl AsContextMut) -> Rooted { + self._to_rooted(context.as_context_mut().0) + } + + pub(crate) fn _to_rooted(&self, store: &mut StoreOpaque) -> Rooted { + assert!( + self.comes_from_same_store(store), + "object used with wrong store" + ); + + let mut store = AutoAssertNoGc::new(store); + let gc_ref = *self.get_gc_ref(&store).unwrap(); + + // Safety: `gc_ref` is associated with this store, asserted by + // `get_gc_ref`. + let gc_ref = unsafe { clone_root(&mut store, gc_ref) }; + + // Safety: `gc_ref` points to a valid `T` because it came from `self`. + unsafe { Rooted::new(&mut store, gc_ref) } + } + + /// Convert this `ManuallyRooted` into a `Rooted`. + /// + /// The underlying GC object is re-rooted in the given context's scope. The + /// resulting `Rooted` is only valid during the given context's + /// scope. See the [`Rooted`][crate::Rooted] documentation for more + /// details on rooting scopes. + /// + /// This operation consumes and unroots this `ManuallyRooted`. + /// + /// # Panics + /// + /// Panics if this object is not associate with the given context's store. + /// + /// # Example + /// + /// ``` + /// # use wasmtime::*; + /// # fn _foo() -> Result<()> { + /// let mut store = Store::<()>::default(); + /// + /// let root1: Rooted<_>; + /// + /// let manual = { + /// let mut scope = RootScope::new(&mut store); + /// root1 = ExternRef::new(&mut scope, 1234); + /// root1.to_manually_rooted(&mut scope)? + /// }; + /// + /// // `root1` is no longer accessible because it was unrooted when `scope` + /// // was dropped. + /// assert!(root1.data(&store).is_err()); + /// + /// // But we can re-root `manual` into this scope. 
+ /// let root2 = manual.into_rooted(&mut store); + /// assert!(root2.data(&store).is_ok()); + /// + /// // `manual` was consumed by the `into_rooted` call, and we no longer + /// // have access to it, nor need to manually unroot it. + /// # Ok(()) + /// # } + /// ``` + pub fn into_rooted(self, mut context: impl AsContextMut) -> Rooted { + self._into_rooted(context.as_context_mut().0) + } + + pub(crate) fn _into_rooted(self, store: &mut StoreOpaque) -> Rooted { + assert!( + self.comes_from_same_store(store), + "object used with wrong store" + ); + let rooted = self._to_rooted(store); + self._unroot(store); + rooted + } + + /// Are these two GC roots referencing the same underlying GC object? + /// + /// This function will return `true` even when `a` and `b` are different GC + /// roots (for example because they were rooted in different scopes) if they + /// are rooting the same underlying GC object. + /// + /// Because this method takes any `impl RootedGcRef` arguments, it can be + /// used to compare, for example, a `Rooted` and a `ManuallyRooted`. + /// + /// # Panics + /// + /// Panics if either `a` or `b` is not associated with the given `store`. + /// + /// # Example + /// + /// ``` + /// # use wasmtime::*; + /// # fn foo() -> Result<()> { + /// let mut store = Store::<()>::default(); + /// + /// let a = ExternRef::new_manually_rooted(&mut store, "hello"); + /// let b = a.clone(&mut store); + /// + /// // `a` and `b` are rooting the same object. + /// assert!(ManuallyRooted::ref_eq(&store, &a, &b)?); + /// + /// { + /// let mut scope = RootScope::new(&mut store); + /// + /// // `c` is a different GC root, is in a different scope, and is a + /// // `Rooted` instead of a `ManuallyRooted`, but is still rooting + /// // the same object. 
+ /// let c = a.to_rooted(&mut scope); + /// assert!(ManuallyRooted::ref_eq(&scope, &a, &c)?); + /// } + /// + /// let x = ExternRef::new_manually_rooted(&mut store, "goodbye"); + /// + /// // `a` and `x` are rooting different objects. + /// assert!(!ManuallyRooted::ref_eq(&store, &a, &x)?); + /// + /// a.unroot(&mut store); + /// b.unroot(&mut store); + /// x.unroot(&mut store); + /// # Ok(()) + /// # } + /// ``` + pub fn ref_eq( + store: impl AsContext, + a: &impl RootedGcRef, + b: &impl RootedGcRef, + ) -> Result { + Rooted::ref_eq(store, a, b) + } + + /// Hash this root. + /// + /// Note that, similar to `Rooted::rooted_eq`, this only operates on the + /// root and *not* the underlying GC reference. That means that two + /// different rootings of the same object will hash to different values + /// (modulo hash collisions). If this is undesirable, use the + /// [`ref_hash`][crate::ManuallyRooted::ref_hash] method instead. + pub fn rooted_hash(&self, state: &mut H) + where + H: std::hash::Hasher, + { + self.inner.hash(state); + } + + /// Hash the underlying rooted object reference. + /// + /// Note that, similar to `Rooted::ref_eq`, and operates on the underlying + /// rooted GC object reference, not the root. That means that two + /// *different* rootings of the same object will hash to the *same* + /// value. If this is undesirable, use the + /// [`rooted_hash`][crate::Rooted::rooted_hash] method instead. 
+ pub fn ref_hash(&self, store: impl AsContext, state: &mut H) + where + H: std::hash::Hasher, + { + let gc_ref = self + .get_gc_ref(store.as_context().0) + .expect("ManuallyRooted's get_gc_ref is infallible"); + gc_ref.hash(state); + } +} + +impl RootedGcRefImpl for ManuallyRooted { + fn get_gc_ref<'a>(&self, store: &'a StoreOpaque) -> Option<&'a VMGcRef> { + assert!( + self.comes_from_same_store(store), + "object used with wrong store" + ); + + let id = self.inner.index.as_manual().unwrap(); + store.gc_roots().manually_rooted.get(id) + } + + fn get_gc_ref_mut<'a>(&self, store: &'a mut StoreOpaque) -> Option<&'a mut VMGcRef> { + assert!( + self.comes_from_same_store(store), + "object used with wrong store" + ); + + let id = self.inner.index.as_manual().unwrap(); + store.gc_roots_mut().manually_rooted.get_mut(id) + } +} + +#[cfg(test)] +mod tests { + use crate::ExternRef; + + use super::*; + + #[test] + fn sizes() { + // Try to keep tabs on the size of these things. Don't want them growing + // unintentionally. + assert_eq!(std::mem::size_of::>(), 16); + assert_eq!(std::mem::size_of::>(), 16); + } +} diff --git a/crates/wasmtime/src/runtime/ref.rs b/crates/wasmtime/src/runtime/ref.rs deleted file mode 100644 index 1c4ca1110ce5..000000000000 --- a/crates/wasmtime/src/runtime/ref.rs +++ /dev/null @@ -1,9 +0,0 @@ -#[cfg(feature = "gc")] -mod gc_ref; -#[cfg(feature = "gc")] -pub use gc_ref::*; - -#[cfg(not(feature = "gc"))] -mod no_gc_ref; -#[cfg(not(feature = "gc"))] -pub use no_gc_ref::*; diff --git a/crates/wasmtime/src/runtime/ref/gc_ref.rs b/crates/wasmtime/src/runtime/ref/gc_ref.rs deleted file mode 100644 index 50cad8b670dc..000000000000 --- a/crates/wasmtime/src/runtime/ref/gc_ref.rs +++ /dev/null @@ -1,110 +0,0 @@ -use crate::AsContextMut; -use std::any::Any; -use std::ffi::c_void; -use wasmtime_runtime::VMExternRef; - -/// Represents an opaque reference to any data within WebAssembly. 
-#[derive(Clone, Debug)] -#[repr(transparent)] -pub struct ExternRef { - inner: VMExternRef, -} - -impl ExternRef { - /// Creates a new instance of `ExternRef` wrapping the given value. - pub fn new(value: T) -> ExternRef - where - T: 'static + Any + Send + Sync, - { - let inner = VMExternRef::new(value); - ExternRef { inner } - } - - pub(crate) fn from_vm_extern_ref(inner: VMExternRef) -> Self { - ExternRef { inner } - } - - pub(crate) fn into_vm_extern_ref(self) -> VMExternRef { - self.inner - } - - /// Get the underlying data for this `ExternRef`. - pub fn data(&self) -> &dyn Any { - &*self.inner - } - - /// Get the strong reference count for this `ExternRef`. - /// - /// Note that this loads the reference count with a `SeqCst` ordering to - /// synchronize with other threads. - pub fn strong_count(&self) -> usize { - self.inner.strong_count() - } - - /// Does this `ExternRef` point to the same inner value as `other`? - /// - /// This is *only* pointer equality, and does *not* run any inner value's - /// `Eq` implementation. - pub fn ptr_eq(&self, other: &ExternRef) -> bool { - VMExternRef::eq(&self.inner, &other.inner) - } - - /// Creates a new strongly-owned [`ExternRef`] from the raw value provided. - /// - /// This is intended to be used in conjunction with [`Func::new_unchecked`], - /// [`Func::call_unchecked`], and [`ValRaw`] with its `externref` field. - /// - /// This function assumes that `raw` is an externref value which is - /// currently rooted within the [`Store`]. - /// - /// # Unsafety - /// - /// This function is particularly `unsafe` because `raw` not only must be a - /// valid externref value produced prior by `to_raw` but it must also be - /// correctly rooted within the store. 
When arguments are provided to a - /// callback with [`Func::new_unchecked`], for example, or returned via - /// [`Func::call_unchecked`], if a GC is performed within the store then - /// floating externref values are not rooted and will be GC'd, meaning that - /// this function will no longer be safe to call with the values cleaned up. - /// This function must be invoked *before* possible GC operations can happen - /// (such as calling wasm). - /// - /// When in doubt try to not use this. Instead use the safe Rust APIs of - /// [`TypedFunc`] and friends. - /// - /// [`Func::call_unchecked`]: crate::Func::call_unchecked - /// [`Func::new_unchecked`]: crate::Func::new_unchecked - /// [`Store`]: crate::Store - /// [`TypedFunc`]: crate::TypedFunc - /// [`ValRaw`]: crate::ValRaw - pub unsafe fn from_raw(raw: *mut c_void) -> Option { - let raw = raw.cast::(); - let inner = VMExternRef::clone_from_raw(raw)?; - Some(ExternRef { inner }) - } - - /// Converts this [`ExternRef`] to a raw value suitable to store within a - /// [`ValRaw`]. - /// - /// # Unsafety - /// - /// Produces a raw value which is only safe to pass into a store if a GC - /// doesn't happen between when the value is produce and when it's passed - /// into the store. - /// - /// [`ValRaw`]: crate::ValRaw - pub unsafe fn to_raw(&self, mut store: impl AsContextMut) -> *mut c_void { - let externref_ptr = self.inner.as_raw(); - store - .as_context_mut() - .0 - .insert_vmexternref_without_gc(self.inner.clone()); - externref_ptr.cast() - } -} - -impl std::fmt::Pointer for ExternRef { - fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { - std::fmt::Pointer::fmt(&self.inner, f) - } -} diff --git a/crates/wasmtime/src/runtime/ref/no_gc_ref.rs b/crates/wasmtime/src/runtime/ref/no_gc_ref.rs deleted file mode 100644 index b83e08dd5dc5..000000000000 --- a/crates/wasmtime/src/runtime/ref/no_gc_ref.rs +++ /dev/null @@ -1,60 +0,0 @@ -//! 
The dummy `ExternRef` type used when the `gc` cargo feature is disabled. -//! -//! Providing a dummy type means that downstream users need to do fewer -//! `#[cfg(...)]`s versus if this type or its methods simply didn't exist. The -//! only methods that are left missing are constructors. - -#![allow(missing_docs)] - -use crate::runtime::Uninhabited; -use crate::AsContextMut; -use std::any::Any; -use std::ffi::c_void; -use wasmtime_runtime::VMExternRef; - -/// Represents an opaque reference to any data within WebAssembly. -/// -/// Due to compilation configuration, this is an uninhabited type: enable the -/// `gc` cargo feature to properly use this type. -#[derive(Clone, Debug)] -pub struct ExternRef { - _inner: Uninhabited, -} - -impl ExternRef { - pub(crate) fn from_vm_extern_ref(inner: VMExternRef) -> Self { - inner.assert_unreachable() - } - - pub(crate) fn into_vm_extern_ref(self) -> VMExternRef { - match self._inner {} - } - - pub fn data(&self) -> &dyn Any { - match self._inner {} - } - - pub fn strong_count(&self) -> usize { - match self._inner {} - } - - pub fn ptr_eq(&self, _other: &ExternRef) -> bool { - match self._inner {} - } - - pub unsafe fn from_raw(raw: *mut c_void) -> Option { - assert!(raw.is_null()); - None - } - - pub unsafe fn to_raw(&self, mut store: impl AsContextMut) -> *mut c_void { - let _ = &mut store; - match self._inner {} - } -} - -impl std::fmt::Pointer for ExternRef { - fn fmt(&self, _f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { - match self._inner {} - } -} diff --git a/crates/wasmtime/src/runtime/store.rs b/crates/wasmtime/src/runtime/store.rs index ca0fc73c1259..fa3966264f85 100644 --- a/crates/wasmtime/src/runtime/store.rs +++ b/crates/wasmtime/src/runtime/store.rs @@ -76,6 +76,7 @@ //! contents of `StoreOpaque`. This is an invariant that we, as the authors of //! `wasmtime`, must uphold for the public interface to be safe. 
+use crate::gc::RootSet; use crate::instance::InstanceData; use crate::linker::Definition; use crate::module::{BareModuleInfo, RegisteredModuleId}; @@ -308,6 +309,7 @@ pub struct StoreOpaque { num_component_instances: usize, signal_handler: Option>>, externref_activations_table: wasmtime_runtime::VMExternRefActivationsTable, + gc_roots: RootSet, modules: ModuleRegistry, func_refs: FuncRefs, host_globals: Vec>, @@ -388,22 +390,16 @@ unsafe impl Send for AsyncState {} unsafe impl Sync for AsyncState {} /// An RAII type to automatically mark a region of code as unsafe for GC. -pub(crate) struct AutoAssertNoGc -where - T: std::ops::DerefMut, -{ +#[doc(hidden)] +pub struct AutoAssertNoGc<'a> { #[cfg(all(debug_assertions, feature = "gc"))] prev_okay: bool, - store: T, + store: &'a mut StoreOpaque, } -impl AutoAssertNoGc -where - T: std::ops::DerefMut, -{ +impl<'a> AutoAssertNoGc<'a> { #[inline] - pub fn new(mut store: T) -> Self { - let _ = &mut store; + pub fn new(store: &'a mut StoreOpaque) -> Self { #[cfg(all(debug_assertions, feature = "gc"))] { let prev_okay = store.externref_activations_table.set_gc_okay(false); @@ -416,30 +412,24 @@ where } } -impl std::ops::Deref for AutoAssertNoGc -where - T: std::ops::DerefMut, -{ - type Target = T; +impl std::ops::Deref for AutoAssertNoGc<'_> { + type Target = StoreOpaque; + #[inline] fn deref(&self) -> &Self::Target { - &self.store + &*self.store } } -impl std::ops::DerefMut for AutoAssertNoGc -where - T: std::ops::DerefMut, -{ +impl std::ops::DerefMut for AutoAssertNoGc<'_> { + #[inline] fn deref_mut(&mut self) -> &mut Self::Target { - &mut self.store + &mut *self.store } } -impl Drop for AutoAssertNoGc -where - T: std::ops::DerefMut, -{ +impl Drop for AutoAssertNoGc<'_> { + #[inline] fn drop(&mut self) { #[cfg(all(debug_assertions, feature = "gc"))] { @@ -498,6 +488,7 @@ impl Store { num_component_instances: 0, signal_handler: None, externref_activations_table: wasmtime_runtime::VMExternRefActivationsTable::new(), + 
gc_roots: RootSet::default(), modules: ModuleRegistry::default(), func_refs: FuncRefs::default(), host_globals: Vec::new(), @@ -1386,6 +1377,16 @@ impl StoreOpaque { &mut self.externref_activations_table } + #[inline] + pub(crate) fn gc_roots(&self) -> &RootSet { + &self.gc_roots + } + + #[inline] + pub(crate) fn gc_roots_mut(&mut self) -> &mut RootSet { + &mut self.gc_roots + } + pub fn gc(&mut self) { // For this crate's API, we ensure that `set_stack_canary` invariants // are upheld for all host-->Wasm calls. diff --git a/crates/wasmtime/src/runtime/store/data.rs b/crates/wasmtime/src/runtime/store/data.rs index 390374985f90..6d8b83136341 100644 --- a/crates/wasmtime/src/runtime/store/data.rs +++ b/crates/wasmtime/src/runtime/store/data.rs @@ -201,7 +201,7 @@ where /// owned by a `Store` and will embed a `StoreId` internally to say which store /// it came from. Comparisons with this value are how panics are generated for /// mismatching the item that a store belongs to. -#[derive(Debug, Copy, Clone, PartialEq, Eq)] +#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)] pub struct StoreId(NonZeroU64); impl StoreId { diff --git a/crates/wasmtime/src/runtime/trampoline/global.rs b/crates/wasmtime/src/runtime/trampoline/global.rs index 89dc230a284b..1b0c672166ad 100644 --- a/crates/wasmtime/src/runtime/trampoline/global.rs +++ b/crates/wasmtime/src/runtime/trampoline/global.rs @@ -1,5 +1,5 @@ -use crate::store::StoreOpaque; -use crate::{GlobalType, HeapType, Mutability, Val}; +use crate::store::{AutoAssertNoGc, StoreOpaque}; +use crate::{GlobalType, HeapType, Mutability, Result, Val}; use std::ptr; use wasmtime_runtime::{StoreBox, VMGlobalDefinition}; @@ -33,7 +33,7 @@ pub fn generate_global_export( store: &mut StoreOpaque, ty: GlobalType, val: Val, -) -> wasmtime_runtime::ExportGlobal { +) -> Result { let global = wasmtime_environ::Global { wasm_ty: ty.content().to_wasm_type(), mutability: match ty.mutability() { @@ -46,6 +46,7 @@ pub fn generate_global_export( 
global: VMGlobalDefinition::new(), }); + let mut store = AutoAssertNoGc::new(store); let definition = unsafe { let global = &mut (*ctx.get()).global; match val { @@ -56,15 +57,18 @@ pub fn generate_global_export( Val::V128(x) => *global.as_u128_mut() = x.into(), Val::FuncRef(f) => { *global.as_func_ref_mut() = - f.map_or(ptr::null_mut(), |f| f.vm_func_ref(store).as_ptr()); + f.map_or(ptr::null_mut(), |f| f.vm_func_ref(&mut store).as_ptr()); } Val::ExternRef(x) => { - *global.as_externref_mut() = x.map(|x| x.into_vm_extern_ref()); + *global.as_externref_mut() = match x { + None => None, + Some(x) => Some(x.try_to_vm_extern_ref(&mut store)?), + }; } } global }; store.host_globals().push(ctx); - wasmtime_runtime::ExportGlobal { definition, global } + Ok(wasmtime_runtime::ExportGlobal { definition, global }) } diff --git a/crates/wasmtime/src/runtime/v128.rs b/crates/wasmtime/src/runtime/v128.rs index e0ad4571af9d..c76552ac34bc 100644 --- a/crates/wasmtime/src/runtime/v128.rs +++ b/crates/wasmtime/src/runtime/v128.rs @@ -3,8 +3,8 @@ allow(unused_imports) )] -use crate::store::StoreOpaque; -use crate::{ValRaw, ValType, WasmTy}; +use crate::store::{AutoAssertNoGc, StoreOpaque}; +use crate::{Result, ValRaw, ValType, WasmTy}; use std::cmp::Ordering; use std::fmt; use wasmtime_runtime::V128Abi; @@ -120,12 +120,12 @@ unsafe impl WasmTy for V128 { } #[inline] - fn into_abi(self, _store: &mut StoreOpaque) -> Self::Abi { - self.0 + fn into_abi(self, _store: &mut AutoAssertNoGc<'_>) -> Result { + Ok(self.0) } #[inline] - unsafe fn from_abi(abi: Self::Abi, _store: &mut StoreOpaque) -> Self { + unsafe fn from_abi(abi: Self::Abi, _store: &mut AutoAssertNoGc<'_>) -> Self { V128(abi) } } diff --git a/crates/wasmtime/src/runtime/values.rs b/crates/wasmtime/src/runtime/values.rs index 8bb41362bcb0..def009fe57de 100644 --- a/crates/wasmtime/src/runtime/values.rs +++ b/crates/wasmtime/src/runtime/values.rs @@ -1,6 +1,6 @@ -use crate::r#ref::ExternRef; -use crate::store::StoreOpaque; -use 
crate::{AsContext, AsContextMut, Func, HeapType, RefType, ValType, V128}; +use crate::gc::ExternRef; +use crate::store::{AutoAssertNoGc, StoreOpaque}; +use crate::{AsContext, AsContextMut, Func, HeapType, RefType, Rooted, ValType, V128}; use anyhow::{bail, Context, Result}; use std::ptr; use wasmtime_runtime::TableElement; @@ -42,7 +42,7 @@ pub enum Val { FuncRef(Option), /// An external reference. - ExternRef(Option), + ExternRef(Option>), } macro_rules! accessors { @@ -165,30 +165,33 @@ impl Val { /// Convenience method to convert this [`Val`] into a [`ValRaw`]. /// + /// Returns an error if this value is a GC reference and the GC reference + /// has been unrooted. + /// /// # Unsafety /// /// This method is unsafe for the reasons that [`ExternRef::to_raw`] and /// [`Func::to_raw`] are unsafe. - pub unsafe fn to_raw(&self, store: impl AsContextMut) -> ValRaw { + pub unsafe fn to_raw(&self, store: impl AsContextMut) -> Result { match self { - Val::I32(i) => ValRaw::i32(*i), - Val::I64(i) => ValRaw::i64(*i), - Val::F32(u) => ValRaw::f32(*u), - Val::F64(u) => ValRaw::f64(*u), - Val::V128(b) => ValRaw::v128(b.as_u128()), + Val::I32(i) => Ok(ValRaw::i32(*i)), + Val::I64(i) => Ok(ValRaw::i64(*i)), + Val::F32(u) => Ok(ValRaw::f32(*u)), + Val::F64(u) => Ok(ValRaw::f64(*u)), + Val::V128(b) => Ok(ValRaw::v128(b.as_u128())), Val::ExternRef(e) => { let externref = match e { None => ptr::null_mut(), - Some(e) => e.to_raw(store), + Some(e) => e.to_raw(store)?, }; - ValRaw::externref(externref) + Ok(ValRaw::externref(externref)) } Val::FuncRef(f) => { let funcref = match f { Some(f) => f.to_raw(store), None => ptr::null_mut(), }; - ValRaw::funcref(funcref) + Ok(ValRaw::funcref(funcref)) } } } @@ -213,7 +216,7 @@ impl Val { Func::from_raw(store, raw.get_funcref()).into() } HeapType::NoFunc => Ref::Func(None), - HeapType::Extern => ExternRef::from_raw(raw.get_externref()).into(), + HeapType::Extern => ExternRef::from_raw(store, raw.get_externref()).into(), }; assert!( 
ref_ty.is_nullable() || !ref_.is_null(), @@ -231,7 +234,7 @@ impl Val { (F32(f32) f32 unwrap_f32 f32::from_bits(*e)) (F64(f64) f64 unwrap_f64 f64::from_bits(*e)) (FuncRef(Option<&Func>) func_ref unwrap_func_ref e.as_ref()) - (ExternRef(Option<&ExternRef>) extern_ref unwrap_extern_ref e.as_ref()) + (ExternRef(Option<&Rooted>) extern_ref unwrap_extern_ref e.as_ref()) (V128(V128) v128 unwrap_v128 *e) } @@ -253,7 +256,7 @@ impl Val { /// /// If this is a non-null `externref`, then `Some(Some(..))` is returned. #[inline] - pub fn externref(&self) -> Option> { + pub fn externref(&self) -> Option>> { match self { Val::ExternRef(None) => Some(None), Val::ExternRef(Some(e)) => Some(Some(e)), @@ -272,7 +275,7 @@ impl Val { /// /// Panics if `self` is not a (nullable) `externref`. #[inline] - pub fn unwrap_externref(&self) -> Option<&ExternRef> { + pub fn unwrap_externref(&self) -> Option<&Rooted> { self.externref().expect("expected externref") } @@ -313,15 +316,13 @@ impl Val { Val::FuncRef(Some(f)) => f.comes_from_same_store(store), Val::FuncRef(None) => true, - // Integers, floats, vectors, and `externref`s have no association - // with any particular store, so they're always considered as "yes I - // came from that store", - Val::I32(_) - | Val::I64(_) - | Val::F32(_) - | Val::F64(_) - | Val::V128(_) - | Val::ExternRef(_) => true, + Val::ExternRef(Some(x)) => x.comes_from_same_store(store), + Val::ExternRef(None) => true, + + // Integers, floats, and vectors have no association with any + // particular store, so they're always considered as "yes I came + // from that store", + Val::I32(_) | Val::I64(_) | Val::F32(_) | Val::F64(_) | Val::V128(_) => true, } } } @@ -364,16 +365,16 @@ impl From for Val { } } -impl From for Val { +impl From> for Val { #[inline] - fn from(val: ExternRef) -> Val { + fn from(val: Rooted) -> Val { Val::ExternRef(Some(val)) } } -impl From> for Val { +impl From>> for Val { #[inline] - fn from(val: Option) -> Val { + fn from(val: Option>) -> Val { 
Val::ExternRef(val) } } @@ -479,7 +480,7 @@ pub enum Ref { /// /// Wasm can create null external references via the `ref.null extern` /// instruction. - Extern(Option), + Extern(Option>), } impl From for Ref { @@ -496,16 +497,16 @@ impl From> for Ref { } } -impl From for Ref { +impl From> for Ref { #[inline] - fn from(e: ExternRef) -> Ref { + fn from(e: Rooted) -> Ref { Ref::Extern(Some(e)) } } -impl From> for Ref { +impl From>> for Ref { #[inline] - fn from(e: Option) -> Ref { + fn from(e: Option>) -> Ref { Ref::Extern(e) } } @@ -541,7 +542,7 @@ impl Ref { /// /// Returns `Some(Some(_))` if this `Ref` is a non-null `extern` reference. #[inline] - pub fn as_extern(&self) -> Option> { + pub fn as_extern(&self) -> Option>> { match self { Ref::Extern(e) => Some(e.as_ref()), _ => None, @@ -555,7 +556,7 @@ impl Ref { /// /// Returns `Some(_)` if this `Ref` is a non-null `extern` reference. #[inline] - pub fn unwrap_extern(&self) -> Option<&ExternRef> { + pub fn unwrap_extern(&self) -> Option<&Rooted> { self.as_extern() .expect("Ref::unwrap_extern on non-extern reference") } @@ -663,10 +664,8 @@ impl Ref { match self { Ref::Func(Some(f)) => f.comes_from_same_store(store), Ref::Func(None) => true, - - // `ExternRef`s aren't associated with any single store right - // now. That may change in the future. 
- Ref::Extern(_) => true, + Ref::Extern(Some(x)) => x.comes_from_same_store(store), + Ref::Extern(None) => true, } } @@ -675,7 +674,8 @@ impl Ref { store: &mut StoreOpaque, ty: &RefType, ) -> Result { - self.ensure_matches_ty(store, &ty) + let mut store = AutoAssertNoGc::new(store); + self.ensure_matches_ty(&store, &ty) .context("type mismatch: value does not match table element type")?; match (self, ty.heap_type()) { (Ref::Func(None), HeapType::NoFunc | HeapType::Func | HeapType::Concrete(_)) => { @@ -684,10 +684,10 @@ impl Ref { } (Ref::Func(Some(f)), HeapType::Func | HeapType::Concrete(_)) => { debug_assert!( - f.comes_from_same_store(store), + f.comes_from_same_store(&store), "checked in `ensure_matches_ty`" ); - Ok(TableElement::FuncRef(f.vm_func_ref(store).as_ptr())) + Ok(TableElement::FuncRef(f.vm_func_ref(&mut store).as_ptr())) } (Ref::Extern(e), HeapType::Extern) => match e { @@ -695,7 +695,9 @@ impl Ref { assert!(ty.is_nullable()); Ok(TableElement::ExternRef(None)) } - Some(e) => Ok(TableElement::ExternRef(Some(e.into_vm_extern_ref()))), + Some(e) => Ok(TableElement::ExternRef(Some( + e.try_to_vm_extern_ref(&mut store)?, + ))), }, _ => unreachable!("checked that the value matches the type above"), diff --git a/crates/wast/src/core.rs b/crates/wast/src/core.rs index 71389b966c3f..5a9abef3a2ae 100644 --- a/crates/wast/src/core.rs +++ b/crates/wast/src/core.rs @@ -1,11 +1,11 @@ use anyhow::{bail, Context, Result}; use std::fmt::{Display, LowerHex}; -use wasmtime::{ExternRef, Val}; +use wasmtime::{ExternRef, Store, Val}; use wast::core::{HeapType, NanPattern, V128Pattern, WastArgCore, WastRetCore}; use wast::token::{Float32, Float64}; /// Translate from a `script::Value` to a `RuntimeValue`. 
-pub fn val(v: &WastArgCore<'_>) -> Result { +pub fn val(store: &mut Store, v: &WastArgCore<'_>) -> Result { use wast::core::WastArgCore::*; Ok(match v { @@ -16,7 +16,7 @@ pub fn val(v: &WastArgCore<'_>) -> Result { V128(x) => Val::V128(u128::from_le_bytes(x.to_le_bytes()).into()), RefNull(HeapType::Extern) => Val::ExternRef(None), RefNull(HeapType::Func) => Val::FuncRef(None), - RefExtern(x) => Val::ExternRef(Some(ExternRef::new(*x))), + RefExtern(x) => Val::ExternRef(Some(ExternRef::new(store, *x))), other => bail!("couldn't convert {:?} to a runtime value", other), }) } @@ -37,15 +37,15 @@ fn extract_lane_as_i64(bytes: u128, lane: usize) -> i64 { (bytes >> (lane * 64)) as i64 } -pub fn match_val(actual: &Val, expected: &WastRetCore) -> Result<()> { +pub fn match_val(store: &Store, actual: &Val, expected: &WastRetCore) -> Result<()> { match (actual, expected) { (_, WastRetCore::Either(expected)) => { for expected in expected { - if match_val(actual, expected).is_ok() { + if match_val(store, actual, expected).is_ok() { return Ok(()); } } - match_val(actual, &expected[0]) + match_val(store, actual, &expected[0]) } (Val::I32(a), WastRetCore::I32(b)) => match_int(a, b), @@ -67,7 +67,7 @@ pub fn match_val(actual: &Val, expected: &WastRetCore) -> Result<()> { } (Val::ExternRef(Some(x)), WastRetCore::RefNull(Some(HeapType::Extern))) => { let x = x - .data() + .data(store)? .downcast_ref::() .expect("only u32 externrefs created in wast test suites"); bail!("expected null externref, found non-null externref of {x}"); @@ -80,7 +80,7 @@ pub fn match_val(actual: &Val, expected: &WastRetCore) -> Result<()> { (Val::FuncRef(Some(_)), WastRetCore::RefFunc(_)) => Ok(()), (Val::ExternRef(Some(x)), WastRetCore::RefExtern(Some(y))) => { let x = x - .data() + .data(store)? 
.downcast_ref::() .expect("only u32 externrefs created in wast test suites"); if x == y { diff --git a/crates/wast/src/wast.rs b/crates/wast/src/wast.rs index 883ebd9fe508..6763bd3f8bca 100644 --- a/crates/wast/src/wast.rs +++ b/crates/wast/src/wast.rs @@ -173,7 +173,7 @@ where .args .iter() .map(|v| match v { - WastArg::Core(v) => core::val(v), + WastArg::Core(v) => core::val(&mut self.store, v), WastArg::Component(_) => bail!("expected component function, found core"), }) .collect::>>()?; @@ -309,7 +309,8 @@ where bail!("expected component value found core value") } }; - core::match_val(v, e).with_context(|| format!("result {} didn't match", i))?; + core::match_val(&self.store, v, e) + .with_context(|| format!("result {} didn't match", i))?; } } #[cfg(feature = "component-model")] diff --git a/examples/CMakeLists.txt b/examples/CMakeLists.txt index 9e2e049039ef..42c343f73838 100644 --- a/examples/CMakeLists.txt +++ b/examples/CMakeLists.txt @@ -44,7 +44,6 @@ enable_testing() # Add all examples create_target(async async.cpp) -create_target(externref externref.c) create_target(fib-debug fib-debug/main.c) create_target(fuel fuel.c) create_target(gcd gcd.c) @@ -64,7 +63,6 @@ create_rust_wasm(tokio wasm32-wasi) create_rust_wasm(wasi wasm32-wasi) create_rust_wasm(component wasm32-unknown-unknown) create_rust_test(epochs) -create_rust_test(externref) create_rust_test(fib-debug) create_rust_test(fuel) create_rust_test(gcd) diff --git a/examples/externref.c b/examples/externref.c deleted file mode 100644 index f9924bb93fd4..000000000000 --- a/examples/externref.c +++ /dev/null @@ -1,204 +0,0 @@ -/* -Example of using `externref` values. 
- -You can compile and run this example on Linux with: - - cargo build --release -p wasmtime-c-api - cc examples/externref.c \ - -I crates/c-api/include \ - -I crates/c-api/wasm-c-api/include \ - target/release/libwasmtime.a \ - -lpthread -ldl -lm \ - -o externref - ./externref - -Note that on Windows and macOS the command will be similar, but you'll need -to tweak the `-lpthread` and such annotations as well as the name of the -`libwasmtime.a` file on Windows. - -You can also build using cmake: - -mkdir build && cd build && cmake .. && \ - cmake --build . --target wasmtime-externref -*/ - -#include -#include -#include -#include -#include -#include - -static void exit_with_error(const char *message, wasmtime_error_t *error, - wasm_trap_t *trap); - -int main() { - int ret = 0; - bool ok = true; - // Create a new configuration with Wasm reference types enabled. - printf("Initializing...\n"); - wasm_config_t *config = wasm_config_new(); - assert(config != NULL); - wasmtime_config_wasm_reference_types_set(config, true); - - // Create an *engine*, which is a compilation context, with our configured - // options. - wasm_engine_t *engine = wasm_engine_new_with_config(config); - assert(engine != NULL); - - // With an engine we can create a *store* which is a long-lived group of wasm - // modules. - wasmtime_store_t *store = wasmtime_store_new(engine, NULL, NULL); - assert(store != NULL); - wasmtime_context_t *context = wasmtime_store_context(store); - - // Read our input file, which in this case is a wasm text file. 
- FILE *file = fopen("examples/externref.wat", "r"); - assert(file != NULL); - fseek(file, 0L, SEEK_END); - size_t file_size = ftell(file); - fseek(file, 0L, SEEK_SET); - wasm_byte_vec_t wat; - wasm_byte_vec_new_uninitialized(&wat, file_size); - if (fread(wat.data, file_size, 1, file) != 1) { - printf("> Error loading module!\n"); - return 1; - } - fclose(file); - - // Parse the wat into the binary wasm format - wasm_byte_vec_t wasm; - wasmtime_error_t *error = wasmtime_wat2wasm(wat.data, wat.size, &wasm); - if (error != NULL) - exit_with_error("failed to parse wat", error, NULL); - wasm_byte_vec_delete(&wat); - - // Now that we've got our binary webassembly we can compile our module. - printf("Compiling module...\n"); - wasmtime_module_t *module = NULL; - error = wasmtime_module_new(engine, (uint8_t *)wasm.data, wasm.size, &module); - wasm_byte_vec_delete(&wasm); - if (error != NULL) - exit_with_error("failed to compile module", error, NULL); - - // Instantiate the module. - printf("Instantiating module...\n"); - wasm_trap_t *trap = NULL; - wasmtime_instance_t instance; - error = wasmtime_instance_new(context, module, NULL, 0, &instance, &trap); - if (error != NULL || trap != NULL) - exit_with_error("failed to instantiate", error, trap); - - printf("Creating new `externref`...\n"); - - // Create a new `externref` value. - // - // Note that the NULL here is a finalizer callback, but we don't need one for - // this example. - wasmtime_externref_t *externref = - wasmtime_externref_new("Hello, World!", NULL); - - // The `externref`'s wrapped data should be the string "Hello, World!". - void *data = wasmtime_externref_data(externref); - assert(strcmp((char *)data, "Hello, World!") == 0); - - printf("Touching `externref` table...\n"); - - wasmtime_extern_t item; - - // Lookup the `table` export. 
- ok = wasmtime_instance_export_get(context, &instance, "table", - strlen("table"), &item); - assert(ok); - assert(item.kind == WASMTIME_EXTERN_TABLE); - - // Set `table[3]` to our `externref`. - wasmtime_val_t externref_val; - externref_val.kind = WASMTIME_EXTERNREF; - externref_val.of.externref = externref; - error = wasmtime_table_set(context, &item.of.table, 3, &externref_val); - if (error != NULL) - exit_with_error("failed to set table", error, NULL); - - // `table[3]` should now be our `externref`. - wasmtime_val_t elem; - ok = wasmtime_table_get(context, &item.of.table, 3, &elem); - assert(ok); - assert(elem.kind == WASMTIME_EXTERNREF); - assert(strcmp((char *)wasmtime_externref_data(elem.of.externref), - "Hello, World!") == 0); - wasmtime_val_delete(&elem); - - printf("Touching `externref` global...\n"); - - // Lookup the `global` export. - ok = wasmtime_instance_export_get(context, &instance, "global", - strlen("global"), &item); - assert(ok); - assert(item.kind == WASMTIME_EXTERN_GLOBAL); - - // Set the global to our `externref`. - error = wasmtime_global_set(context, &item.of.global, &externref_val); - if (error != NULL) - exit_with_error("failed to set global", error, NULL); - - // Get the global, and it should return our `externref` again. - wasmtime_val_t global_val; - wasmtime_global_get(context, &item.of.global, &global_val); - assert(global_val.kind == WASMTIME_EXTERNREF); - assert(strcmp((char *)wasmtime_externref_data(elem.of.externref), - "Hello, World!") == 0); - wasmtime_val_delete(&global_val); - - printf("Calling `externref` func...\n"); - - // Lookup the `func` export. - ok = wasmtime_instance_export_get(context, &instance, "func", strlen("func"), - &item); - assert(ok); - assert(item.kind == WASMTIME_EXTERN_FUNC); - - // And call it! 
- wasmtime_val_t results[1]; - error = wasmtime_func_call(context, &item.of.func, &externref_val, 1, results, - 1, &trap); - if (error != NULL || trap != NULL) - exit_with_error("failed to call function", error, trap); - - // `func` returns the same reference we gave it, so `results[0]` should be our - // `externref`. - assert(results[0].kind == WASMTIME_EXTERNREF); - assert(strcmp((char *)wasmtime_externref_data(results[0].of.externref), - "Hello, World!") == 0); - wasmtime_val_delete(&results[0]); - - // We can GC any now-unused references to our externref that the store is - // holding. - printf("GCing within the store...\n"); - wasmtime_context_gc(context); - - // Clean up after ourselves at this point - printf("All finished!\n"); - ret = 0; - - wasmtime_store_delete(store); - wasmtime_module_delete(module); - wasm_engine_delete(engine); - return 0; -} - -static void exit_with_error(const char *message, wasmtime_error_t *error, - wasm_trap_t *trap) { - fprintf(stderr, "error: %s\n", message); - wasm_byte_vec_t error_message; - if (error != NULL) { - wasmtime_error_message(error, &error_message); - wasmtime_error_delete(error); - } else { - wasm_trap_message(trap, &error_message); - wasm_trap_delete(trap); - } - fprintf(stderr, "%.*s\n", (int)error_message.size, error_message.data); - wasm_byte_vec_delete(&error_message); - exit(1); -} diff --git a/examples/externref.rs b/examples/externref.rs index 3dfe4218a5ed..1301c6c72b1b 100644 --- a/examples/externref.rs +++ b/examples/externref.rs @@ -18,10 +18,13 @@ fn main() -> Result<()> { let instance = Instance::new(&mut store, &module, &[])?; println!("Creating new `externref`..."); - let externref = ExternRef::new("Hello, World!"); - assert!(externref.data().is::<&'static str>()); + let externref = ExternRef::new(&mut store, "Hello, World!"); + assert!(externref.data(&store)?.is::<&'static str>()); assert_eq!( - *externref.data().downcast_ref::<&'static str>().unwrap(), + *externref + .data(&store)? 
+ .downcast_ref::<&'static str>() + .unwrap(), "Hello, World!" ); @@ -32,22 +35,23 @@ fn main() -> Result<()> { .get(&mut store, 3) .unwrap() // assert in bounds .unwrap_extern() // assert it's an externref table - .cloned() + .copied() .unwrap(); // assert the externref isn't null - assert!(elem.ptr_eq(&externref)); + assert!(Rooted::ref_eq(&store, &elem, &externref)?); println!("Touching `externref` global..."); let global = instance.get_global(&mut store, "global").unwrap(); global.set(&mut store, Some(externref.clone()).into())?; - let global_val = global.get(&mut store).unwrap_externref().cloned().unwrap(); - assert!(global_val.ptr_eq(&externref)); + let global_val = global.get(&mut store).unwrap_externref().copied().unwrap(); + assert!(Rooted::ref_eq(&store, &global_val, &externref)?); println!("Calling `externref` func..."); - let func = - instance.get_typed_func::, Option>(&mut store, "func")?; - let ret = func.call(&mut store, Some(externref.clone()))?; + let func = instance.get_typed_func::>, Option>>( + &mut store, "func", + )?; + let ret = func.call(&mut store, Some(externref))?; assert!(ret.is_some()); - assert!(ret.unwrap().ptr_eq(&externref)); + assert!(Rooted::ref_eq(&store, &ret.unwrap(), &externref)?); println!("GCing within the store..."); store.gc(); diff --git a/tests/all/async_functions.rs b/tests/all/async_functions.rs index 56b3017c81e5..9c15141f1a2e 100644 --- a/tests/all/async_functions.rs +++ b/tests/all/async_functions.rs @@ -922,6 +922,8 @@ async fn non_stacky_async_activations() -> Result<()> { #[tokio::test] async fn gc_preserves_externref_on_historical_async_stacks() -> Result<()> { + let _ = env_logger::try_init(); + let mut config = Config::new(); config.async_support(true); let engine = Engine::new(&config)?; @@ -952,27 +954,30 @@ async fn gc_preserves_externref_on_historical_async_stacks() -> Result<()> { "#, )?; - type F = TypedFunc<(i32, Option), ()>; + type F = TypedFunc<(i32, Option>), ()>; let mut store = Store::new(&engine, 
None); let mut linker = Linker::>::new(&engine); linker.func_wrap("", "gc", |mut cx: Caller<'_, _>| cx.gc())?; - linker.func_wrap("", "test", |val: i32, handle: Option| { - assert_eq!(handle.unwrap().data().downcast_ref(), Some(&val)); - })?; + linker.func_wrap( + "", + "test", + |cx: Caller<'_, _>, val: i32, handle: Option>| -> Result<()> { + assert_eq!(handle.unwrap().data(&cx)?.downcast_ref(), Some(&val)); + Ok(()) + }, + )?; linker.func_wrap1_async("", "recurse", |mut cx: Caller<'_, _>, val: i32| { let func = cx.data().clone().unwrap(); - Box::new(async move { - func.call_async(&mut cx, (val, Some(ExternRef::new(val)))) - .await - }) + let r = Some(ExternRef::new(&mut cx, val)); + Box::new(async move { func.call_async(&mut cx, (val, r)).await }) })?; let instance = linker.instantiate_async(&mut store, &module).await?; let func: F = instance.get_typed_func(&mut store, "run")?; *store.data_mut() = Some(func.clone()); - func.call_async(&mut store, (5, Some(ExternRef::new(5)))) - .await?; + let r = Some(ExternRef::new(&mut store, 5)); + func.call_async(&mut store, (5, r)).await?; Ok(()) } diff --git a/tests/all/call_hook.rs b/tests/all/call_hook.rs index f116039c8ee8..54777f4922b2 100644 --- a/tests/all/call_hook.rs +++ b/tests/all/call_hook.rs @@ -95,10 +95,10 @@ fn call_wrapped_func() -> Result<(), Error> { unsafe { let mut args = [ - Val::I32(1).to_raw(&mut store), - Val::I64(2).to_raw(&mut store), - Val::F32(3.0f32.to_bits()).to_raw(&mut store), - Val::F64(4.0f64.to_bits()).to_raw(&mut store), + Val::I32(1).to_raw(&mut store)?, + Val::I64(2).to_raw(&mut store)?, + Val::F32(3.0f32.to_bits()).to_raw(&mut store)?, + Val::F64(4.0f64.to_bits()).to_raw(&mut store)?, ]; f.call_unchecked(&mut store, args.as_mut_ptr(), args.len())?; } diff --git a/tests/all/externals.rs b/tests/all/externals.rs index 9950439e9061..29f8122c7680 100644 --- a/tests/all/externals.rs +++ b/tests/all/externals.rs @@ -161,24 +161,23 @@ fn get_set_externref_globals_via_api() -> 
anyhow::Result<()> { )?; assert!(global.get(&mut store).unwrap_externref().is_none()); - global.set( - &mut store, - Val::ExternRef(Some(ExternRef::new("hello".to_string()))), - )?; + let hello = ExternRef::new(&mut store, "hello".to_string()); + global.set(&mut store, hello.into())?; let r = global.get(&mut store).unwrap_externref().cloned().unwrap(); - assert!(r.data().is::()); - assert_eq!(r.data().downcast_ref::().unwrap(), "hello"); + assert!(r.data(&store)?.is::()); + assert_eq!(r.data(&store)?.downcast_ref::().unwrap(), "hello"); // Initialize with a non-null externref. + let externref = ExternRef::new(&mut store, 42_i32); let global = Global::new( &mut store, GlobalType::new(ValType::EXTERNREF, Mutability::Const), - Val::ExternRef(Some(ExternRef::new(42_i32))), + externref.into(), )?; let r = global.get(&mut store).unwrap_externref().cloned().unwrap(); - assert!(r.data().is::()); - assert_eq!(r.data().downcast_ref::().copied().unwrap(), 42); + assert!(r.data(&store)?.is::()); + assert_eq!(r.data(&store)?.downcast_ref::().copied().unwrap(), 42); Ok(()) } @@ -288,11 +287,8 @@ fn create_get_set_externref_tables_via_api() -> anyhow::Result<()> { let mut store = Store::new(&engine, ()); let table_ty = TableType::new(RefType::EXTERNREF, 10, None); - let table = Table::new( - &mut store, - table_ty, - Ref::Extern(Some(ExternRef::new(42_usize))), - )?; + let init = ExternRef::new(&mut store, 42_usize); + let table = Table::new(&mut store, table_ty, init.into())?; assert_eq!( *table @@ -300,7 +296,7 @@ fn create_get_set_externref_tables_via_api() -> anyhow::Result<()> { .unwrap() .unwrap_extern() .unwrap() - .data() + .data(&store)? 
.downcast_ref::() .unwrap(), 42 @@ -325,12 +321,8 @@ fn fill_externref_tables_via_api() -> anyhow::Result<()> { assert!(table.get(&mut store, i).unwrap().unwrap_extern().is_none()); } - table.fill( - &mut store, - 2, - Ref::Extern(Some(ExternRef::new(42_usize))), - 4, - )?; + let val = ExternRef::new(&mut store, 42_usize); + table.fill(&mut store, 2, val.into(), 4)?; for i in (0..2).chain(7..10) { assert!(table.get(&mut store, i).unwrap().unwrap_extern().is_none()); @@ -342,7 +334,7 @@ fn fill_externref_tables_via_api() -> anyhow::Result<()> { .unwrap() .unwrap_extern() .unwrap() - .data() + .data(&store)? .downcast_ref::() .unwrap(), 42 diff --git a/tests/all/func.rs b/tests/all/func.rs index d0f04b52667b..acaed21a9b36 100644 --- a/tests/all/func.rs +++ b/tests/all/func.rs @@ -385,8 +385,10 @@ fn func_constructors() { Func::wrap(&mut store, || -> i64 { 0 }); Func::wrap(&mut store, || -> f32 { 0.0 }); Func::wrap(&mut store, || -> f64 { 0.0 }); - Func::wrap(&mut store, || -> ExternRef { loop {} }); - Func::wrap(&mut store, || -> Option { None }); + Func::wrap(&mut store, || -> Rooted { loop {} }); + Func::wrap(&mut store, || -> Option> { None }); + Func::wrap(&mut store, || -> ManuallyRooted { loop {} }); + Func::wrap(&mut store, || -> Option> { None }); Func::wrap(&mut store, || -> Func { loop {} }); Func::wrap(&mut store, || -> Option { None }); Func::wrap(&mut store, || -> NoFunc { loop {} }); @@ -397,8 +399,17 @@ fn func_constructors() { Func::wrap(&mut store, || -> Result { loop {} }); Func::wrap(&mut store, || -> Result { loop {} }); Func::wrap(&mut store, || -> Result { loop {} }); - Func::wrap(&mut store, || -> Result { loop {} }); - Func::wrap(&mut store, || -> Result> { loop {} }); + Func::wrap(&mut store, || -> Result> { loop {} }); + Func::wrap(&mut store, || -> Result>> { + loop {} + }); + Func::wrap(&mut store, || -> Result> { + loop {} + }); + Func::wrap( + &mut store, + || -> Result>> { loop {} }, + ); Func::wrap(&mut store, || -> Result { loop {} 
}); Func::wrap(&mut store, || -> Result> { loop {} }); Func::wrap(&mut store, || -> Result { loop {} }); @@ -485,19 +496,26 @@ fn signatures_match() { let f = Func::wrap( &mut store, - |_: f32, _: f64, _: i32, _: i64, _: i32, _: Option, _: Option| -> f64 { - loop {} - }, + |_: f32, + _: f64, + _: i32, + _: i64, + _: i32, + _: Option>, + _: Option>, + _: Option| + -> f64 { loop {} }, ); - assert_eq!(f.ty(&store).params().len(), 7); + assert_eq!(f.ty(&store).params().len(), 8); assert!(f.ty(&store).params().nth(0).unwrap().is_f32()); assert!(f.ty(&store).params().nth(1).unwrap().is_f64()); assert!(f.ty(&store).params().nth(2).unwrap().is_i32()); assert!(f.ty(&store).params().nth(3).unwrap().is_i64()); assert!(f.ty(&store).params().nth(4).unwrap().is_i32()); assert!(f.ty(&store).params().nth(5).unwrap().is_externref()); - assert!(f.ty(&store).params().nth(6).unwrap().is_funcref()); + assert!(f.ty(&store).params().nth(6).unwrap().is_externref()); + assert!(f.ty(&store).params().nth(7).unwrap().is_funcref()); assert_eq!(f.ty(&store).results().len(), 1); assert!(f.ty(&store).results().nth(0).unwrap().is_f64()); @@ -506,6 +524,8 @@ fn signatures_match() { #[test] #[cfg_attr(miri, ignore)] fn import_works() -> Result<()> { + let _ = env_logger::try_init(); + static HITS: AtomicUsize = AtomicUsize::new(0); let wasm = wat::parse_str( @@ -513,9 +533,9 @@ fn import_works() -> Result<()> { (import "" "" (func)) (import "" "" (func (param i32) (result i32))) (import "" "" (func (param i32) (param i64))) - (import "" "" (func (param i32 i64 i32 f32 f64 externref funcref))) + (import "" "" (func (param i32 i64 i32 f32 f64 externref externref funcref))) - (func (export "run") (param externref funcref) + (func (export "run") (param externref externref funcref) call 0 i32.const 0 call 1 @@ -531,6 +551,7 @@ fn import_works() -> Result<()> { f64.const 500 local.get 0 local.get 1 + local.get 2 call 3 ) "#, @@ -565,39 +586,55 @@ fn import_works() -> Result<()> { c: i32, d: f32, e: f64, - 
f: Option, - g: Option| { + f: Option>, + g: Option>, + h: Option| + -> Result<()> { assert_eq!(a, 100); assert_eq!(b, 200); assert_eq!(c, 300); assert_eq!(d, 400.0); assert_eq!(e, 500.0); + dbg!(f); + dbg!(g.as_ref()); + dbg!(f.as_ref().unwrap().data(&caller)?.downcast_ref::()); + dbg!(g.as_ref().unwrap().data(&caller)?.downcast_ref::()); assert_eq!( - f.as_ref().unwrap().data().downcast_ref::().unwrap(), + f.as_ref() + .unwrap() + .data(&caller)? + .downcast_ref::() + .unwrap(), "hello" ); + assert_eq!( + g.as_ref() + .unwrap() + .data(&caller)? + .downcast_ref::() + .unwrap(), + "goodbye" + ); let mut results = [Val::I32(0)]; - g.as_ref() + h.as_ref() .unwrap() .call(&mut caller, &[], &mut results) .unwrap(); assert_eq!(results[0].unwrap_i32(), 42); assert_eq!(HITS.fetch_add(1, SeqCst), 3); + Ok(()) }, ) .into(), ]; let instance = Instance::new(&mut store, &module, &imports)?; let run = instance.get_func(&mut store, "run").unwrap(); + let hello = Val::ExternRef(Some(ExternRef::new(&mut store, "hello".to_string()))); + dbg!(&hello); + let goodbye = Val::ExternRef(Some(ExternRef::new(&mut store, "goodbye".to_string()))); + dbg!(&goodbye); let funcref = Val::FuncRef(Some(Func::wrap(&mut store, || -> i32 { 42 }))); - run.call( - &mut store, - &[ - Val::ExternRef(Some(ExternRef::new("hello".to_string()))), - funcref, - ], - &mut [], - )?; + run.call(&mut store, &[hello, goodbye, funcref], &mut [])?; assert_eq!(HITS.load(SeqCst), 4); Ok(()) } @@ -646,8 +683,16 @@ fn get_from_wrapper() { assert!(f.typed::<(), f32>(&store).is_ok()); let f = Func::wrap(&mut store, || -> f64 { loop {} }); assert!(f.typed::<(), f64>(&store).is_ok()); - let f = Func::wrap(&mut store, || -> Option { loop {} }); - assert!(f.typed::<(), Option>(&store).is_ok()); + let f = Func::wrap(&mut store, || -> Option> { loop {} }); + assert!(f.typed::<(), Option>>(&store).is_ok()); + let f = Func::wrap(&mut store, || -> Option { loop {} }); + assert!(f.typed::<(), Option>(&store).is_ok()); + let f = 
Func::wrap(&mut store, || -> Option> { + loop {} + }); + assert!(f + .typed::<(), Option>>(&store) + .is_ok()); let f = Func::wrap(&mut store, || -> Option { loop {} }); assert!(f.typed::<(), Option>(&store).is_ok()); @@ -662,8 +707,14 @@ fn get_from_wrapper() { assert!(f.typed::(&store).is_ok()); let f = Func::wrap(&mut store, |_: f64| {}); assert!(f.typed::(&store).is_ok()); - let f = Func::wrap(&mut store, |_: Option| {}); - assert!(f.typed::, ()>(&store).is_ok()); + let f = Func::wrap(&mut store, |_: Option>| {}); + assert!(f.typed::>, ()>(&store).is_ok()); + let f = Func::wrap(&mut store, |_: Option| {}); + assert!(f.typed::, ()>(&store).is_ok()); + let f = Func::wrap(&mut store, |_: Option>| {}); + assert!(f + .typed::>, ()>(&store) + .is_ok()); let f = Func::wrap(&mut store, |_: Option| {}); assert!(f.typed::, ()>(&store).is_ok()); } @@ -1341,12 +1392,12 @@ fn calls_with_funcref_and_externref() -> anyhow::Result<()> { linker.func_wrap( "", "witness", - |mut caller: Caller<'_, ()>, func: Option, externref: Option| { + |mut caller: Caller<'_, ()>, func: Option, externref: Option>| { if func.is_some() { assert_my_funcref(&mut caller, func.as_ref())?; } if externref.is_some() { - assert_my_externref(externref.as_ref()); + assert_my_externref(&caller, externref); } Ok(()) }, @@ -1354,13 +1405,13 @@ fn calls_with_funcref_and_externref() -> anyhow::Result<()> { let instance = linker.instantiate(&mut store, &module)?; let typed = instance - .get_typed_func::<(Option, Option), (Option, Option)>( + .get_typed_func::<(Option, Option>), (Option>, Option)>( &mut store, "f", )?; let untyped = typed.func(); let my_funcref = Func::wrap(&mut store, || 100u32); - let my_externref = ExternRef::new(99u32); + let my_externref = ExternRef::new(&mut store, 99u32); let mut results = [Val::I32(0), Val::I32(0)]; fn assert_my_funcref(mut store: impl AsContextMut, func: Option<&Func>) -> Result<()> { @@ -1369,8 +1420,11 @@ fn calls_with_funcref_and_externref() -> anyhow::Result<()> { 
assert_eq!(func.typed::<(), u32>(&store)?.call(&mut store, ())?, 100); Ok(()) } - fn assert_my_externref(externref: Option<&ExternRef>) { - assert_eq!(externref.unwrap().data().downcast_ref(), Some(&99u32)); + fn assert_my_externref(store: impl AsContext, externref: Option>) { + assert_eq!( + externref.unwrap().data(&store).unwrap().downcast_ref(), + Some(&99u32) + ); } // funcref=null, externref=null @@ -1399,7 +1453,7 @@ fn calls_with_funcref_and_externref() -> anyhow::Result<()> { // funcref=null, externref=Some let (a, b) = typed.call(&mut store, (None, Some(my_externref.clone())))?; - assert_my_externref(a.as_ref()); + assert_my_externref(&store, a); assert!(b.is_none()); untyped.call( &mut store, @@ -1409,12 +1463,12 @@ fn calls_with_funcref_and_externref() -> anyhow::Result<()> { ], &mut results, )?; - assert_my_externref(results[0].unwrap_externref()); + assert_my_externref(&store, results[0].unwrap_externref().copied()); assert!(results[1].unwrap_funcref().is_none()); // funcref=Some, externref=Some let (a, b) = typed.call(&mut store, (Some(my_funcref), Some(my_externref.clone())))?; - assert_my_externref(a.as_ref()); + assert_my_externref(&store, a); assert_my_funcref(&mut store, b.as_ref())?; untyped.call( &mut store, @@ -1424,7 +1478,7 @@ fn calls_with_funcref_and_externref() -> anyhow::Result<()> { ], &mut results, )?; - assert_my_externref(results[0].unwrap_externref()); + assert_my_externref(&store, results[0].unwrap_externref().copied()); assert_my_funcref(&mut store, results[1].unwrap_funcref())?; Ok(()) @@ -1494,7 +1548,10 @@ fn typed_concrete_param() -> anyhow::Result<()> { // Calling `typed` with a type that is not a supertype nor a subtype fails // the initial type check. 
- let e = f.typed::, ()>(&store).err().unwrap(); + let e = f + .typed::>, ()>(&store) + .err() + .unwrap(); let e = format!("{e:?}"); assert!(e.contains("type mismatch with parameters")); assert!(e.contains("type mismatch: expected func, found extern")); @@ -1547,7 +1604,7 @@ fn typed_concrete_result() -> anyhow::Result<()> { )); // Nor some unrelated type that it is neither a subtype or supertype of. - let e = f.typed::<(), ExternRef>(&store).err().unwrap(); + let e = f.typed::<(), Rooted>(&store).err().unwrap(); let e = format!("{e:?}"); assert!(e.contains("type mismatch with results")); assert!(e.contains( diff --git a/tests/all/gc.rs b/tests/all/gc.rs index 7910e5739941..23713c7e0a84 100644 --- a/tests/all/gc.rs +++ b/tests/all/gc.rs @@ -14,17 +14,17 @@ impl Drop for SetFlagOnDrop { #[test] #[cfg_attr(miri, ignore)] -fn smoke_test_gc() -> anyhow::Result<()> { +fn smoke_test_gc() -> Result<()> { smoke_test_gc_impl(false) } #[test] #[cfg_attr(miri, ignore)] -fn smoke_test_gc_epochs() -> anyhow::Result<()> { +fn smoke_test_gc_epochs() -> Result<()> { smoke_test_gc_impl(true) } -fn smoke_test_gc_impl(use_epochs: bool) -> anyhow::Result<()> { +fn smoke_test_gc_impl(use_epochs: bool) -> Result<()> { let (mut store, module) = ref_types_module( use_epochs, r#" @@ -56,23 +56,26 @@ fn smoke_test_gc_impl(use_epochs: bool) -> anyhow::Result<()> { let func = instance.get_func(&mut store, "func").unwrap(); let inner_dropped = Arc::new(AtomicBool::new(false)); - let r = ExternRef::new(SetFlagOnDrop(inner_dropped.clone())); + { - let args = [Val::I32(5), Val::ExternRef(Some(r.clone()))]; - func.call(&mut store, &args, &mut [Val::I32(0)])?; - } + let mut scope = RootScope::new(&mut store); + + let r = ExternRef::new(&mut scope, SetFlagOnDrop(inner_dropped.clone())); + { + let args = [Val::I32(5), Val::ExternRef(Some(r.clone()))]; + func.call(&mut scope, &args, &mut [Val::I32(0)])?; + } - // Still held alive by the `VMExternRefActivationsTable` (potentially in - // multiple 
slots within the table) and by this `r` local. - assert!(r.strong_count() >= 2); + // Doing a GC should see that there aren't any `externref`s on the stack in + // Wasm frames anymore. + scope.as_context_mut().gc(); - // Doing a GC should see that there aren't any `externref`s on the stack in - // Wasm frames anymore. - store.gc(); - assert_eq!(r.strong_count(), 1); + // But the scope should still be rooting `r`. + assert!(!inner_dropped.load(SeqCst)); + } - // Dropping `r` should drop the inner `SetFlagOnDrop` value. - drop(r); + // Exiting the scope and unrooting `r` should have dropped the inner + // `SetFlagOnDrop` value. assert!(inner_dropped.load(SeqCst)); Ok(()) @@ -80,7 +83,7 @@ fn smoke_test_gc_impl(use_epochs: bool) -> anyhow::Result<()> { #[test] #[cfg_attr(miri, ignore)] -fn wasm_dropping_refs() -> anyhow::Result<()> { +fn wasm_dropping_refs() -> Result<()> { let (mut store, module) = ref_types_module( false, r#" @@ -100,9 +103,10 @@ fn wasm_dropping_refs() -> anyhow::Result<()> { // NB: 4096 is greater than the initial `VMExternRefActivationsTable` // capacity, so this will trigger at least one GC. for _ in 0..4096 { - let r = ExternRef::new(CountDrops(num_refs_dropped.clone())); + let mut scope = RootScope::new(&mut store); + let r = ExternRef::new(&mut scope, CountDrops(num_refs_dropped.clone())); let args = [Val::ExternRef(Some(r))]; - drop_ref.call(&mut store, &args, &mut [])?; + drop_ref.call(&mut scope, &args, &mut [])?; } assert!(num_refs_dropped.load(SeqCst) > 0); @@ -124,7 +128,7 @@ fn wasm_dropping_refs() -> anyhow::Result<()> { #[test] #[cfg_attr(miri, ignore)] -fn many_live_refs() -> anyhow::Result<()> { +fn many_live_refs() -> Result<()> { let mut wat = r#" (module ;; Make new `externref`s. 
@@ -166,21 +170,33 @@ fn many_live_refs() -> anyhow::Result<()> { let make_ref = Func::wrap(&mut store, { let live_refs = live_refs.clone(); - move || Some(ExternRef::new(CountLiveRefs::new(live_refs.clone()))) + move |mut caller: Caller<'_, _>| { + Some(ExternRef::new( + &mut caller, + CountLiveRefs::new(live_refs.clone()), + )) + } }); - let observe_ref = Func::wrap(&mut store, |r: Option| { - let r = r.unwrap(); - let r = r.data().downcast_ref::().unwrap(); - assert!(r.live_refs.load(SeqCst) > 0); - }); + let observe_ref = Func::wrap( + &mut store, + |caller: Caller<'_, _>, r: Option>| { + let r = r + .unwrap() + .data(&caller) + .unwrap() + .downcast_ref::() + .unwrap(); + assert!(r.live_refs.load(SeqCst) > 0); + }, + ); let instance = Instance::new(&mut store, &module, &[make_ref.into(), observe_ref.into()])?; let many_live_refs = instance.get_func(&mut store, "many_live_refs").unwrap(); many_live_refs.call(&mut store, &[], &mut [])?; - store.gc(); + store.as_context_mut().gc(); assert_eq!(live_refs.load(SeqCst), 0); return Ok(()); @@ -205,7 +221,7 @@ fn many_live_refs() -> anyhow::Result<()> { #[test] #[cfg_attr(miri, ignore)] -fn drop_externref_via_table_set() -> anyhow::Result<()> { +fn drop_externref_via_table_set() -> Result<()> { let (mut store, module) = ref_types_module( false, r#" @@ -225,21 +241,27 @@ fn drop_externref_via_table_set() -> anyhow::Result<()> { let foo_is_dropped = Arc::new(AtomicBool::new(false)); let bar_is_dropped = Arc::new(AtomicBool::new(false)); - let foo = ExternRef::new(SetFlagOnDrop(foo_is_dropped.clone())); - let bar = ExternRef::new(SetFlagOnDrop(bar_is_dropped.clone())); - { - let args = vec![Val::ExternRef(Some(foo))]; - table_set.call(&mut store, &args, &mut [])?; - } - store.gc(); - assert!(!foo_is_dropped.load(SeqCst)); - assert!(!bar_is_dropped.load(SeqCst)); + let mut scope = RootScope::new(&mut store); - { - let args = vec![Val::ExternRef(Some(bar))]; - table_set.call(&mut store, &args, &mut [])?; + let foo = 
ExternRef::new(&mut scope, SetFlagOnDrop(foo_is_dropped.clone())); + let bar = ExternRef::new(&mut scope, SetFlagOnDrop(bar_is_dropped.clone())); + + { + let args = vec![Val::ExternRef(Some(foo))]; + table_set.call(&mut scope, &args, &mut [])?; + } + + scope.as_context_mut().gc(); + assert!(!foo_is_dropped.load(SeqCst)); + assert!(!bar_is_dropped.load(SeqCst)); + + { + let args = vec![Val::ExternRef(Some(bar))]; + table_set.call(&mut scope, &args, &mut [])?; + } } + store.gc(); assert!(foo_is_dropped.load(SeqCst)); assert!(!bar_is_dropped.load(SeqCst)); @@ -253,7 +275,8 @@ fn drop_externref_via_table_set() -> anyhow::Result<()> { #[test] #[cfg_attr(miri, ignore)] -fn global_drops_externref() -> anyhow::Result<()> { +fn global_drops_externref() -> Result<()> { + let _ = env_logger::try_init(); test_engine(&Engine::default())?; if !skip_pooling_allocator_tests() { @@ -264,10 +287,10 @@ fn global_drops_externref() -> anyhow::Result<()> { return Ok(()); - fn test_engine(engine: &Engine) -> anyhow::Result<()> { + fn test_engine(engine: &Engine) -> Result<()> { let mut store = Store::new(&engine, ()); let flag = Arc::new(AtomicBool::new(false)); - let externref = ExternRef::new(SetFlagOnDrop(flag.clone())); + let externref = ExternRef::new(&mut store, SetFlagOnDrop(flag.clone())); Global::new( &mut store, GlobalType::new(ValType::EXTERNREF, Mutability::Const), @@ -291,9 +314,9 @@ fn global_drops_externref() -> anyhow::Result<()> { "#, )?; let instance = Instance::new(&mut store, &module, &[])?; - let run = instance.get_typed_func::, ()>(&mut store, "run")?; + let run = instance.get_typed_func::>, ()>(&mut store, "run")?; let flag = Arc::new(AtomicBool::new(false)); - let externref = ExternRef::new(SetFlagOnDrop(flag.clone())); + let externref = ExternRef::new(&mut store, SetFlagOnDrop(flag.clone())); run.call(&mut store, Some(externref))?; drop(store); assert!(flag.load(SeqCst)); @@ -303,7 +326,7 @@ fn global_drops_externref() -> anyhow::Result<()> { #[test] 
#[cfg_attr(miri, ignore)] -fn table_drops_externref() -> anyhow::Result<()> { +fn table_drops_externref() -> Result<()> { test_engine(&Engine::default())?; if !skip_pooling_allocator_tests() { @@ -314,10 +337,10 @@ fn table_drops_externref() -> anyhow::Result<()> { return Ok(()); - fn test_engine(engine: &Engine) -> anyhow::Result<()> { + fn test_engine(engine: &Engine) -> Result<()> { let mut store = Store::new(&engine, ()); let flag = Arc::new(AtomicBool::new(false)); - let externref = ExternRef::new(SetFlagOnDrop(flag.clone())); + let externref = ExternRef::new(&mut store, SetFlagOnDrop(flag.clone())); Table::new( &mut store, TableType::new(RefType::EXTERNREF, 1, None), @@ -342,9 +365,9 @@ fn table_drops_externref() -> anyhow::Result<()> { "#, )?; let instance = Instance::new(&mut store, &module, &[])?; - let run = instance.get_typed_func::, ()>(&mut store, "run")?; + let run = instance.get_typed_func::>, ()>(&mut store, "run")?; let flag = Arc::new(AtomicBool::new(false)); - let externref = ExternRef::new(SetFlagOnDrop(flag.clone())); + let externref = ExternRef::new(&mut store, SetFlagOnDrop(flag.clone())); run.call(&mut store, Some(externref))?; drop(store); assert!(flag.load(SeqCst)); @@ -353,72 +376,7 @@ fn table_drops_externref() -> anyhow::Result<()> { } #[test] -#[cfg_attr(miri, ignore)] -fn gee_i_sure_hope_refcounting_is_atomic() -> anyhow::Result<()> { - let mut config = Config::new(); - config.wasm_reference_types(true); - config.epoch_interruption(true); - let engine = Engine::new(&config)?; - let mut store = Store::new(&engine, ()); - let module = Module::new( - &engine, - r#" - (module - (global (mut externref) (ref.null extern)) - (table 1 externref) - - (func (export "run") (param externref) - local.get 0 - global.set 0 - i32.const 0 - local.get 0 - table.set 0 - loop - global.get 0 - global.set 0 - - i32.const 0 - i32.const 0 - table.get - table.set - - local.get 0 - call $f - - br 0 - end - ) - - (func $f (param externref)) - ) - "#, - )?; - - 
let instance = Instance::new(&mut store, &module, &[])?; - let run = instance.get_typed_func::, ()>(&mut store, "run")?; - - let flag = Arc::new(AtomicBool::new(false)); - let externref = ExternRef::new(SetFlagOnDrop(flag.clone())); - let externref2 = externref.clone(); - - let child = std::thread::spawn(move || run.call(&mut store, Some(externref2))); - - for _ in 0..10000 { - drop(externref.clone()); - } - engine.increment_epoch(); - - assert!(child.join().unwrap().is_err()); - assert!(!flag.load(SeqCst)); - assert_eq!(externref.strong_count(), 1); - drop(externref); - assert!(flag.load(SeqCst)); - - Ok(()) -} - -#[test] -fn global_init_no_leak() -> anyhow::Result<()> { +fn global_init_no_leak() -> Result<()> { let (mut store, module) = ref_types_module( false, r#" @@ -429,7 +387,8 @@ fn global_init_no_leak() -> anyhow::Result<()> { "#, )?; - let externref = ExternRef::new(()); + let flag = Arc::new(AtomicBool::new(false)); + let externref = ExternRef::new(&mut store, SetFlagOnDrop(flag.clone())); let global = Global::new( &mut store, GlobalType::new(ValType::EXTERNREF, Mutability::Const), @@ -437,14 +396,14 @@ fn global_init_no_leak() -> anyhow::Result<()> { )?; Instance::new(&mut store, &module, &[global.into()])?; drop(store); - assert_eq!(externref.strong_count(), 1); + assert!(flag.load(SeqCst)); Ok(()) } #[test] #[cfg_attr(miri, ignore)] -fn no_gc_middle_of_args() -> anyhow::Result<()> { +fn no_gc_middle_of_args() -> Result<()> { let (mut store, module) = ref_types_module( false, r#" @@ -470,23 +429,43 @@ fn no_gc_middle_of_args() -> anyhow::Result<()> { )?; let mut linker = Linker::new(store.engine()); - linker.func_wrap("", "return_some", || { - ( - Some(ExternRef::new("a".to_string())), - Some(ExternRef::new("b".to_string())), - Some(ExternRef::new("c".to_string())), - ) + linker.func_wrap("", "return_some", |mut caller: Caller<'_, _>| { + let a = Some(ExternRef::new(&mut caller, String::from("a"))); + let b = Some(ExternRef::new(&mut caller, 
String::from("b"))); + let c = Some(ExternRef::new(&mut caller, String::from("c"))); + (a, b, c) })?; linker.func_wrap( "", "take_some", - |a: Option, b: Option, c: Option| { + |caller: Caller<'_, _>, + a: Option>, + b: Option>, + c: Option>| { let a = a.unwrap(); let b = b.unwrap(); let c = c.unwrap(); - assert_eq!(a.data().downcast_ref::().unwrap(), "a"); - assert_eq!(b.data().downcast_ref::().unwrap(), "b"); - assert_eq!(c.data().downcast_ref::().unwrap(), "c"); + assert_eq!( + a.data(&caller) + .expect("rooted") + .downcast_ref::() + .expect("is string"), + "a" + ); + assert_eq!( + b.data(&caller) + .expect("rooted") + .downcast_ref::() + .expect("is string"), + "b" + ); + assert_eq!( + c.data(&caller) + .expect("rooted") + .downcast_ref::() + .expect("is string"), + "c" + ); }, )?; @@ -503,7 +482,7 @@ fn no_gc_middle_of_args() -> anyhow::Result<()> { // TODO(6530): s390x doesn't support tail calls yet. target_arch = "s390x" ), ignore)] -fn gc_and_tail_calls_and_stack_arguments() -> anyhow::Result<()> { +fn gc_and_tail_calls_and_stack_arguments() -> Result<()> { // Test that GC refs in tail-calls' stack arguments get properly accounted // for in stack maps. 
// @@ -622,23 +601,35 @@ fn gc_and_tail_calls_and_stack_arguments() -> anyhow::Result<()> { )?; let mut linker = Linker::new(store.engine()); - linker.func_wrap("", "make_some", || { + linker.func_wrap("", "make_some", |mut caller: Caller<'_, _>| { ( - Some(ExternRef::new("a".to_string())), - Some(ExternRef::new("b".to_string())), - Some(ExternRef::new("c".to_string())), + Some(ExternRef::new(&mut caller, "a".to_string())), + Some(ExternRef::new(&mut caller, "b".to_string())), + Some(ExternRef::new(&mut caller, "c".to_string())), ) })?; linker.func_wrap( "", "take_some", - |a: Option, b: Option, c: Option| { + |caller: Caller<'_, _>, + a: Option>, + b: Option>, + c: Option>| { let a = a.unwrap(); let b = b.unwrap(); let c = c.unwrap(); - assert_eq!(a.data().downcast_ref::().unwrap(), "a"); - assert_eq!(b.data().downcast_ref::().unwrap(), "b"); - assert_eq!(c.data().downcast_ref::().unwrap(), "c"); + assert_eq!( + a.data(&caller).unwrap().downcast_ref::().unwrap(), + "a" + ); + assert_eq!( + b.data(&caller).unwrap().downcast_ref::().unwrap(), + "b" + ); + assert_eq!( + c.data(&caller).unwrap().downcast_ref::().unwrap(), + "c" + ); }, )?; linker.func_wrap("", "gc", |mut caller: Caller<()>| { @@ -683,10 +674,11 @@ fn no_leak_with_global_get_elem_segment() -> anyhow::Result<()> { "#, )?; + let externref = ExternRef::new(&mut store, SetFlagOnDrop(dropped.clone())); let global = Global::new( &mut store, GlobalType::new(ValType::EXTERNREF, Mutability::Const), - Val::ExternRef(Some(ExternRef::new(SetFlagOnDrop(dropped.clone())))), + externref.into(), )?; Instance::new(&mut store, &module, &[global.into()])?; @@ -716,10 +708,11 @@ fn table_init_with_externref_global_get() -> anyhow::Result<()> { "#, )?; + let externref = ExternRef::new(&mut store, SetFlagOnDrop(dropped.clone())); let global = Global::new( &mut store, GlobalType::new(ValType::EXTERNREF, Mutability::Const), - Val::ExternRef(Some(ExternRef::new(SetFlagOnDrop(dropped.clone())))), + externref.into(), )?; 
Instance::new(&mut store, &module, &[global.into()])?; @@ -729,3 +722,39 @@ fn table_init_with_externref_global_get() -> anyhow::Result<()> { assert!(dropped.load(SeqCst)); Ok(()) } + +#[test] +fn rooted_gets_collected_after_scope_exit() -> Result<()> { + let mut store = Store::<()>::default(); + let flag = Arc::new(AtomicBool::new(false)); + + { + let mut scope = RootScope::new(&mut store); + let _externref = ExternRef::new(&mut scope, SetFlagOnDrop(flag.clone())); + + scope.as_context_mut().gc(); + assert!(!flag.load(SeqCst), "not dropped when still rooted"); + } + + store.as_context_mut().gc(); + assert!(flag.load(SeqCst), "dropped after being unrooted"); + + Ok(()) +} + +#[test] +fn manually_rooted_gets_collected_after_unrooting() -> Result<()> { + let mut store = Store::<()>::default(); + let flag = Arc::new(AtomicBool::new(false)); + + let externref = ExternRef::new_manually_rooted(&mut store, SetFlagOnDrop(flag.clone())); + + store.gc(); + assert!(!flag.load(SeqCst), "not dropped when still rooted"); + + externref.unroot(&mut store); + store.gc(); + assert!(flag.load(SeqCst), "dropped after being unrooted"); + + Ok(()) +} diff --git a/tests/all/host_funcs.rs b/tests/all/host_funcs.rs index 4609b3b6ed32..bf8f4b6ded2a 100644 --- a/tests/all/host_funcs.rs +++ b/tests/all/host_funcs.rs @@ -31,7 +31,7 @@ fn wrap_func() -> Result<()> { linker.func_wrap("", "", || -> i64 { 0 })?; linker.func_wrap("m", "f", || -> f32 { 0.0 })?; linker.func_wrap("m2", "f", || -> f64 { 0.0 })?; - linker.func_wrap("m3", "", || -> Option { None })?; + linker.func_wrap("m3", "", || -> Option> { None })?; linker.func_wrap("m3", "f", || -> Option { None })?; linker.func_wrap("", "f1", || -> Result<()> { loop {} })?; @@ -39,7 +39,9 @@ fn wrap_func() -> Result<()> { linker.func_wrap("", "f3", || -> Result { loop {} })?; linker.func_wrap("", "f4", || -> Result { loop {} })?; linker.func_wrap("", "f5", || -> Result { loop {} })?; - linker.func_wrap("", "f6", || -> Result> { loop {} })?; + 
linker.func_wrap("", "f6", || -> Result>> { + loop {} + })?; linker.func_wrap("", "f7", || -> Result> { loop {} })?; Ok(()) } @@ -142,9 +144,14 @@ fn signatures_match() -> Result<()> { linker.func_wrap( "", "f6", - |_: f32, _: f64, _: i32, _: i64, _: i32, _: Option, _: Option| -> f64 { - loop {} - }, + |_: f32, + _: f64, + _: i32, + _: i64, + _: i32, + _: Option>, + _: Option| + -> f64 { loop {} }, )?; let mut store = Store::new(&engine, ()); @@ -275,7 +282,7 @@ fn import_works() -> Result<()> { c: i32, d: f32, e: f64, - f: Option, + f: Option>, g: Option| { assert_eq!(a, 100); assert_eq!(b, 200); @@ -283,7 +290,12 @@ fn import_works() -> Result<()> { assert_eq!(d, 400.0); assert_eq!(e, 500.0); assert_eq!( - f.as_ref().unwrap().data().downcast_ref::().unwrap(), + f.as_ref() + .unwrap() + .data(&caller) + .unwrap() + .downcast_ref::() + .unwrap(), "hello" ); let mut results = [Val::I32(0)]; @@ -302,14 +314,8 @@ fn import_works() -> Result<()> { let instance = linker.instantiate(&mut store, &module)?; let run = instance.get_func(&mut store, "run").unwrap(); let funcref = Val::FuncRef(Some(Func::wrap(&mut store, || -> i32 { 42 }))); - run.call( - &mut store, - &[ - Val::ExternRef(Some(ExternRef::new("hello".to_string()))), - funcref, - ], - &mut [], - )?; + let externref = Val::ExternRef(Some(ExternRef::new(&mut store, "hello".to_string()))); + run.call(&mut store, &[externref, funcref], &mut [])?; assert_eq!(HITS.load(SeqCst), 4);