From cfc0495e6e9cb967ec76936d28dfc6050df6aa6a Mon Sep 17 00:00:00 2001
From: Alex Crichton
Date: Tue, 19 Aug 2025 15:22:11 -0700
Subject: [PATCH 1/3] Make const-expr evaluation `async`

This commit is extracted from #11430 to accurately reflect that const-expr
evaluation is an async operation due to GC pauses that may happen. The changes
in this commit are:

* Const-expr evaluation is, at its core, now an `async` function.

* To leverage this new `async`-ness all internal operations are switched from
  `*_maybe_async` to `*_async`, meaning all the `*_maybe_async` methods can be
  removed.

* Some libcalls using `*_maybe_async` are switched to using `*_async` plus the
  `block_on!` utility to help jettison more `*_maybe_async` methods.

* Instance initialization is now an `async` function. This is temporarily
  handled with `block_on` during instance initialization to avoid propagating
  the `async`-ness further upwards. This `block_on` will get deleted in future
  refactorings.

* Const-expr evaluation has been refactored slightly to enable a fast path in
  global initialization which skips an `await` point entirely, achieving
  performance parity with benchmarks taken prior to this commit.

This ended up fixing a niche GC issue: if a wasm execution was suspended
during `table.init`, for example because a const-expr evaluation triggered a
GC, and that execution was then cancelled, the host would panic. The panic
happened because the GC operation returned a `Result` which was `unwrap`'d as
part of const-expr evaluation, and evaluation can fail not only due to invalid
expressions but also due to "computation is cancelled" traps.
---
 .../src/runtime/gc/enabled/arrayref.rs        |  34 --
 .../src/runtime/gc/enabled/structref.rs       |  23 --
 crates/wasmtime/src/runtime/instance.rs       |  16 +-
 crates/wasmtime/src/runtime/store/gc.rs       |  51 ---
 crates/wasmtime/src/runtime/vm/const_expr.rs  | 130 ++++---
 crates/wasmtime/src/runtime/vm/instance.rs    |  14 +-
 .../src/runtime/vm/instance/allocator.rs      | 120 +++---
 crates/wasmtime/src/runtime/vm/libcalls.rs    | 348 +++++++++---------
 8 files changed, 336 insertions(+), 400 deletions(-)

diff --git a/crates/wasmtime/src/runtime/gc/enabled/arrayref.rs b/crates/wasmtime/src/runtime/gc/enabled/arrayref.rs
index d44ff776bf4c..a5bcbd0e8b89 100644
--- a/crates/wasmtime/src/runtime/gc/enabled/arrayref.rs
+++ b/crates/wasmtime/src/runtime/gc/enabled/arrayref.rs
@@ -367,25 +367,6 @@ impl ArrayRef {
         .await
     }
 
-    /// Like `ArrayRef::new` but when async is configured must only ever be
-    /// called from on a fiber stack.
-    pub(crate) unsafe fn new_maybe_async(
-        store: &mut StoreOpaque,
-        allocator: &ArrayRefPre,
-        elem: &Val,
-        len: u32,
-    ) -> Result<Rooted<ArrayRef>> {
-        // Type check the initial element value against the element type.
-        elem.ensure_matches_ty(store, allocator.ty.element_type().unpack())
-            .context("element type mismatch")?;
-
-        unsafe {
-            store.retry_after_gc_maybe_async((), |store, ()| {
-                Self::new_from_iter(store, allocator, RepeatN(elem, len))
-            })
-        }
-    }
-
     /// Allocate a new array of the given elements.
     ///
     /// Does not attempt a GC on OOM; leaves that to callers.
@@ -544,21 +525,6 @@ impl ArrayRef {
         .await
     }
 
-    /// Like `ArrayRef::new_fixed[_async]` but it is the caller's responsibility
-    /// to ensure that when async is enabled, this is only called from on a
-    /// fiber stack.
-    pub(crate) unsafe fn new_fixed_maybe_async(
-        store: &mut StoreOpaque,
-        allocator: &ArrayRefPre,
-        elems: &[Val],
-    ) -> Result<Rooted<ArrayRef>> {
-        unsafe {
-            store.retry_after_gc_maybe_async((), |store, ()| {
-                Self::new_from_iter(store, allocator, elems.iter())
-            })
-        }
-    }
-
     #[inline]
     pub(crate) fn comes_from_same_store(&self, store: &StoreOpaque) -> bool {
         self.inner.comes_from_same_store(store)
diff --git a/crates/wasmtime/src/runtime/gc/enabled/structref.rs b/crates/wasmtime/src/runtime/gc/enabled/structref.rs
index d65beef4e5c1..53f03601cd07 100644
--- a/crates/wasmtime/src/runtime/gc/enabled/structref.rs
+++ b/crates/wasmtime/src/runtime/gc/enabled/structref.rs
@@ -269,10 +269,6 @@ impl StructRef {
     ///
     /// # Panics
     ///
-    /// Panics if your engine is not configured for async; use
-    /// [`StructRef::new`][crate::StructRef::new] to perform synchronous
-    /// allocation instead.
-    ///
     /// Panics if the allocator, or any of the field values, is not associated
     /// with the given store.
     #[cfg(feature = "async")]
@@ -290,10 +286,6 @@ impl StructRef {
         allocator: &StructRefPre,
         fields: &[Val],
     ) -> Result<Rooted<StructRef>> {
-        assert!(
-            store.async_support(),
-            "use `StructRef::new` with synchronous stores"
-        );
         Self::type_check_fields(store, allocator, fields)?;
         store
             .retry_after_gc_async((), |store, ()| {
@@ -302,21 +294,6 @@ impl StructRef {
         .await
     }
 
-    /// Like `Self::new` but caller's must ensure that if the store is
-    /// configured for async, this is only ever called from on a fiber stack.
-    pub(crate) unsafe fn new_maybe_async(
-        store: &mut StoreOpaque,
-        allocator: &StructRefPre,
-        fields: &[Val],
-    ) -> Result<Rooted<StructRef>> {
-        Self::type_check_fields(store, allocator, fields)?;
-        unsafe {
-            store.retry_after_gc_maybe_async((), |store, ()| {
-                Self::new_unchecked(store, allocator, fields)
-            })
-        }
-    }
-
     /// Type check the field values before allocating a new struct.
     fn type_check_fields(
         store: &mut StoreOpaque,
diff --git a/crates/wasmtime/src/runtime/instance.rs b/crates/wasmtime/src/runtime/instance.rs
index 938129d34966..0666c9f99355 100644
--- a/crates/wasmtime/src/runtime/instance.rs
+++ b/crates/wasmtime/src/runtime/instance.rs
@@ -377,7 +377,21 @@ impl Instance {
             .features()
             .contains(WasmFeatures::BULK_MEMORY);
 
-        vm::initialize_instance(store, id, compiled_module.module(), bulk_memory)?;
+        if store.async_support() {
+            store.block_on(|store| {
+                let module = compiled_module.module().clone();
+                Box::pin(
+                    async move { vm::initialize_instance(store, id, &module, bulk_memory).await },
+                )
+            })??;
+        } else {
+            vm::assert_ready(vm::initialize_instance(
+                store,
+                id,
+                compiled_module.module(),
+                bulk_memory,
+            ))?;
+        }
 
         Ok((instance, compiled_module.module().start_func))
     }
diff --git a/crates/wasmtime/src/runtime/store/gc.rs b/crates/wasmtime/src/runtime/store/gc.rs
index 6cbc7bc095be..93614a730200 100644
--- a/crates/wasmtime/src/runtime/store/gc.rs
+++ b/crates/wasmtime/src/runtime/store/gc.rs
@@ -197,53 +197,6 @@ impl StoreOpaque {
             },
         }
     }
-
-    /// Like `retry_after_gc` but async yielding (if necessary) is transparent.
-    ///
-    /// # Safety
-    ///
-    /// When async is enabled, it is the caller's responsibility to ensure that
-    /// this is called on a fiber stack.
-    pub(crate) unsafe fn retry_after_gc_maybe_async<T, U>(
-        &mut self,
-        value: T,
-        alloc_func: impl Fn(&mut Self, T) -> Result<U>,
-    ) -> Result<U>
-    where
-        T: Send + Sync + 'static,
-    {
-        self.ensure_gc_store()?;
-        match alloc_func(self, value) {
-            Ok(x) => Ok(x),
-            Err(e) => match e.downcast::<GcHeapOutOfMemory<T>>() {
-                Ok(oom) => {
-                    let (value, oom) = oom.take_inner();
-                    // Note it's the caller's responsibility to ensure that
-                    // this is on a fiber stack if necessary.
-                    //
-                    // SAFETY: FIXME(#11409)
-                    unsafe {
-                        if self.async_support() {
-                            #[cfg(feature = "async")]
-                            self.block_on(|store| {
-                                Box::pin(
-                                    store.gc_unsafe_get_limiter(None, Some(oom.bytes_needed())),
-                                )
-                            })?;
-                            #[cfg(not(feature = "async"))]
-                            unreachable!();
-                        } else {
-                            vm::assert_ready(
-                                self.gc_unsafe_get_limiter(None, Some(oom.bytes_needed())),
-                            );
-                        }
-                    }
-                    alloc_func(self, value)
-                }
-                Err(e) => Err(e),
-            },
-        }
-    }
 }
 
 #[cfg(feature = "async")]
@@ -258,10 +211,6 @@ impl StoreOpaque {
     where
         T: Send + Sync + 'static,
     {
-        assert!(
-            self.async_support(),
-            "you must configure async to use the `*_async` versions of methods"
-        );
         self.ensure_gc_store()?;
         match alloc_func(self, value) {
             Ok(x) => Ok(x),
diff --git a/crates/wasmtime/src/runtime/vm/const_expr.rs b/crates/wasmtime/src/runtime/vm/const_expr.rs
index 807305e47760..19350744f358 100644
--- a/crates/wasmtime/src/runtime/vm/const_expr.rs
+++ b/crates/wasmtime/src/runtime/vm/const_expr.rs
@@ -1,6 +1,7 @@
 //! Evaluating const expressions.
 
 use crate::prelude::*;
+use crate::runtime::vm;
 use crate::store::{AutoAssertNoGc, InstanceId, StoreOpaque};
 #[cfg(feature = "gc")]
 use crate::{
@@ -15,9 +16,18 @@ use wasmtime_environ::{VMSharedTypeIndex, WasmCompositeInnerType, WasmCompositeT
 ///
 /// This can be reused across many const expression evaluations to reuse
 /// allocated resources, if any.
-#[derive(Default)]
 pub struct ConstExprEvaluator {
     stack: Vec<Val>,
+    simple: Val,
+}
+
+impl Default for ConstExprEvaluator {
+    fn default() -> ConstExprEvaluator {
+        ConstExprEvaluator {
+            stack: Vec::new(),
+            simple: Val::I32(0),
+        }
+    }
 }
 
 /// The context within which a particular const expression is evaluated.
@@ -58,9 +68,8 @@ impl ConstEvalContext {
         fields.len()
     }
 
-    /// Safety: field values must be of the correct types.
     #[cfg(feature = "gc")]
-    unsafe fn struct_new(
+    async fn struct_new(
         &mut self,
         store: &mut StoreOpaque,
         shared_ty: VMSharedTypeIndex,
@@ -68,12 +77,12 @@ impl ConstEvalContext {
     ) -> Result<Val> {
         let struct_ty = StructType::from_shared_type_index(store.engine(), shared_ty);
         let allocator = StructRefPre::_new(store, struct_ty);
-        let struct_ref = unsafe { StructRef::new_maybe_async(store, &allocator, &fields)? };
+        let struct_ref = StructRef::_new_async(store, &allocator, &fields).await?;
         Ok(Val::AnyRef(Some(struct_ref.into())))
     }
 
     #[cfg(feature = "gc")]
-    fn struct_new_default(
+    async fn struct_new_default(
         &mut self,
         store: &mut StoreOpaque,
         shared_ty: VMSharedTypeIndex,
@@ -121,13 +130,50 @@ impl ConstEvalContext {
             })
             .collect::<Vec<_>>();
 
-        unsafe { self.struct_new(store, shared_ty, &fields) }
+        self.struct_new(store, shared_ty, &fields).await
     }
 }
 
 impl ConstExprEvaluator {
-    /// Evaluate the given const expression in the given context.
+    /// Same as [`Self::eval`] except that this is specifically intended for
+    /// integral constant expressions.
     ///
+    /// # Panics
+    ///
+    /// Panics if `ConstExpr` contains GC ops (i.e. it's not for an integral
+    /// type).
+    pub fn eval_int(
+        &mut self,
+        store: &mut StoreOpaque,
+        context: &mut ConstEvalContext,
+        expr: &ConstExpr,
+    ) -> Result<&Val> {
+        // Try to evaluate a simple expression first before doing the more
+        // complicated eval loop below.
+        if self.try_simple(expr).is_some() {
+            return Ok(&self.simple);
+        }
+
+        // Note that `assert_ready` here should be valid as producing an
+        // integer cannot involve GC, meaning that async operations aren't
+        // used.
+        let mut scope = OpaqueRootScope::new(store);
+        vm::assert_ready(self.eval_loop(&mut scope, context, expr))
+    }
+
+    /// Attempts to peek into `expr` to see if it's trivial to evaluate, e.g.
+    /// for `i32.const N`.
+    #[inline]
+    pub fn try_simple(&mut self, expr: &ConstExpr) -> Option<&Val> {
+        match expr.ops() {
+            [ConstOp::I32Const(i)] => Some(self.return_one(Val::I32(*i))),
+            [ConstOp::I64Const(i)] => Some(self.return_one(Val::I64(*i))),
+            [ConstOp::F32Const(f)] => Some(self.return_one(Val::F32(*f))),
+            [ConstOp::F64Const(f)] => Some(self.return_one(Val::F64(*f))),
+            _ => None,
+        }
+    }
+
+    /// Evaluate the given const expression in the given context.
     ///
     /// Note that the `store` argument is an `OpaqueRootScope` which is used to
     /// require that a GC rooting scope external to evaluation of this constant
@@ -135,59 +181,44 @@ impl ConstExprEvaluator {
     /// and itself trigger a GC meaning that all references must be rooted,
     /// hence the external requirement of a rooting scope.
     ///
-    /// # Unsafety
-    ///
-    /// When async is enabled, this may only be executed on a fiber stack.
+    /// # Panics
     ///
-    /// The given const expression must be valid within the given context,
-    /// e.g. the const expression must be well-typed and the context must return
-    /// global values of the expected types. This evaluator operates directly on
-    /// untyped `ValRaw`s and does not and cannot check that its operands are of
-    /// the correct type.
-    ///
-    /// If given async store, then this must be called from on an async fiber
-    /// stack.
-    pub unsafe fn eval(
+    /// This function will panic if `expr` is an invalid constant expression.
+    pub async fn eval(
         &mut self,
        store: &mut OpaqueRootScope<&mut StoreOpaque>,
         context: &mut ConstEvalContext,
         expr: &ConstExpr,
     ) -> Result<&Val> {
-        match expr.ops() {
-            // Skip the interpreter loop for some known constant patterns that
-            // are easy to calculate the result of.
-            [ConstOp::I32Const(i)] => self.return_one(Val::I32(*i)),
-            [ConstOp::I64Const(i)] => self.return_one(Val::I64(*i)),
-            [ConstOp::F32Const(f)] => self.return_one(Val::F32(*f)),
-            [ConstOp::F64Const(f)] => self.return_one(Val::F64(*f)),
-
-            // Fall back to the interpreter loop for all other expressions.
-            //
-            // SAFETY: this function has the same contract as `eval_loop`.
-            other => unsafe { self.eval_loop(store, context, other) },
+        // Same structure as `eval_int` above, except using `.await` and with a
+        // slightly different type signature for this function.
+        if self.try_simple(expr).is_some() {
+            return Ok(&self.simple);
         }
+        self.eval_loop(store, context, expr).await
     }
 
-    fn return_one(&mut self, val: Val) -> Result<&Val> {
-        self.stack.clear();
-        self.stack.push(val);
-        Ok(&self.stack[0])
+    #[inline]
+    fn return_one(&mut self, val: Val) -> &Val {
+        self.simple = val;
+        &self.simple
     }
 
-    /// # Safety
-    ///
-    /// See [`Self::eval`].
- unsafe fn eval_loop( + #[cold] + async fn eval_loop( &mut self, store: &mut OpaqueRootScope<&mut StoreOpaque>, context: &mut ConstEvalContext, - ops: &[ConstOp], + expr: &ConstExpr, ) -> Result<&Val> { - log::trace!("evaluating const expr: {ops:?}"); + log::trace!("evaluating const expr: {expr:?}"); self.stack.clear(); - for op in ops { + for op in expr.ops() { log::trace!("const-evaluating op: {op:?}"); match op { ConstOp::I32Const(i) => self.stack.push(Val::I32(*i)), @@ -267,9 +298,9 @@ impl ConstExprEvaluator { } let start = self.stack.len() - len; - let s = unsafe { - context.struct_new(store, interned_type_index, &self.stack[start..])? - }; + let s = context + .struct_new(store, interned_type_index, &self.stack[start..]) + .await?; self.stack.truncate(start); self.stack.push(s); } @@ -279,7 +310,8 @@ impl ConstExprEvaluator { let ty = store.instance(context.instance).env_module().types [*struct_type_index] .unwrap_engine_type_index(); - self.stack.push(context.struct_new_default(store, ty)?); + self.stack + .push(context.struct_new_default(store, ty).await?); } #[cfg(feature = "gc")] @@ -293,7 +325,7 @@ impl ConstExprEvaluator { let elem = self.pop()?; let pre = ArrayRefPre::_new(store, ty); - let array = unsafe { ArrayRef::new_maybe_async(store, &pre, &elem, len)? }; + let array = ArrayRef::_new_async(store, &pre, &elem, len).await?; self.stack.push(Val::AnyRef(Some(array.into()))); } @@ -310,7 +342,7 @@ impl ConstExprEvaluator { .expect("type should have a default value"); let pre = ArrayRefPre::_new(store, ty); - let array = unsafe { ArrayRef::new_maybe_async(store, &pre, &elem, len)? }; + let array = ArrayRef::_new_async(store, &pre, &elem, len).await?; self.stack.push(Val::AnyRef(Some(array.into()))); } @@ -340,7 +372,7 @@ impl ConstExprEvaluator { .collect::>(); let pre = ArrayRefPre::_new(store, ty); - let array = unsafe { ArrayRef::new_fixed_maybe_async(store, &pre, &elems)? }; + let array = ArrayRef::_new_fixed_async(store, &pre, &elems).await?; self.stack.push(Val::AnyRef(Some(array.into()))); } diff --git a/crates/wasmtime/src/runtime/vm/instance.rs b/crates/wasmtime/src/runtime/vm/instance.rs index 9b0aa6771f5e..f1e8d87561ab 100644 --- a/crates/wasmtime/src/runtime/vm/instance.rs +++ b/crates/wasmtime/src/runtime/vm/instance.rs @@ -875,7 +875,7 @@ impl Instance { /// /// Returns a `Trap` error when the range within the table is out of bounds /// or the range within the passive element is out of bounds. 
-    pub(crate) fn table_init(
+    pub(crate) async fn table_init(
         self: Pin<&mut Self>,
         store: &mut StoreOpaque,
         table_index: TableIndex,
@@ -897,9 +897,10 @@ impl Instance {
             src,
             len,
         )
+        .await
     }
 
-    pub(crate) fn table_init_segment(
+    pub(crate) async fn table_init_segment(
         store: &mut StoreOpaque,
         elements_instance_id: InstanceId,
         const_evaluator: &mut ConstExprEvaluator,
@@ -955,11 +956,10 @@ impl Instance {
             .ok_or(Trap::TableOutOfBounds)?;
         let mut context = ConstEvalContext::new(elements_instance_id);
         for (i, expr) in positions.zip(exprs) {
-            let element = unsafe {
-                const_evaluator
-                    .eval(&mut store, &mut context, expr)
-                    .expect("const expr should be valid")
-            };
+            let element = const_evaluator
+                .eval(&mut store, &mut context, expr)
+                .await
+                .expect("const expr should be valid");
             table.set_(&mut store, i, element.ref_().unwrap()).unwrap();
         }
     }
diff --git a/crates/wasmtime/src/runtime/vm/instance/allocator.rs b/crates/wasmtime/src/runtime/vm/instance/allocator.rs
index c9929ab928c0..dbc6c44e86af 100644
--- a/crates/wasmtime/src/runtime/vm/instance/allocator.rs
+++ b/crates/wasmtime/src/runtime/vm/instance/allocator.rs
@@ -387,48 +387,28 @@ impl dyn InstanceAllocator + '_ {
         self.increment_core_instance_count()?;
 
         let num_defined_memories = module.num_defined_memories();
-        let num_defined_tables = module.num_defined_tables();
-
-        let mut guard = DeallocateOnDrop {
-            run_deallocate: true,
-            memories: PrimaryMap::with_capacity(num_defined_memories),
-            tables: PrimaryMap::with_capacity(num_defined_tables),
-            allocator: self,
-        };
-
-        self.allocate_memories(&mut request, &mut guard.memories)?;
-        self.allocate_tables(&mut request, &mut guard.tables)?;
-        guard.run_deallocate = false;
-        // SAFETY: memories/tables were just allocated from the store within
-        // `request` and this function's own contract requires that the
-        // imports are valid.
-        return unsafe {
-            Ok(Instance::new(
-                request,
-                mem::take(&mut guard.memories),
-                mem::take(&mut guard.tables),
-                &module.memories,
-            ))
-        };
-
-        struct DeallocateOnDrop<'a> {
-            run_deallocate: bool,
-            memories: PrimaryMap<DefinedMemoryIndex, (MemoryAllocationIndex, Memory)>,
-            tables: PrimaryMap<DefinedTableIndex, (TableAllocationIndex, Table)>,
-            allocator: &'a (dyn InstanceAllocator + 'a),
-        }
+        let mut memories = PrimaryMap::with_capacity(num_defined_memories);
 
-        impl Drop for DeallocateOnDrop<'_> {
-            fn drop(&mut self) {
-                if !self.run_deallocate {
-                    return;
-                }
+        let num_defined_tables = module.num_defined_tables();
+        let mut tables = PrimaryMap::with_capacity(num_defined_tables);
+
+        match (|| {
+            self.allocate_memories(&mut request, &mut memories)?;
+            self.allocate_tables(&mut request, &mut tables)?;
+            Ok(())
+        })() {
+            // SAFETY: memories/tables were just allocated from the store within
+            // `request` and this function's own contract requires that the
+            // imports are valid.
+ Ok(_) => unsafe { Ok(Instance::new(request, memories, tables, &module.memories)) }, + Err(e) => { // SAFETY: these were previously allocated by this allocator unsafe { - self.allocator.deallocate_memories(&mut self.memories); - self.allocator.deallocate_tables(&mut self.tables); + self.deallocate_memories(&mut memories); + self.deallocate_tables(&mut tables); } - self.allocator.decrement_core_instance_count(); + self.decrement_core_instance_count(); + Err(e) } } } @@ -560,11 +540,9 @@ fn check_table_init_bounds( for segment in module.table_initialization.segments.iter() { let mut context = ConstEvalContext::new(instance); - let start = unsafe { - const_evaluator - .eval(&mut store, &mut context, &segment.offset) - .expect("const expression should be valid") - }; + let start = const_evaluator + .eval_int(&mut store, &mut context, &segment.offset) + .expect("const expression should be valid"); let start = usize::try_from(start.unwrap_i32().cast_unsigned()).unwrap(); let end = start.checked_add(usize::try_from(segment.elements.len()).unwrap()); @@ -582,7 +560,7 @@ fn check_table_init_bounds( Ok(()) } -fn initialize_tables( +async fn initialize_tables( store: &mut StoreOpaque, context: &mut ConstEvalContext, const_evaluator: &mut ConstExprEvaluator, @@ -595,11 +573,10 @@ fn initialize_tables( TableInitialValue::Null { precomputed: _ } => {} TableInitialValue::Expr(expr) => { - let init = unsafe { - const_evaluator - .eval(&mut store, context, expr) - .expect("const expression should be valid") - }; + let init = const_evaluator + .eval(&mut store, context, expr) + .await + .expect("const expression should be valid"); let idx = module.table_index(table); let id = store.id(); let table = store @@ -619,11 +596,9 @@ fn initialize_tables( // iterates over all segments (Segments mode) or leftover // segments (FuncTable mode) to initialize. 
 for segment in module.table_initialization.segments.iter() {
-        let start = unsafe {
-            const_evaluator
-                .eval(&mut store, context, &segment.offset)
-                .expect("const expression should be valid")
-        };
+        let start = const_evaluator
+            .eval_int(&mut store, context, &segment.offset)
+            .expect("const expression should be valid");
         let start = get_index(
             start,
             store.instance(context.instance).env_module().tables[segment.table_index].idx_type,
@@ -637,7 +612,8 @@ fn initialize_tables(
             start,
             0,
             segment.elements.len(),
-        )?;
+        )
+        .await?;
     }
 
     Ok(())
 }
@@ -658,12 +634,14 @@ fn get_memory_init_start(
     let mut context = ConstEvalContext::new(instance);
     let mut const_evaluator = ConstExprEvaluator::default();
     let mut store = OpaqueRootScope::new(store);
-    unsafe { const_evaluator.eval(&mut store, &mut context, &init.offset) }.map(|v| {
-        get_index(
-            v,
-            store.instance(instance).env_module().memories[init.memory_index].idx_type,
-        )
-    })
+    const_evaluator
+        .eval_int(&mut store, &mut context, &init.offset)
+        .map(|v| {
+            get_index(
+                v,
+                store.instance(instance).env_module().memories[init.memory_index].idx_type,
+            )
+        })
 }
 
 fn check_memory_init_bounds(
@@ -733,7 +711,9 @@ fn initialize_memories(
             expr: &wasmtime_environ::ConstExpr,
         ) -> Option<u64> {
             let mut store = OpaqueRootScope::new(&mut *self.store);
-            let val = unsafe { self.const_evaluator.eval(&mut store, self.context, expr) }
+            let val = self
+                .const_evaluator
+                .eval_int(&mut store, self.context, expr)
                 .expect("const expression should be valid");
             Some(get_index(
                 val,
@@ -803,7 +783,7 @@ fn check_init_bounds(store: &mut StoreOpaque, instance: InstanceId, module: &Mod
     Ok(())
 }
 
-fn initialize_globals(
+async fn initialize_globals(
     store: &mut StoreOpaque,
     context: &mut ConstEvalContext,
     const_evaluator: &mut ConstExprEvaluator,
@@ -817,9 +797,15 @@ fn initialize_globals(
     let mut store = OpaqueRootScope::new(store);
 
     for (index, init) in module.global_initializers.iter() {
-        let val = unsafe {
+        // Attempt a simple, synchronous evaluation before hitting the
+        // general-purpose `.await` point below. This benchmarks ~15% faster
+        // in instantiation than always falling through to the `.await`.
+ let val = if let Some(val) = const_evaluator.try_simple(init) { + val + } else { const_evaluator .eval(&mut store, context, init) + .await .expect("should be a valid const expr") }; @@ -853,7 +839,7 @@ fn initialize_globals( Ok(()) } -pub fn initialize_instance( +pub async fn initialize_instance( store: &mut StoreOpaque, instance: InstanceId, module: &Module, @@ -870,8 +856,8 @@ pub fn initialize_instance( let mut context = ConstEvalContext::new(instance); let mut const_evaluator = ConstExprEvaluator::default(); - initialize_globals(store, &mut context, &mut const_evaluator, module)?; - initialize_tables(store, &mut context, &mut const_evaluator, module)?; + initialize_globals(store, &mut context, &mut const_evaluator, module).await?; + initialize_tables(store, &mut context, &mut const_evaluator, module).await?; initialize_memories(store, &mut context, &mut const_evaluator, &module)?; Ok(()) diff --git a/crates/wasmtime/src/runtime/vm/libcalls.rs b/crates/wasmtime/src/runtime/vm/libcalls.rs index f69e670138d6..3e5318d5899e 100644 --- a/crates/wasmtime/src/runtime/vm/libcalls.rs +++ b/crates/wasmtime/src/runtime/vm/libcalls.rs @@ -482,17 +482,16 @@ fn table_init( dst: u64, src: u64, len: u64, -) -> Result<(), Trap> { +) -> Result<()> { let table_index = TableIndex::from_u32(table_index); let elem_index = ElemIndex::from_u32(elem_index); - instance.table_init( - store.store_opaque_mut(), - table_index, - elem_index, - dst, - src, - len, - ) + + block_on!(store, async |store| { + instance + .table_init(store, table_index, elem_index, dst, src, len) + .await + })??; + Ok(()) } // Implementation of `elem.drop`. @@ -674,18 +673,19 @@ unsafe fn gc_alloc_raw( err.context(e) })?; - let store = store.store_opaque_mut(); - let gc_ref = unsafe { - store.retry_after_gc_maybe_async((), |store, ()| { - store - .unwrap_gc_store_mut() - .alloc_raw(header, layout)? - .map_err(|bytes_needed| crate::GcHeapOutOfMemory::new((), bytes_needed).into()) - })? - }; + block_on!(store, async |store| { + let gc_ref = store + .retry_after_gc_async((), |store, ()| { + store + .unwrap_gc_store_mut() + .alloc_raw(header, layout)? + .map_err(|bytes_needed| crate::GcHeapOutOfMemory::new((), bytes_needed).into()) + }) + .await?; - let raw = store.unwrap_gc_store_mut().expose_gc_ref_to_wasm(gc_ref); - Ok(raw) + let raw = store.unwrap_gc_store_mut().expose_gc_ref_to_wasm(gc_ref); + Ok(raw) + })? } // Intern a `funcref` into the GC heap, returning its `FuncRefTableId`. @@ -764,59 +764,60 @@ unsafe fn array_new_data( use crate::ArrayType; use wasmtime_environ::ModuleInternedTypeIndex; - let store = store.store_opaque_mut(); - let array_type_index = ModuleInternedTypeIndex::from_u32(array_type_index); - let data_index = DataIndex::from_u32(data_index); - - // Calculate the byte-length of the data (as opposed to the element-length - // of the array). - let data_range = instance.wasm_data_range(data_index); - let shared_ty = instance.engine_type_index(array_type_index); - let array_ty = ArrayType::from_shared_type_index(store.engine(), shared_ty); - let one_elem_size = array_ty - .element_type() - .data_byte_size() - .expect("Wasm validation ensures that this type have a defined byte size"); - let byte_len = len - .checked_mul(one_elem_size) - .and_then(|x| usize::try_from(x).ok()) - .ok_or_else(|| Trap::MemoryOutOfBounds)?; - - // Get the data from the segment, checking bounds. - let src = usize::try_from(src).map_err(|_| Trap::MemoryOutOfBounds)?; - let data = instance - .wasm_data(data_range) - .get(src..) 
-        .and_then(|d| d.get(..byte_len))
-        .ok_or_else(|| Trap::MemoryOutOfBounds)?;
-
-    // Allocate the (uninitialized) array.
-    let gc_layout = store
-        .engine()
-        .signatures()
-        .layout(shared_ty)
-        .expect("array types have GC layouts");
-    let array_layout = gc_layout.unwrap_array();
-    let array_ref = unsafe {
-        store.retry_after_gc_maybe_async((), |store, ()| {
-            store
-                .unwrap_gc_store_mut()
-                .alloc_uninit_array(shared_ty, len, &array_layout)?
-                .map_err(|bytes_needed| crate::GcHeapOutOfMemory::new((), bytes_needed).into())
-        })?
-    };
-
-    // Copy the data into the array, initializing it.
-    store
-        .unwrap_gc_store_mut()
-        .gc_object_data(array_ref.as_gc_ref())
-        .copy_from_slice(array_layout.base_size, data);
+    block_on!(store, async |store| {
+        let array_type_index = ModuleInternedTypeIndex::from_u32(array_type_index);
+        let data_index = DataIndex::from_u32(data_index);
+
+        // Calculate the byte-length of the data (as opposed to the element-length
+        // of the array).
+        let data_range = instance.wasm_data_range(data_index);
+        let shared_ty = instance.engine_type_index(array_type_index);
+        let array_ty = ArrayType::from_shared_type_index(store.engine(), shared_ty);
+        let one_elem_size = array_ty
+            .element_type()
+            .data_byte_size()
+            .expect("Wasm validation ensures that this type has a defined byte size");
+        let byte_len = len
+            .checked_mul(one_elem_size)
+            .and_then(|x| usize::try_from(x).ok())
+            .ok_or_else(|| Trap::MemoryOutOfBounds)?;
+
+        // Get the data from the segment, checking bounds.
+        let src = usize::try_from(src).map_err(|_| Trap::MemoryOutOfBounds)?;
+        let data = instance
+            .wasm_data(data_range)
+            .get(src..)
+            .and_then(|d| d.get(..byte_len))
+            .ok_or_else(|| Trap::MemoryOutOfBounds)?;
+
+        // Allocate the (uninitialized) array.
+        let gc_layout = store
+            .engine()
+            .signatures()
+            .layout(shared_ty)
+            .expect("array types have GC layouts");
+        let array_layout = gc_layout.unwrap_array();
+        let array_ref = store
+            .retry_after_gc_async((), |store, ()| {
+                store
+                    .unwrap_gc_store_mut()
+                    .alloc_uninit_array(shared_ty, len, &array_layout)?
+                    .map_err(|bytes_needed| crate::GcHeapOutOfMemory::new((), bytes_needed).into())
+            })
+            .await?;
 
-    // Return the array to Wasm!
-    let raw = store
-        .unwrap_gc_store_mut()
-        .expose_gc_ref_to_wasm(array_ref.into());
-    Ok(raw)
+        // Copy the data into the array, initializing it.
+        store
+            .unwrap_gc_store_mut()
+            .gc_object_data(array_ref.as_gc_ref())
+            .copy_from_slice(array_layout.base_size, data);
+
+        // Return the array to Wasm!
+        let raw = store
+            .unwrap_gc_store_mut()
+            .expose_gc_ref_to_wasm(array_ref.into());
+        Ok(raw)
+    })?
 }
 
 /// Implementation of the `array.init_data` instruction.
@@ -931,47 +932,52 @@ unsafe fn array_new_elem(
     let array_ty = ArrayType::from_shared_type_index(store.engine(), shared_ty);
     let pre = ArrayRefPre::_new(store, array_ty);
 
-    let mut store = OpaqueRootScope::new(&mut **store);
-    // Turn the elements into `Val`s.
-    let mut vals = Vec::with_capacity(usize::try_from(elements.len()).unwrap());
-    match elements {
-        TableSegmentElements::Functions(fs) => {
-            vals.extend(
-                fs.get(src..)
+    block_on!(store, async |store| {
+        let mut store = OpaqueRootScope::new(store);
+        // Turn the elements into `Val`s.
+        let mut vals = Vec::with_capacity(usize::try_from(elements.len()).unwrap());
+        match elements {
+            TableSegmentElements::Functions(fs) => {
+                vals.extend(
+                    fs.get(src..)
+                        .and_then(|s| s.get(..len))
+                        .ok_or_else(|| Trap::TableOutOfBounds)?
+                        .iter()
+                        .map(|f| {
+                            let raw_func_ref = instance.as_mut().get_func_ref(*f);
+                            let func = unsafe {
+                                raw_func_ref.map(|p| Func::from_vm_func_ref(store.id(), p))
+                            };
+                            Val::FuncRef(func)
+                        }),
+                );
+            }
+            TableSegmentElements::Expressions(xs) => {
+                let xs = xs
+                    .get(src..)
                 .and_then(|s| s.get(..len))
-                .ok_or_else(|| Trap::TableOutOfBounds)?
-                .iter()
-                .map(|f| {
-                    let raw_func_ref = instance.as_mut().get_func_ref(*f);
-                    let func =
-                        unsafe { raw_func_ref.map(|p| Func::from_vm_func_ref(store.id(), p)) };
-                    Val::FuncRef(func)
-                }),
-            );
-        }
-        TableSegmentElements::Expressions(xs) => {
-            let xs = xs
-                .get(src..)
-                .and_then(|s| s.get(..len))
-                .ok_or_else(|| Trap::TableOutOfBounds)?;
+                    .ok_or_else(|| Trap::TableOutOfBounds)?;
 
-            let mut const_context = ConstEvalContext::new(instance.id());
-            let mut const_evaluator = ConstExprEvaluator::default();
+                let mut const_context = ConstEvalContext::new(instance.id());
+                let mut const_evaluator = ConstExprEvaluator::default();
 
-            vals.extend(xs.iter().map(|x| unsafe {
-                *const_evaluator
-                    .eval(&mut store, &mut const_context, x)
-                    .expect("const expr should be valid")
-            }));
+                for x in xs.iter() {
+                    let val = *const_evaluator
+                        .eval(&mut store, &mut const_context, x)
+                        .await
+                        .expect("const expr should be valid");
+                    vals.push(val);
+                }
+            }
         }
-    }
 
-    let array = unsafe { ArrayRef::new_fixed_maybe_async(&mut store, &pre, &vals)? };
+        let array = ArrayRef::_new_fixed_async(&mut store, &pre, &vals).await?;
 
-    let mut store = AutoAssertNoGc::new(&mut store);
-    let gc_ref = array.try_clone_gc_ref(&mut store)?;
-    let raw = store.unwrap_gc_store_mut().expose_gc_ref_to_wasm(gc_ref);
-    Ok(raw)
+        let mut store = AutoAssertNoGc::new(&mut store);
+        let gc_ref = array.try_clone_gc_ref(&mut store)?;
+        let raw = store.unwrap_gc_store_mut().expose_gc_ref_to_wasm(gc_ref);
+        Ok(raw)
+    })?
 }
 
 #[cfg(feature = "gc")]
@@ -992,77 +998,83 @@ unsafe fn array_init_elem(
     };
     use wasmtime_environ::{ModuleInternedTypeIndex, TableSegmentElements};
 
-    let mut store = OpaqueRootScope::new(store.store_opaque_mut());
-
-    // Convert the indices into their typed forms.
-    let _array_type_index = ModuleInternedTypeIndex::from_u32(array_type_index);
-    let elem_index = ElemIndex::from_u32(elem_index);
-
-    log::trace!(
-        "array.init_elem(array={array:#x}, dst={dst}, elem_index={elem_index:?}, src={src}, len={len})",
-    );
-
-    // Convert the raw GC ref into a `Rooted<ArrayRef>`.
-    let array = VMGcRef::from_raw_u32(array).ok_or_else(|| Trap::NullReference)?;
-    let array = store.unwrap_gc_store_mut().clone_gc_ref(&array);
-    let array = {
-        let mut no_gc = AutoAssertNoGc::new(&mut store);
-        ArrayRef::from_cloned_gc_ref(&mut no_gc, array)
-    };
-
-    // Bounds check the destination within the array.
-    let array_len = array._len(&store)?;
-    log::trace!("array_len = {array_len}");
-    if dst.checked_add(len).ok_or_else(|| Trap::ArrayOutOfBounds)? > array_len {
-        return Err(Trap::ArrayOutOfBounds.into());
-    }
+    block_on!(store, async |store| {
+        let mut store = OpaqueRootScope::new(store);
+
+        // Convert the indices into their typed forms.
+        let _array_type_index = ModuleInternedTypeIndex::from_u32(array_type_index);
+        let elem_index = ElemIndex::from_u32(elem_index);
+
+        log::trace!(
+            "array.init_elem(array={array:#x}, dst={dst}, elem_index={elem_index:?}, src={src}, len={len})",
+        );
+
+        // Convert the raw GC ref into a `Rooted<ArrayRef>`.
+        let array = VMGcRef::from_raw_u32(array).ok_or_else(|| Trap::NullReference)?;
+        let array = store.unwrap_gc_store_mut().clone_gc_ref(&array);
+        let array = {
+            let mut no_gc = AutoAssertNoGc::new(&mut store);
+            ArrayRef::from_cloned_gc_ref(&mut no_gc, array)
+        };
 
-    // Get the passive element segment.
-    let mut storage = None;
-    let elements = instance.passive_element_segment(&mut storage, elem_index);
+        // Bounds check the destination within the array.
+        let array_len = array._len(&store)?;
+        log::trace!("array_len = {array_len}");
+        if dst.checked_add(len).ok_or_else(|| Trap::ArrayOutOfBounds)? > array_len {
+            return Err(Trap::ArrayOutOfBounds.into());
+        }
 
-    // Convert array offsets into `usize`s.
-    let src = usize::try_from(src).map_err(|_| Trap::TableOutOfBounds)?;
-    let len = usize::try_from(len).map_err(|_| Trap::TableOutOfBounds)?;
+        // Get the passive element segment.
+        let mut storage = None;
+        let elements = instance.passive_element_segment(&mut storage, elem_index);
 
-    // Turn the elements into `Val`s.
-    let vals = match elements {
-        TableSegmentElements::Functions(fs) => fs
-            .get(src..)
-            .and_then(|s| s.get(..len))
-            .ok_or_else(|| Trap::TableOutOfBounds)?
-            .iter()
-            .map(|f| {
-                let raw_func_ref = instance.as_mut().get_func_ref(*f);
-                let func = unsafe { raw_func_ref.map(|p| Func::from_vm_func_ref(store.id(), p)) };
-                Val::FuncRef(func)
-            })
-            .collect::<Vec<_>>(),
-        TableSegmentElements::Expressions(xs) => {
-            let mut const_context = ConstEvalContext::new(instance.id());
-            let mut const_evaluator = ConstExprEvaluator::default();
+        // Convert array offsets into `usize`s.
+        let src = usize::try_from(src).map_err(|_| Trap::TableOutOfBounds)?;
+        let len = usize::try_from(len).map_err(|_| Trap::TableOutOfBounds)?;
 
-            xs.get(src..)
+        // Turn the elements into `Val`s.
+        let vals = match elements {
+            TableSegmentElements::Functions(fs) => fs
+                .get(src..)
                 .and_then(|s| s.get(..len))
                 .ok_or_else(|| Trap::TableOutOfBounds)?
                 .iter()
-                .map(|x| unsafe {
-                    *const_evaluator
-                        .eval(&mut store, &mut const_context, x)
-                        .expect("const expr should be valid")
+                .map(|f| {
+                    let raw_func_ref = instance.as_mut().get_func_ref(*f);
+                    let func =
+                        unsafe { raw_func_ref.map(|p| Func::from_vm_func_ref(store.id(), p)) };
+                    Val::FuncRef(func)
                 })
-                .collect::<Vec<_>>()
-        }
-    };
+                .collect::<Vec<_>>(),
+            TableSegmentElements::Expressions(xs) => {
+                let mut const_context = ConstEvalContext::new(instance.id());
+                let mut const_evaluator = ConstExprEvaluator::default();
+
+                let mut vals = Vec::new();
+                for x in xs
+                    .get(src..)
+                    .and_then(|s| s.get(..len))
+                    .ok_or_else(|| Trap::TableOutOfBounds)?
+                {
+                    let val = *const_evaluator
+                        .eval(&mut store, &mut const_context, x)
+                        .await
+                        .expect("const expr should be valid");
+                    vals.push(val);
+                }
+                vals
+            }
+        };
 
-    // Copy the values into the array.
-    for (i, val) in vals.into_iter().enumerate() {
-        let i = u32::try_from(i).unwrap();
-        let j = dst.checked_add(i).unwrap();
-        array._set(&mut store, j, val)?;
-    }
+        // Copy the values into the array.
+        for (i, val) in vals.into_iter().enumerate() {
+            let i = u32::try_from(i).unwrap();
+            let j = dst.checked_add(i).unwrap();
+            array._set(&mut store, j, val)?;
+        }
 
-    Ok(())
+        Ok(())
+    })?
} // TODO: Specialize this libcall for only non-GC array elements, so we never From a98ed15a89fbdbd7635d56b07f547fb5ee06a708 Mon Sep 17 00:00:00 2001 From: Alex Crichton Date: Tue, 19 Aug 2025 15:57:30 -0700 Subject: [PATCH 2/3] Fix configured build --- crates/wasmtime/src/runtime/gc/enabled/arrayref.rs | 2 -- crates/wasmtime/src/runtime/gc/enabled/structref.rs | 1 - crates/wasmtime/src/runtime/instance.rs | 3 +++ crates/wasmtime/src/runtime/store/gc.rs | 3 --- 4 files changed, 3 insertions(+), 6 deletions(-) diff --git a/crates/wasmtime/src/runtime/gc/enabled/arrayref.rs b/crates/wasmtime/src/runtime/gc/enabled/arrayref.rs index a5bcbd0e8b89..8c5e1f16a79d 100644 --- a/crates/wasmtime/src/runtime/gc/enabled/arrayref.rs +++ b/crates/wasmtime/src/runtime/gc/enabled/arrayref.rs @@ -353,7 +353,6 @@ impl ArrayRef { Self::_new_async(store.as_context_mut().0, allocator, elem, len).await } - #[cfg(feature = "async")] pub(crate) async fn _new_async( store: &mut StoreOpaque, allocator: &ArrayRefPre, @@ -512,7 +511,6 @@ impl ArrayRef { Self::_new_fixed_async(store.as_context_mut().0, allocator, elems).await } - #[cfg(feature = "async")] pub(crate) async fn _new_fixed_async( store: &mut StoreOpaque, allocator: &ArrayRefPre, diff --git a/crates/wasmtime/src/runtime/gc/enabled/structref.rs b/crates/wasmtime/src/runtime/gc/enabled/structref.rs index 53f03601cd07..3c628f716c0e 100644 --- a/crates/wasmtime/src/runtime/gc/enabled/structref.rs +++ b/crates/wasmtime/src/runtime/gc/enabled/structref.rs @@ -280,7 +280,6 @@ impl StructRef { Self::_new_async(store.as_context_mut().0, allocator, fields).await } - #[cfg(feature = "async")] pub(crate) async fn _new_async( store: &mut StoreOpaque, allocator: &StructRefPre, diff --git a/crates/wasmtime/src/runtime/instance.rs b/crates/wasmtime/src/runtime/instance.rs index 0666c9f99355..bc7adff0e862 100644 --- a/crates/wasmtime/src/runtime/instance.rs +++ b/crates/wasmtime/src/runtime/instance.rs @@ -378,12 +378,15 @@ impl Instance { .contains(WasmFeatures::BULK_MEMORY); if store.async_support() { + #[cfg(feature = "async")] store.block_on(|store| { let module = compiled_module.module().clone(); Box::pin( async move { vm::initialize_instance(store, id, &module, bulk_memory).await }, ) })??; + #[cfg(not(feature = "async"))] + unreachable!(); } else { vm::assert_ready(vm::initialize_instance( store, diff --git a/crates/wasmtime/src/runtime/store/gc.rs b/crates/wasmtime/src/runtime/store/gc.rs index 93614a730200..1947b164a370 100644 --- a/crates/wasmtime/src/runtime/store/gc.rs +++ b/crates/wasmtime/src/runtime/store/gc.rs @@ -197,10 +197,7 @@ impl StoreOpaque { }, } } -} -#[cfg(feature = "async")] -impl StoreOpaque { /// Attempt an allocation, if it fails due to GC OOM, then do a GC and /// retry. 
     pub(crate) async fn retry_after_gc_async<T, U>(

From 2605f21d1245efbb7376448360a1789cb268ef27 Mon Sep 17 00:00:00 2001
From: Alex Crichton
Date: Wed, 20 Aug 2025 09:23:15 -0700
Subject: [PATCH 3/3] Undo rebase mistake

---
 .../src/runtime/vm/instance/allocator.rs      | 56 +++++++++++++------
 1 file changed, 38 insertions(+), 18 deletions(-)

diff --git a/crates/wasmtime/src/runtime/vm/instance/allocator.rs b/crates/wasmtime/src/runtime/vm/instance/allocator.rs
index dbc6c44e86af..8e01045458ac 100644
--- a/crates/wasmtime/src/runtime/vm/instance/allocator.rs
+++ b/crates/wasmtime/src/runtime/vm/instance/allocator.rs
@@ -387,28 +387,48 @@ impl dyn InstanceAllocator + '_ {
         self.increment_core_instance_count()?;
 
         let num_defined_memories = module.num_defined_memories();
-        let mut memories = PrimaryMap::with_capacity(num_defined_memories);
-
         let num_defined_tables = module.num_defined_tables();
-        let mut tables = PrimaryMap::with_capacity(num_defined_tables);
-
-        match (|| {
-            self.allocate_memories(&mut request, &mut memories)?;
-            self.allocate_tables(&mut request, &mut tables)?;
-            Ok(())
-        })() {
-            // SAFETY: memories/tables were just allocated from the store within
-            // `request` and this function's own contract requires that the
-            // imports are valid.
-            Ok(_) => unsafe { Ok(Instance::new(request, memories, tables, &module.memories)) },
-            Err(e) => {
+
+        let mut guard = DeallocateOnDrop {
+            run_deallocate: true,
+            memories: PrimaryMap::with_capacity(num_defined_memories),
+            tables: PrimaryMap::with_capacity(num_defined_tables),
+            allocator: self,
+        };
+
+        self.allocate_memories(&mut request, &mut guard.memories)?;
+        self.allocate_tables(&mut request, &mut guard.tables)?;
+        guard.run_deallocate = false;
+        // SAFETY: memories/tables were just allocated from the store within
+        // `request` and this function's own contract requires that the
+        // imports are valid.
+        return unsafe {
+            Ok(Instance::new(
+                request,
+                mem::take(&mut guard.memories),
+                mem::take(&mut guard.tables),
+                &module.memories,
+            ))
+        };
+
+        struct DeallocateOnDrop<'a> {
+            run_deallocate: bool,
+            memories: PrimaryMap<DefinedMemoryIndex, (MemoryAllocationIndex, Memory)>,
+            tables: PrimaryMap<DefinedTableIndex, (TableAllocationIndex, Table)>,
+            allocator: &'a (dyn InstanceAllocator + 'a),
+        }
+
+        impl Drop for DeallocateOnDrop<'_> {
+            fn drop(&mut self) {
+                if !self.run_deallocate {
+                    return;
+                }
                 // SAFETY: these were previously allocated by this allocator
                 unsafe {
-                    self.deallocate_memories(&mut memories);
-                    self.deallocate_tables(&mut tables);
+                    self.allocator.deallocate_memories(&mut self.memories);
+                    self.allocator.deallocate_tables(&mut self.tables);
                 }
-                self.decrement_core_instance_count();
-                Err(e)
+                self.allocator.decrement_core_instance_count();
             }
         }
     }
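
Note on the `eval_int` fast path and the `block_on!` bridge used above:
`vm::assert_ready` is sound in `eval_int` because an integral const-expr
never allocates GC objects, so its evaluation future completes without ever
yielding, while `block_on!` drives futures that may legitimately suspend
(e.g. for a GC pause). The standalone sketch below illustrates both shapes
outside of Wasmtime; `assert_ready`, `block_on`, and `eval_const` here are
illustrative stand-ins, not the crate's actual definitions, and the sketch
assumes Rust 1.85+ for `Waker::noop()`:

    use std::future::Future;
    use std::pin::pin;
    use std::task::{Context, Poll, Waker};

    /// Poll a future exactly once and panic if it suspends, mirroring the
    /// role `vm::assert_ready` plays for `eval_int` above.
    fn assert_ready<F: Future>(fut: F) -> F::Output {
        // No executor is involved: a no-op waker suffices because the
        // future is expected to complete on its first poll.
        let mut cx = Context::from_waker(Waker::noop());
        match pin!(fut).poll(&mut cx) {
            Poll::Ready(value) => value,
            Poll::Pending => panic!("future unexpectedly suspended"),
        }
    }

    /// Drive a future to completion even if it suspends, which is the shape
    /// of the `block_on!` bridge in the libcalls. Wasmtime's real `block_on`
    /// suspends a fiber rather than spinning like this sketch does.
    fn block_on<F: Future>(fut: F) -> F::Output {
        let mut cx = Context::from_waker(Waker::noop());
        let mut fut = pin!(fut);
        loop {
            if let Poll::Ready(value) = fut.as_mut().poll(&mut cx) {
                return value;
            }
            // A real implementation would yield to the embedder here (e.g.
            // to run a GC or honor cancellation) instead of busy-waiting.
            std::thread::yield_now();
        }
    }

    /// Stand-in for an integral const-expr evaluation: no GC allocation and
    /// therefore no yield point, so a single poll always finishes it.
    async fn eval_const() -> i32 {
        42
    }

    fn main() {
        assert_eq!(assert_ready(eval_const()), 42);
        assert_eq!(block_on(eval_const()), 42);
    }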