Update wasmtime to 0.29.0 (#9552)

* Start

* Move to ctx

* Make it compile for now

* More work

* Get rid of state-holder

* Use less Refcells

* 🤦

* Don't use RefCell

* Use names for parameters

* Fixes after merge

* Fixes after merge

* Review feedback

* FMT
This commit is contained in:
Bastian Köcher
2021-09-29 14:30:46 +02:00
committed by GitHub
parent e64693933f
commit 2deed49706
19 changed files with 451 additions and 430 deletions
@@ -19,20 +19,18 @@
//! Defines data and logic needed for interaction with a WebAssembly instance of a substrate
//! runtime module.
use crate::{
imports::Imports,
util::{from_wasmtime_val, into_wasmtime_val},
};
use crate::imports::Imports;
use sc_executor_common::{
error::{Error, Result},
runtime_blob,
util::checked_range,
wasm_runtime::InvokeMethod,
};
use sp_wasm_interface::{Pointer, Value, WordSize};
use std::{marker, slice};
use wasmtime::{Extern, Func, Global, Instance, Memory, Module, Store, Table, Val};
use std::marker;
use wasmtime::{
AsContext, AsContextMut, Extern, Func, Global, Instance, Memory, Module, Table, Val,
};
/// Invoked entrypoint format.
pub enum EntryPointType {
@@ -58,7 +56,12 @@ pub struct EntryPoint {
impl EntryPoint {
/// Call this entry point.
pub fn call(&self, data_ptr: Pointer<u8>, data_len: WordSize) -> Result<u64> {
pub fn call(
&self,
ctx: impl AsContextMut,
data_ptr: Pointer<u8>,
data_len: WordSize,
) -> Result<u64> {
let data_ptr = u32::from(data_ptr);
let data_len = u32::from(data_len);
@@ -68,15 +71,18 @@ impl EntryPoint {
match self.call_type {
EntryPointType::Direct { ref entrypoint } =>
entrypoint.call((data_ptr, data_len)).map_err(handle_trap),
entrypoint.call(ctx, (data_ptr, data_len)).map_err(handle_trap),
EntryPointType::Wrapped { func, ref dispatcher } =>
dispatcher.call((func, data_ptr, data_len)).map_err(handle_trap),
dispatcher.call(ctx, (func, data_ptr, data_len)).map_err(handle_trap),
}
}
pub fn direct(func: wasmtime::Func) -> std::result::Result<Self, &'static str> {
pub fn direct(
func: wasmtime::Func,
ctx: impl AsContext,
) -> std::result::Result<Self, &'static str> {
let entrypoint = func
.typed::<(u32, u32), u64>()
.typed::<(u32, u32), u64, _>(ctx)
.map_err(|_| "Invalid signature for direct entry point")?
.clone();
Ok(Self { call_type: EntryPointType::Direct { entrypoint } })
@@ -85,9 +91,10 @@ impl EntryPoint {
pub fn wrapped(
dispatcher: wasmtime::Func,
func: u32,
ctx: impl AsContext,
) -> std::result::Result<Self, &'static str> {
let dispatcher = dispatcher
.typed::<(u32, u32, u32), u64>()
.typed::<(u32, u32, u32), u64, _>(ctx)
.map_err(|_| "Invalid signature for wrapped entry point")?
.clone();
Ok(Self { call_type: EntryPointType::Wrapped { func, dispatcher } })
@@ -144,8 +151,13 @@ fn extern_func(extern_: &Extern) -> Option<&Func> {
impl InstanceWrapper {
/// Create a new instance wrapper from the given wasm module.
pub fn new(store: &Store, module: &Module, imports: &Imports, heap_pages: u32) -> Result<Self> {
let instance = Instance::new(store, module, &imports.externs)
pub fn new(
module: &Module,
imports: &Imports,
heap_pages: u32,
mut ctx: impl AsContextMut,
) -> Result<Self> {
let instance = Instance::new(&mut ctx, module, &imports.externs)
.map_err(|e| Error::from(format!("cannot instantiate: {}", e)))?;
let memory = match imports.memory_import_index {
@@ -153,51 +165,55 @@ impl InstanceWrapper {
.expect("only memory can be at the `memory_idx`; qed")
.clone(),
None => {
let memory = get_linear_memory(&instance)?;
if !memory.grow(heap_pages).is_ok() {
return Err("failed to increase the linear memory size".into())
let memory = get_linear_memory(&instance, &mut ctx)?;
if !memory.grow(&mut ctx, heap_pages).is_ok() {
return Err("failed to increase the linear memory size".into())
}
memory
},
};
Ok(Self {
table: get_table(&instance),
instance,
memory,
_not_send_nor_sync: marker::PhantomData,
})
let table = get_table(&instance, ctx);
Ok(Self { table, instance, memory, _not_send_nor_sync: marker::PhantomData })
}
/// Resolves a substrate entrypoint by the given name.
///
/// An entrypoint must have a signature `(i32, i32) -> i64`, otherwise this function will return
/// an error.
pub fn resolve_entrypoint(&self, method: InvokeMethod) -> Result<EntryPoint> {
pub fn resolve_entrypoint(
&self,
method: InvokeMethod,
mut ctx: impl AsContextMut,
) -> Result<EntryPoint> {
Ok(match method {
InvokeMethod::Export(method) => {
// Resolve the requested method and verify that it has a proper signature.
let export = self.instance.get_export(method).ok_or_else(|| {
let export = self.instance.get_export(&mut ctx, method).ok_or_else(|| {
Error::from(format!("Exported method {} is not found", method))
})?;
let func = extern_func(&export)
.ok_or_else(|| Error::from(format!("Export {} is not a function", method)))?
.clone();
EntryPoint::direct(func).map_err(|_| {
EntryPoint::direct(func, ctx).map_err(|_| {
Error::from(format!("Exported function '{}' has invalid signature.", method))
})?
},
InvokeMethod::Table(func_ref) => {
let table =
self.instance.get_table("__indirect_function_table").ok_or(Error::NoTable)?;
let val = table.get(func_ref).ok_or(Error::NoTableEntryWithIndex(func_ref))?;
let table = self
.instance
.get_table(&mut ctx, "__indirect_function_table")
.ok_or(Error::NoTable)?;
let val =
table.get(&mut ctx, func_ref).ok_or(Error::NoTableEntryWithIndex(func_ref))?;
let func = val
.funcref()
.ok_or(Error::TableElementIsNotAFunction(func_ref))?
.ok_or(Error::FunctionRefIsNull(func_ref))?
.clone();
EntryPoint::direct(func).map_err(|_| {
EntryPoint::direct(func, ctx).map_err(|_| {
Error::from(format!(
"Function @{} in exported table has invalid signature for direct call.",
func_ref,
@@ -205,10 +221,12 @@ impl InstanceWrapper {
})?
},
InvokeMethod::TableWithWrapper { dispatcher_ref, func } => {
let table =
self.instance.get_table("__indirect_function_table").ok_or(Error::NoTable)?;
let table = self
.instance
.get_table(&mut ctx, "__indirect_function_table")
.ok_or(Error::NoTable)?;
let val = table
.get(dispatcher_ref)
.get(&mut ctx, dispatcher_ref)
.ok_or(Error::NoTableEntryWithIndex(dispatcher_ref))?;
let dispatcher = val
.funcref()
@@ -216,7 +234,7 @@ impl InstanceWrapper {
.ok_or(Error::FunctionRefIsNull(dispatcher_ref))?
.clone();
EntryPoint::wrapped(dispatcher, func).map_err(|_| {
EntryPoint::wrapped(dispatcher, func, ctx).map_err(|_| {
Error::from(format!(
"Function @{} in exported table has invalid signature for wrapped call.",
dispatcher_ref,
@@ -234,17 +252,17 @@ impl InstanceWrapper {
/// Reads `__heap_base: i32` global variable and returns it.
///
/// If it doesn't exist, not a global or of not i32 type returns an error.
pub fn extract_heap_base(&self) -> Result<u32> {
pub fn extract_heap_base(&self, mut ctx: impl AsContextMut) -> Result<u32> {
let heap_base_export = self
.instance
.get_export("__heap_base")
.get_export(&mut ctx, "__heap_base")
.ok_or_else(|| Error::from("__heap_base is not found"))?;
let heap_base_global = extern_global(&heap_base_export)
.ok_or_else(|| Error::from("__heap_base is not a global"))?;
let heap_base = heap_base_global
.get()
.get(&mut ctx)
.i32()
.ok_or_else(|| Error::from("__heap_base is not a i32"))?;
@@ -252,15 +270,15 @@ impl InstanceWrapper {
}
/// Get the value from a global with the given `name`.
pub fn get_global_val(&self, name: &str) -> Result<Option<Value>> {
let global = match self.instance.get_export(name) {
pub fn get_global_val(&self, mut ctx: impl AsContextMut, name: &str) -> Result<Option<Value>> {
let global = match self.instance.get_export(&mut ctx, name) {
Some(global) => global,
None => return Ok(None),
};
let global = extern_global(&global).ok_or_else(|| format!("`{}` is not a global", name))?;
match global.get() {
match global.get(ctx) {
Val::I32(val) => Ok(Some(Value::I32(val))),
Val::I64(val) => Ok(Some(Value::I64(val))),
Val::F32(val) => Ok(Some(Value::F32(val))),
@@ -268,12 +286,17 @@ impl InstanceWrapper {
_ => Err("Unknown value type".into()),
}
}
/// Get a global with the given `name`.
pub fn get_global(&self, ctx: impl AsContextMut, name: &str) -> Option<wasmtime::Global> {
self.instance.get_global(ctx, name)
}
}
/// Extract linear memory instance from the given instance.
fn get_linear_memory(instance: &Instance) -> Result<Memory> {
fn get_linear_memory(instance: &Instance, ctx: impl AsContextMut) -> Result<Memory> {
let memory_export = instance
.get_export("memory")
.get_export(ctx, "memory")
.ok_or_else(|| Error::from("memory is not exported under `memory` name"))?;
let memory = extern_memory(&memory_export)
@@ -284,9 +307,9 @@ fn get_linear_memory(instance: &Instance) -> Result<Memory> {
}
/// Extract the table from the given instance if any.
fn get_table(instance: &Instance) -> Option<Table> {
fn get_table(instance: &Instance, ctx: impl AsContextMut) -> Option<Table> {
instance
.get_export("__indirect_function_table")
.get_export(ctx, "__indirect_function_table")
.as_ref()
.and_then(extern_table)
.cloned()
@@ -297,12 +320,17 @@ impl InstanceWrapper {
/// Read data from a slice of memory into a newly allocated buffer.
///
/// Returns an error if the read would go out of the memory bounds.
pub fn read_memory(&self, source_addr: Pointer<u8>, size: usize) -> Result<Vec<u8>> {
let range = checked_range(source_addr.into(), size, self.memory.data_size())
pub fn read_memory(
&self,
ctx: impl AsContext,
source_addr: Pointer<u8>,
size: usize,
) -> Result<Vec<u8>> {
let range = checked_range(source_addr.into(), size, self.memory.data_size(&ctx))
.ok_or_else(|| Error::Other("memory read is out of bounds".into()))?;
let mut buffer = vec![0; range.len()];
self.read_memory_into(source_addr, &mut buffer)?;
self.read_memory_into(ctx, source_addr, &mut buffer)?;
Ok(buffer)
}
@@ -310,33 +338,35 @@ impl InstanceWrapper {
/// Read data from the instance memory into a slice.
///
/// Returns an error if the read would go out of the memory bounds.
pub fn read_memory_into(&self, source_addr: Pointer<u8>, dest: &mut [u8]) -> Result<()> {
unsafe {
// This should be safe since we don't grow up memory while caching this reference and
// we give up the reference before returning from this function.
let memory = self.memory_as_slice();
pub fn read_memory_into(
&self,
ctx: impl AsContext,
address: Pointer<u8>,
dest: &mut [u8],
) -> Result<()> {
let memory = self.memory.data(ctx.as_context());
let range = checked_range(source_addr.into(), dest.len(), memory.len())
.ok_or_else(|| Error::Other("memory read is out of bounds".into()))?;
dest.copy_from_slice(&memory[range]);
Ok(())
}
let range = checked_range(address.into(), dest.len(), memory.len())
.ok_or_else(|| Error::Other("memory read is out of bounds".into()))?;
dest.copy_from_slice(&memory[range]);
Ok(())
}
/// Write data to the instance memory from a slice.
///
/// Returns an error if the write would go out of the memory bounds.
pub fn write_memory_from(&self, dest_addr: Pointer<u8>, data: &[u8]) -> Result<()> {
unsafe {
// This should be safe since we don't grow up memory while caching this reference and
// we give up the reference before returning from this function.
let memory = self.memory_as_slice_mut();
pub fn write_memory_from(
&self,
mut ctx: impl AsContextMut,
address: Pointer<u8>,
data: &[u8],
) -> Result<()> {
let memory = self.memory.data_mut(ctx.as_context_mut());
let range = checked_range(dest_addr.into(), data.len(), memory.len())
.ok_or_else(|| Error::Other("memory write is out of bounds".into()))?;
memory[range].copy_from_slice(data);
Ok(())
}
let range = checked_range(address.into(), data.len(), memory.len())
.ok_or_else(|| Error::Other("memory write is out of bounds".into()))?;
memory[range].copy_from_slice(data);
Ok(())
}
/// Allocate some memory of the given size. Returns pointer to the allocated memory region.
@@ -345,16 +375,13 @@ impl InstanceWrapper {
/// to get more details.
pub fn allocate(
&self,
mut ctx: impl AsContextMut,
allocator: &mut sc_allocator::FreeingBumpHeapAllocator,
size: WordSize,
) -> Result<Pointer<u8>> {
unsafe {
// This should be safe since we don't grow up memory while caching this reference and
// we give up the reference before returning from this function.
let memory = self.memory_as_slice_mut();
let memory = self.memory.data_mut(ctx.as_context_mut());
allocator.allocate(memory, size).map_err(Into::into)
}
allocator.allocate(memory, size).map_err(Into::into)
}
/// Deallocate the memory pointed by the given pointer.
@@ -362,64 +389,25 @@ impl InstanceWrapper {
/// Returns `Err` in case the given memory region cannot be deallocated.
pub fn deallocate(
&self,
mut ctx: impl AsContextMut,
allocator: &mut sc_allocator::FreeingBumpHeapAllocator,
ptr: Pointer<u8>,
) -> Result<()> {
unsafe {
// This should be safe since we don't grow up memory while caching this reference and
// we give up the reference before returning from this function.
let memory = self.memory_as_slice_mut();
let memory = self.memory.data_mut(ctx.as_context_mut());
allocator.deallocate(memory, ptr).map_err(Into::into)
}
}
/// Returns linear memory of the wasm instance as a slice.
///
/// # Safety
///
/// Wasmtime doesn't provide comprehensive documentation about the exact behavior of the data
pointer. If a dynamic-style heap is used, the base pointer of the heap can change when the
memory grows, so we cannot guarantee the lifetime of the returned slice reference.
unsafe fn memory_as_slice(&self) -> &[u8] {
let ptr = self.memory.data_ptr() as *const _;
let len = self.memory.data_size();
if len == 0 {
&[]
} else {
slice::from_raw_parts(ptr, len)
}
}
/// Returns linear memory of the wasm instance as a slice.
///
/// # Safety
///
/// See `[memory_as_slice]`. In addition to those requirements, since a mutable reference is
/// returned it must be ensured that only one mutable and no shared references to memory exists
/// at the same time.
unsafe fn memory_as_slice_mut(&self) -> &mut [u8] {
let ptr = self.memory.data_ptr();
let len = self.memory.data_size();
if len == 0 {
&mut []
} else {
slice::from_raw_parts_mut(ptr, len)
}
allocator.deallocate(memory, ptr).map_err(Into::into)
}
/// Returns the pointer to the first byte of the linear memory for this instance.
pub fn base_ptr(&self) -> *const u8 {
self.memory.data_ptr()
pub fn base_ptr(&self, ctx: impl AsContext) -> *const u8 {
self.memory.data_ptr(ctx)
}
/// Removes physical backing from the allocated linear memory. This leads to returning the
/// memory back to the system. While the memory is zeroed this is considered as a side-effect
/// and is not relied upon. Thus this function acts as a hint.
pub fn decommit(&self) {
if self.memory.data_size() == 0 {
pub fn decommit(&self, ctx: impl AsContext) {
if self.memory.data_size(&ctx) == 0 {
return
}
@@ -428,8 +416,8 @@ impl InstanceWrapper {
use std::sync::Once;
unsafe {
let ptr = self.memory.data_ptr();
let len = self.memory.data_size();
let ptr = self.memory.data_ptr(&ctx);
let len = self.memory.data_size(ctx);
// Linux handles MADV_DONTNEED reliably. The result is that the given area
// is unmapped and will be zeroed on the next pagefault.
@@ -447,23 +435,3 @@ impl InstanceWrapper {
}
}
}
impl runtime_blob::InstanceGlobals for InstanceWrapper {
type Global = wasmtime::Global;
fn get_global(&self, export_name: &str) -> Self::Global {
self.instance
.get_global(export_name)
.expect("get_global is guaranteed to be called with an export name of a global; qed")
}
fn get_global_value(&self, global: &Self::Global) -> Value {
from_wasmtime_val(global.get())
}
fn set_global_value(&self, global: &Self::Global, value: Value) {
global.set(into_wasmtime_val(value)).expect(
"the value is guaranteed to be of the same value; the global is guaranteed to be mutable; qed",
);
}
}