mirror of https://github.com/fluencelabs/wasmer, synced 2024-12-05 02:20:19 +00:00
Dumping stack through import.
parent ddd0653a25
commit 00b6bf632a
21  examples/single_pass_tests/read_stack.wat  Normal file
@@ -0,0 +1,21 @@
(module
    (type $t1 (func))
    (func $stack_read (import "wasi_unstable" "stack_read") (type $t1))

    (func $_start (export "_start")
        (if (i32.ne (call $fib (i32.const 10)) (i32.const 55))
            (then unreachable)
        )
    )

    (func $fib (param $x i32) (result i32)
        (call $stack_read)
        (if (result i32) (i32.or (i32.eq (get_local $x) (i32.const 1)) (i32.eq (get_local $x) (i32.const 2)))
            (then (i32.const 1))
            (else (i32.add
                (call $fib (i32.sub (get_local $x) (i32.const 1)))
                (call $fib (i32.sub (get_local $x) (i32.const 2)))
            ))
        )
    )
)
@@ -4,7 +4,7 @@ use crate::{
    typed_func::Wasm,
    types::{LocalFuncIndex, SigIndex},
    vm,
    state::FunctionStateMap,
    state::ModuleStateMap,
};

use crate::{
@@ -85,10 +85,9 @@ pub trait RunnableModule: Send + Sync {
        local_func_index: LocalFuncIndex,
    ) -> Option<NonNull<vm::Func>>;

    fn get_func_statemap(
    fn get_module_state_map(
        &self,
        _local_func_index: LocalFuncIndex,
    ) -> Option<FunctionStateMap> { None }
    ) -> Option<ModuleStateMap> { None }

    /// A wasm trampoline contains the necesarry data to dynamically call an exported wasm function.
    /// Given a particular signature index, we are returned a trampoline that is matched with that
@@ -1,3 +1,6 @@
use std::collections::BTreeMap;
use std::ops::Bound::{Included, Unbounded};

#[derive(Copy, Clone, Debug, Eq, PartialEq, Hash)]
pub struct RegisterIndex(pub usize);

@@ -30,6 +33,26 @@ pub struct FunctionStateMap {
    pub initial: MachineState,
    pub shadow_size: usize, // for single-pass backend, 32 bytes on x86-64
    pub diffs: Vec<MachineStateDiff>,
    pub loop_offsets: BTreeMap<usize, usize>, /* offset -> diff_id */
    pub call_offsets: BTreeMap<usize, usize>, /* offset -> diff_id */
}

#[derive(Clone, Debug)]
pub struct ModuleStateMap {
    pub local_functions: BTreeMap<usize, FunctionStateMap>,
    pub total_size: usize,
}

impl ModuleStateMap {
    pub fn lookup_call_ip(&self, ip: usize, base: usize) -> Option<(&FunctionStateMap, MachineState)> {
        if ip < base || ip - base >= self.total_size {
            None
        } else {
            //println!("lookup ip: {} in {:?}", ip - base, self.local_functions);
            let fsm = self.local_functions.range((Unbounded, Included(&(ip - base)))).last().map(|x| x.1).unwrap();
            Some((fsm, fsm.call_offsets.get(&(ip - base)).map(|x| fsm.diffs[*x].build_state(fsm)).unwrap()))
        }
    }
}

impl FunctionStateMap {
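Note (illustrative sketch, not part of this commit): `lookup_call_ip` keys local functions by their start offset and uses a BTreeMap range query to find the function whose code range contains `ip - base`. The same lookup pattern in isolation, with made-up offsets and names:

use std::collections::BTreeMap;
use std::ops::Bound::{Included, Unbounded};

fn main() {
    // Hypothetical function-start offsets within the module's code region.
    let mut functions: BTreeMap<usize, &str> = BTreeMap::new();
    functions.insert(0x000, "func0");
    functions.insert(0x120, "func1");
    functions.insert(0x340, "func2");

    // An instruction pointer relative to the code base; the containing
    // function is the entry with the greatest start offset <= ip.
    let ip = 0x2f8;
    let containing = functions
        .range((Unbounded, Included(&ip)))
        .last()
        .map(|(_, name)| *name);
    assert_eq!(containing, Some("func1"));
}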
@@ -38,6 +61,8 @@ impl FunctionStateMap {
            initial,
            shadow_size,
            diffs: vec![],
            loop_offsets: BTreeMap::new(),
            call_offsets: BTreeMap::new(),
        }
    }
}
@@ -99,6 +124,34 @@ pub mod x64 {
        }
    }

    pub fn read_stack(msm: &ModuleStateMap, code_base: usize, mut stack: *const u64) {
        for i in 0.. {
            unsafe {
                let ret_addr = *stack;
                stack = stack.offset(1);
                let (fsm, state) = match msm.lookup_call_ip(ret_addr as usize, code_base) {
                    Some(x) => x,
                    _ => break
                };
                let mut found_shadow = false;
                for v in &state.stack_values {
                    match *v {
                        MachineValue::ExplicitShadow => {
                            stack = stack.offset((fsm.shadow_size / 8) as isize);
                            found_shadow = true;
                        }
                        _ => {
                            stack = stack.offset(1);
                        }
                    }
                }
                assert_eq!(found_shadow, true);
                stack = stack.offset(1); // RBP
                println!("Frame #{}: {:p} {:?}", i, ret_addr as *const u8, state);
            }
        }
    }

    #[repr(u8)]
    #[derive(Copy, Clone, Debug, Eq, PartialEq, Ord, PartialOrd, Hash)]
    pub enum GPR {
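Note (illustrative sketch, not part of this commit): `read_stack` above walks call frames laid out by the single-pass backend: a return address, then one stack word per recorded machine value (with `ExplicitShadow` standing for a `shadow_size`-byte region), then the saved RBP. A simplified safe-Rust model of skipping one such frame, with stand-in types instead of the crate's:

// All names here are stand-ins, not the crate's real types.
#[derive(Debug, Clone, Copy)]
enum FrameValue {
    ExplicitShadow, // occupies shadow_size bytes
    Other,          // occupies one 8-byte word
}

// Walk one frame starting at `pos` (which points at the return address)
// and return the position of the next frame's return address.
fn skip_frame(stack: &[u64], mut pos: usize, values: &[FrameValue], shadow_size: usize) -> usize {
    let _ret_addr = stack[pos]; // return address pushed by `call`
    pos += 1;
    for v in values {
        match v {
            FrameValue::ExplicitShadow => pos += shadow_size / 8,
            FrameValue::Other => pos += 1,
        }
    }
    pos + 1 // skip the saved RBP
}

fn main() {
    let stack = vec![0u64; 16]; // fake stack words
    let values = [FrameValue::ExplicitShadow, FrameValue::Other, FrameValue::Other];
    // With a 32-byte shadow region: 1 (ret) + 4 (shadow) + 2 (values) + 1 (rbp) = 8 words.
    assert_eq!(skip_frame(&stack, 0, &values, 32), 8);
}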
@@ -8,6 +8,8 @@

use crate::loader::CodeMemory;
use std::{mem, slice};
use crate::vm::Ctx;
use std::fmt;

lazy_static! {
    /// Reads the context pointer from `mm0`.
@@ -98,6 +100,31 @@ impl TrampolineBufferBuilder {
        idx
    }

    pub fn add_context_rsp_trampoline(
        &mut self,
        target: unsafe extern "C" fn (&mut Ctx, *const CallContext, *const u64),
        context: *const CallContext,
    ) -> usize {
        let idx = self.offsets.len();
        self.offsets.push(self.code.len());
        self.code.extend_from_slice(&[
            0x48, 0xbe, // movabsq ?, %rsi
        ]);
        self.code.extend_from_slice(value_to_bytes(&context));
        self.code.extend_from_slice(&[
            0x48, 0x89, 0xe2, // mov %rsp, %rdx
        ]);

        self.code.extend_from_slice(&[
            0x48, 0xb8, // movabsq ?, %rax
        ]);
        self.code.extend_from_slice(value_to_bytes(&target));
        self.code.extend_from_slice(&[
            0xff, 0xe0, // jmpq *%rax
        ]);
        idx
    }

    /// Adds a callinfo trampoline.
    ///
    /// This generates a trampoline function that collects `num_params` parameters into an array
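Note (illustrative sketch, not part of this commit): the bytes above encode `movabsq $context, %rsi; mov %rsp, %rdx; movabsq $target, %rax; jmpq *%rax`, so the target receives the context pointer in %rsi and the caller's stack pointer in %rdx, while %rdi (the `&mut Ctx` argument) passes through unchanged. The 64-bit immediates follow the two-byte movabs opcodes, presumably little-endian as produced by `value_to_bytes`; splicing such an immediate looks roughly like this (helper name is made up):

// Hypothetical helper, not the crate's `value_to_bytes`: append a
// `movabsq $imm64, %rax` (REX.W + B8+rax) with a little-endian immediate.
fn emit_movabs_rax(code: &mut Vec<u8>, imm: u64) {
    code.extend_from_slice(&[0x48, 0xb8]);
    code.extend_from_slice(&imm.to_le_bytes());
}

fn main() {
    let mut code = Vec::new();
    emit_movabs_rax(&mut code, 0x1122_3344_5566_7788);
    // Opcode bytes first, then the immediate in little-endian order.
    assert_eq!(
        code,
        vec![0x48, 0xb8, 0x88, 0x77, 0x66, 0x55, 0x44, 0x33, 0x22, 0x11]
    );
}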
@@ -196,6 +223,12 @@ impl TrampolineBuffer {
    }
}

impl fmt::Debug for TrampolineBuffer {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        write!(f, "TrampolineBuffer {{}}")
    }
}

#[cfg(test)]
mod tests {
    use super::*;
@@ -364,30 +364,35 @@ macro_rules! impl_traits {
    impl< $( $x: WasmExternType, )* Rets: WasmTypeList, Trap: TrapEarly<Rets>, FN: Fn( &mut Ctx $( ,$x )* ) -> Trap> ExternalFunction<($( $x ),*), Rets> for FN {
        #[allow(non_snake_case)]
        fn to_raw(&self) -> NonNull<vm::Func> {
            assert_eq!(mem::size_of::<Self>(), 0, "you cannot use a closure that captures state for `Func`.");
            if mem::size_of::<Self>() == 0 {
                /// This is required for the llvm backend to be able to unwind through this function.
                #[cfg_attr(nightly, unwind(allowed))]
                extern fn wrap<$( $x: WasmExternType, )* Rets: WasmTypeList, Trap: TrapEarly<Rets>, FN: Fn( &mut Ctx $( ,$x )* ) -> Trap>( ctx: &mut Ctx $( ,$x: <$x as WasmExternType>::Native )* ) -> Rets::CStruct {
                    let f: FN = unsafe { mem::transmute_copy(&()) };

                /// This is required for the llvm backend to be able to unwind through this function.
                #[cfg_attr(nightly, unwind(allowed))]
                extern fn wrap<$( $x: WasmExternType, )* Rets: WasmTypeList, Trap: TrapEarly<Rets>, FN: Fn( &mut Ctx $( ,$x )* ) -> Trap>( ctx: &mut Ctx $( ,$x: <$x as WasmExternType>::Native )* ) -> Rets::CStruct {
                    let f: FN = unsafe { mem::transmute_copy(&()) };
                    let err = match panic::catch_unwind(panic::AssertUnwindSafe(|| {
                        f( ctx $( ,WasmExternType::from_native($x) )* ).report()
                    })) {
                        Ok(Ok(returns)) => return returns.into_c_struct(),
                        Ok(Err(err)) => {
                            let b: Box<_> = err.into();
                            b as Box<dyn Any>
                        },
                        Err(err) => err,
                    };

                    let err = match panic::catch_unwind(panic::AssertUnwindSafe(|| {
                        f( ctx $( ,WasmExternType::from_native($x) )* ).report()
                    })) {
                        Ok(Ok(returns)) => return returns.into_c_struct(),
                        Ok(Err(err)) => {
                            let b: Box<_> = err.into();
                            b as Box<dyn Any>
                        },
                        Err(err) => err,
                    };

                    unsafe {
                        (&*ctx.module).runnable_module.do_early_trap(err)
                    unsafe {
                        (&*ctx.module).runnable_module.do_early_trap(err)
                    }
                }
            }

            NonNull::new(wrap::<$( $x, )* Rets, Trap, Self> as *mut vm::Func).unwrap()
            NonNull::new(wrap::<$( $x, )* Rets, Trap, Self> as *mut vm::Func).unwrap()
            } else {
                assert_eq!(mem::size_of::<Self>(), mem::size_of::<usize>(), "you cannot use a closure that captures state for `Func`.");
                NonNull::new(unsafe {
                    ::std::mem::transmute_copy::<_, *mut vm::Func>(self)
                }).unwrap()
            }
        }
    }

@@ -11,7 +11,7 @@ use smallvec::SmallVec;
use std::ptr::NonNull;
use std::{
    any::Any,
    collections::HashMap,
    collections::{HashMap, BTreeMap},
    sync::{Arc, RwLock},
};
use wasmer_runtime_core::{
@@ -29,7 +29,7 @@ use wasmer_runtime_core::{
        TableIndex, Type,
    },
    vm::{self, LocalGlobal, LocalTable, INTERNALS_SIZE},
    state::{FunctionStateMap, x64::X64Register, MachineState, MachineValue, MachineStateDiff},
    state::{ModuleStateMap, FunctionStateMap, x64::X64Register, MachineState, MachineValue, MachineStateDiff, x64::new_machine_state},
};
use wasmparser::{Operator, Type as WpType};

@@ -138,10 +138,24 @@ enum LocalOrTemp {
    Temp,
}

#[derive(Copy, Clone, Debug)]
pub struct InstMetadata {
    offset: usize,
    special: Option<(SpecialInst, usize /* state_diff_id */)>,
}

#[derive(Copy, Clone, Debug)]
pub enum SpecialInst {
    Loop, /* header state */
    Call { mid_offset: usize }, /* inside state */
}

pub struct X64FunctionCode {
    signatures: Arc<Map<SigIndex, FuncSig>>,
    function_signatures: Arc<Map<FuncIndex, SigIndex>>,
    last_state: MachineState,
    fsm: FunctionStateMap,
    inst_metadata: Vec<InstMetadata>,
    offset: usize,

    assembler: Option<Assembler>,
    function_labels: Option<HashMap<usize, (DynamicLabel, Option<AssemblyOffset>)>>,
@@ -175,6 +189,7 @@ pub struct X64ExecutionContext {
    signatures: Arc<Map<SigIndex, FuncSig>>,
    breakpoints: Arc<HashMap<usize, Box<Fn(BkptInfo) + Send + Sync + 'static>>>,
    func_import_count: usize,
    msm: ModuleStateMap,
}

#[derive(Debug)]
@@ -184,6 +199,8 @@ pub struct ControlFrame {
    pub if_else: IfElseState,
    pub returns: SmallVec<[WpType; 1]>,
    pub value_stack_depth: usize,
    pub state: MachineState,
    pub state_diff_id: usize,
}

#[derive(Debug, Copy, Clone)]
@@ -204,6 +221,10 @@ impl RunnableModule for X64ExecutionContext {
            .and_then(|ptr| NonNull::new(ptr.0 as *mut vm::Func))
    }

    fn get_module_state_map(&self) -> Option<ModuleStateMap> {
        Some(self.msm.clone())
    }

    fn get_trampoline(&self, _: &ModuleInfo, sig_index: SigIndex) -> Option<Wasm> {
        use std::ffi::c_void;
        use wasmer_runtime_core::typed_func::WasmTrapInfo;
@@ -360,7 +381,9 @@ impl ModuleCodeGenerator<X64FunctionCode, X64ExecutionContext, CodegenError>
        let code = X64FunctionCode {
            signatures: self.signatures.as_ref().unwrap().clone(),
            function_signatures: self.function_signatures.as_ref().unwrap().clone(),
            last_state: machine.state.clone(),
            fsm: FunctionStateMap::new(new_machine_state(), 32),
            inst_metadata: vec![],
            offset: begin_offset.0,

            assembler: Some(assembler),
            function_labels: Some(function_labels),
@@ -391,6 +414,7 @@ impl ModuleCodeGenerator<X64FunctionCode, X64ExecutionContext, CodegenError>
                });
            }
        };
        let total_size = assembler.get_offset().0;
        let output = assembler.finalize().unwrap();

        let function_labels = if let Some(x) = self.functions.last() {
@@ -429,6 +453,10 @@ impl ModuleCodeGenerator<X64FunctionCode, X64ExecutionContext, CodegenError>
                .collect(),
        );

        let local_function_maps: BTreeMap<usize, FunctionStateMap> = self.functions.iter().map(|x| {
            (x.offset, x.fsm.clone())
        }).collect();

        struct Placeholder;
        impl CacheGen for Placeholder {
            fn generate_cache(&self) -> Result<(Box<[u8]>, Memory), CacheError> {
@@ -446,6 +474,10 @@ impl ModuleCodeGenerator<X64FunctionCode, X64ExecutionContext, CodegenError>
                func_import_count: self.func_import_count,
                function_pointers: out_labels,
                function_offsets: out_offsets,
                msm: ModuleStateMap {
                    local_functions: local_function_maps,
                    total_size,
                },
            },
            Box::new(Placeholder),
        ))
@@ -1060,6 +1092,7 @@ impl X64FunctionCode {
        m: &mut Machine,
        cb: F,
        params: I,
        state_context: Option<(&mut FunctionStateMap, &mut InstMetadata, &[ControlFrame])>,
    ) {
        // Values pushed in this function are above the shadow region.
        m.state.stack_values.push(MachineValue::ExplicitShadow);
@@ -1070,7 +1103,9 @@ impl X64FunctionCode {
        let used_gprs = m.get_used_gprs();
        for r in used_gprs.iter() {
            a.emit_push(Size::S64, Location::GPR(*r));
            m.state.stack_values.push(MachineValue::PreserveRegister(X64Register::GPR(*r).to_index()));
            let content = m.state.register_values[X64Register::GPR(*r).to_index().0];
            assert!(content != MachineValue::Undefined);
            m.state.stack_values.push(content);
        }

        // Save used XMM registers.
@@ -1093,7 +1128,9 @@ impl X64FunctionCode {
            );
        }
        for r in used_xmms.iter().rev() {
            m.state.stack_values.push(MachineValue::PreserveRegister(X64Register::XMM(*r).to_index()));
            let content = m.state.register_values[X64Register::XMM(*r).to_index().0];
            assert!(content != MachineValue::Undefined);
            m.state.stack_values.push(content);
        }
    }

@@ -1129,16 +1166,20 @@ impl X64FunctionCode {
                Location::Memory(_, _) => {
                    match *param {
                        Location::GPR(x) => {
                            m.state.stack_values.push(MachineValue::PreserveRegister(X64Register::GPR(x).to_index()));
                            let content = m.state.register_values[X64Register::GPR(x).to_index().0];
                            //assert!(content != MachineValue::Undefined);
                            m.state.stack_values.push(content);
                        }
                        Location::XMM(x) => {
                            m.state.stack_values.push(MachineValue::PreserveRegister(X64Register::XMM(x).to_index()));
                            let content = m.state.register_values[X64Register::XMM(x).to_index().0];
                            //assert!(content != MachineValue::Undefined);
                            m.state.stack_values.push(content);
                        }
                        Location::Memory(reg, offset) => {
                            if reg != GPR::RBP {
                                unreachable!();
                            }
                            m.state.stack_values.push(MachineValue::CopyStackBPRelative(offset));
                            m.state.stack_values.push(MachineValue::CopyStackBPRelative(offset)); // TODO: Read value at this offset
                        }
                        _ => {
                            m.state.stack_values.push(MachineValue::Undefined);
@@ -1202,6 +1243,14 @@ impl X64FunctionCode {

        cb(a);

        // Offset needs to be after the 'call' instruction.
        if let Some((fsm, inst_metadata, control_stack)) = state_context {
            let state_diff_id = Self::get_state_diff(m, fsm, control_stack);
            let offset = a.get_offset().0;
            inst_metadata.special = Some((SpecialInst::Call { mid_offset: offset }, state_diff_id));
            fsm.call_offsets.insert(offset, state_diff_id);
        }

        // Restore stack.
        if stack_offset > 0 {
            a.emit_add(
@@ -1251,8 +1300,9 @@ impl X64FunctionCode {
        m: &mut Machine,
        label: DynamicLabel,
        params: I,
        state_context: Option<(&mut FunctionStateMap, &mut InstMetadata, &[ControlFrame])>,
    ) {
        Self::emit_call_sysv(a, m, |a| a.emit_call_label(label), params)
        Self::emit_call_sysv(a, m, |a| a.emit_call_label(label), params, state_context)
    }

    /// Emits a memory operation.
@@ -1446,6 +1496,15 @@ impl X64FunctionCode {
        m.release_temp_xmm(tmp_x);
        m.release_temp_gpr(tmp);
    }

    pub fn get_state_diff(m: &Machine, fsm: &mut FunctionStateMap, control_stack: &[ControlFrame]) -> usize {
        let last_frame = control_stack.last().unwrap();
        let mut diff = m.state.diff(&last_frame.state);
        diff.last = Some(last_frame.state_diff_id);
        let id = fsm.diffs.len();
        fsm.diffs.push(diff);
        id
    }
}

impl FunctionCodeGenerator<CodegenError> for X64FunctionCode {
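Note (illustrative sketch, not part of this commit): `get_state_diff` records the current machine state as a delta against the innermost control frame's state and links it to that frame's diff via `diff.last`, so a full state can presumably be rebuilt later by walking the chain of diffs (which is what `build_state` is used for in `state.rs`). A simplified model of that chaining, with made-up field contents:

// Stand-in types; the real MachineStateDiff/MachineState carry register and
// stack-value deltas rather than plain numbers.
struct Diff {
    last: Option<usize>, // parent diff id, as in `diff.last = Some(...)`
    pushed: Vec<u32>,    // values this diff adds on top of its parent
}

fn build_state(diffs: &[Diff], id: usize) -> Vec<u32> {
    // Collect the chain from `id` back to the root, then replay it forward.
    let mut chain = vec![id];
    while let Some(parent) = diffs[*chain.last().unwrap()].last {
        chain.push(parent);
    }
    let mut state = Vec::new();
    for &i in chain.iter().rev() {
        state.extend_from_slice(&diffs[i].pushed);
    }
    state
}

fn main() {
    let diffs = vec![
        Diff { last: None, pushed: vec![1] },        // id 0: initial state
        Diff { last: Some(0), pushed: vec![2, 3] },  // id 1: block entry
        Diff { last: Some(1), pushed: vec![4] },     // id 2: at a call site
    ];
    assert_eq!(build_state(&diffs, 2), vec![1, 2, 3, 4]);
}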
@@ -1487,7 +1546,11 @@ impl FunctionCodeGenerator<CodegenError> for X64FunctionCode {
            .machine
            .init_locals(a, self.num_locals, self.num_params);

        println!("initial state = {:?}", self.machine.state);
        let diff = self.machine.state.diff(&new_machine_state());
        let state_diff_id = self.fsm.diffs.len();
        self.fsm.diffs.push(diff);

        //println!("initial state = {:?}", self.machine.state);

        a.emit_sub(Size::S64, Location::Imm32(32), Location::GPR(GPR::RSP)); // simulate "red zone" if not supported by the platform

@@ -1497,6 +1560,8 @@ impl FunctionCodeGenerator<CodegenError> for X64FunctionCode {
            if_else: IfElseState::None,
            returns: self.returns.clone(),
            value_stack_depth: 0,
            state: self.machine.state.clone(),
            state_diff_id,
        });
        Ok(())
    }
@@ -1625,6 +1690,11 @@ impl FunctionCodeGenerator<CodegenError> for X64FunctionCode {
                return Ok(());
            }
        };

        let mut inst_metadata = InstMetadata {
            offset: a.get_offset().0,
            special: None,
        };
        match *op {
            Operator::GetGlobal { global_index } => {
                let global_index = global_index as usize;
@@ -3234,7 +3304,9 @@ impl FunctionCodeGenerator<CodegenError> for X64FunctionCode {
                    &mut self.machine,
                    label,
                    params.iter().map(|&(x, _)| x),
                    Some((&mut self.fsm, &mut inst_metadata, &self.control_stack)),
                );
                assert!(inst_metadata.special.is_some());

                self.machine.release_locations_only_stack(a, &released);

@@ -3347,7 +3419,9 @@ impl FunctionCodeGenerator<CodegenError> for X64FunctionCode {
                    ));
                    },
                    params.iter().map(|&(x, _)| x),
                    Some((&mut self.fsm, &mut inst_metadata, &self.control_stack)),
                );
                assert!(inst_metadata.special.is_some());

                self.machine.release_locations_only_stack(a, &released);

@@ -3373,6 +3447,8 @@ impl FunctionCodeGenerator<CodegenError> for X64FunctionCode {
                        _ => smallvec![ty],
                    },
                    value_stack_depth: self.value_stack.len(),
                    state: self.machine.state.clone(),
                    state_diff_id: Self::get_state_diff(&self.machine, &mut self.fsm, &self.control_stack),
                });
                Self::emit_relaxed_binop(
                    a,
@@ -3472,10 +3548,13 @@ impl FunctionCodeGenerator<CodegenError> for X64FunctionCode {
                        _ => smallvec![ty],
                    },
                    value_stack_depth: self.value_stack.len(),
                    state: self.machine.state.clone(),
                    state_diff_id: Self::get_state_diff(&self.machine, &mut self.fsm, &self.control_stack),
                });
            }
            Operator::Loop { ty } => {
                let label = a.get_label();
                let state_diff_id = Self::get_state_diff(&self.machine, &mut self.fsm, &self.control_stack);
                self.control_stack.push(ControlFrame {
                    label: label,
                    loop_like: true,
@@ -3485,7 +3564,11 @@ impl FunctionCodeGenerator<CodegenError> for X64FunctionCode {
                        _ => smallvec![ty],
                    },
                    value_stack_depth: self.value_stack.len(),
                    state: self.machine.state.clone(),
                    state_diff_id,
                });
                inst_metadata.special = Some((SpecialInst::Loop, state_diff_id));
                self.fsm.loop_offsets.insert(a.get_offset().0, state_diff_id);
                a.emit_label(label);
            }
            Operator::Nop => {}
@@ -3511,6 +3594,7 @@ impl FunctionCodeGenerator<CodegenError> for X64FunctionCode {
                        a.emit_call_location(Location::GPR(GPR::RAX));
                    },
                    ::std::iter::once(Location::Imm32(memory_index.index() as u32)),
                    None,
                );
                let ret = self.machine.acquire_locations(a, &[(WpType::I64, MachineValue::WasmStack(self.value_stack.len()))], false)[0];
                self.value_stack.push((ret, LocalOrTemp::Temp));
@@ -3546,6 +3630,7 @@ impl FunctionCodeGenerator<CodegenError> for X64FunctionCode {
                    },
                    ::std::iter::once(Location::Imm32(memory_index.index() as u32))
                        .chain(::std::iter::once(param_pages)),
                    None,
                );

                if param_pages_lot == LocalOrTemp::Temp {
@@ -4367,9 +4452,7 @@ impl FunctionCodeGenerator<CodegenError> for X64FunctionCode {
            }
        }

        let diff = self.machine.state.diff(&self.last_state);
        println!("Event {:?} caused state difference {:?}", ev, diff);
        self.last_state = self.machine.state.clone();
        self.inst_metadata.push(inst_metadata);

        Ok(())
    }
@@ -54,7 +54,7 @@ pub trait Emitter {
    type Offset;

    fn get_label(&mut self) -> Self::Label;
    fn get_offset(&mut self) -> Self::Offset;
    fn get_offset(&self) -> Self::Offset;

    fn emit_u64(&mut self, x: u64);

@@ -455,7 +455,7 @@ impl Emitter for Assembler {
        self.new_dynamic_label()
    }

    fn get_offset(&mut self) -> AssemblyOffset {
    fn get_offset(&self) -> AssemblyOffset {
        self.offset()
    }

@@ -20,7 +20,10 @@ use std::path::PathBuf;

pub use self::utils::is_wasi_module;

use wasmer_runtime_core::{func, import::ImportObject, imports};
use wasmer_runtime_core::{func, import::ImportObject, imports, trampoline::{TrampolineBufferBuilder, CallContext}};
use wasmer_runtime_core::state::{x64::read_stack};
use wasmer_runtime_core::vm::Ctx;
use std::rc::Rc;

/// This is returned in the Box<dyn Any> RuntimeError::Error variant.
/// Use `downcast` or `downcast_ref` to retrieve the `ExitCode`.
@@ -34,6 +37,22 @@ pub fn generate_import_object(
    preopened_files: Vec<String>,
    mapped_dirs: Vec<(String, PathBuf)>,
) -> ImportObject {
    unsafe extern "C" fn read_stack(ctx: &mut Ctx, _: *const CallContext, stack: *const u64) {
        let msm = (*ctx.module).runnable_module.get_module_state_map().unwrap();
        let code_base = (*ctx.module).runnable_module.get_code().unwrap().as_ptr() as usize;
        self::read_stack(&msm, code_base, stack);
    }

    let mut builder = TrampolineBufferBuilder::new();
    let idx = builder.add_context_rsp_trampoline(read_stack, ::std::ptr::null());
    let trampolines = builder.build();

    let read_stack_indirect: fn (&mut Ctx) = unsafe {
        ::std::mem::transmute(trampolines.get_trampoline(idx))
    };

    let trampolines = Rc::new(trampolines);

    let state_gen = move || {
        fn state_destructor(data: *mut c_void) {
            unsafe {
@@ -45,6 +64,7 @@ pub fn generate_import_object(
            fs: WasiFs::new(&preopened_files, &mapped_dirs).unwrap(),
            args: &args[..],
            envs: &envs[..],
            trampolines: trampolines.clone(),
        });

        (
@@ -56,6 +76,7 @@ pub fn generate_import_object(
        // This generates the wasi state.
        state_gen,
        "wasi_unstable" => {
            "stack_read" => func!(read_stack_indirect),
            "args_get" => func!(args_get),
            "args_sizes_get" => func!(args_sizes_get),
            "clock_res_get" => func!(clock_res_get),
@@ -11,8 +11,10 @@ use std::{
    io::{self, Read, Seek, Write},
    path::PathBuf,
    time::SystemTime,
    rc::Rc,
};
use wasmer_runtime_core::debug;
use wasmer_runtime_core::trampoline::{TrampolineBuffer};

pub const MAX_SYMLINKS: usize = 100;

@@ -460,6 +462,7 @@ pub struct WasiState<'a> {
    pub fs: WasiFs,
    pub args: &'a [Vec<u8>],
    pub envs: &'a [Vec<u8>],
    pub trampolines: Rc<TrampolineBuffer>,
}

pub fn host_file_type_to_wasi_file_type(file_type: fs::FileType) -> __wasi_filetype_t {