Resolve semantics for more values.

This commit is contained in:
losfair 2019-08-01 23:28:39 +08:00
parent 4f56703657
commit 9ed5094f86
7 changed files with 236 additions and 8 deletions

View File

@ -367,6 +367,7 @@ impl LLVMBackend {
.expect("size_record not found");
start_entry.populate_msm(
module_info,
code_ptr as usize,
&map,
size_record,

View File

@ -19,7 +19,7 @@ use wasmer_runtime_core::{
module::{ModuleInfo, ModuleInner},
structures::{Map, TypedIndex},
types::{
FuncIndex, FuncSig, GlobalIndex, LocalOrImport, MemoryIndex, SigIndex, TableIndex, Type,
FuncIndex, FuncSig, GlobalIndex, LocalOrImport, MemoryIndex, SigIndex, TableIndex, Type, ImportedFuncIndex,
},
};
use wasmparser::{BinaryReaderError, MemoryImmediate, Operator, Type as WpType};
@ -303,6 +303,7 @@ fn resolve_memory_ptr(
}
fn emit_stack_map(
module_info: &ModuleInfo,
intrinsics: &Intrinsics,
builder: &Builder,
local_function_id: usize,
@ -310,6 +311,7 @@ fn emit_stack_map(
kind: StackmapEntryKind,
locals: &[PointerValue],
state: &State,
ctx: &mut CtxType,
opcode_offset: usize,
) {
let stackmap_id = target.entries.len();
@ -333,6 +335,76 @@ fn emit_stack_map(
params.extend_from_slice(&state.stack);
value_semantics.extend((0..state.stack.len()).map(ValueSemantic::WasmStack));
params.push(ctx.basic());
value_semantics.push(ValueSemantic::Ctx);
if module_info.memories.len() + module_info.imported_memories.len() > 0 {
let cache = ctx.memory(MemoryIndex::new(0), intrinsics);
match cache {
MemoryCache::Dynamic { ptr_to_base_ptr, ptr_to_bounds } => {
params.push(ptr_to_base_ptr.as_basic_value_enum());
value_semantics.push(ValueSemantic::PointerToMemoryBase);
params.push(ptr_to_bounds.as_basic_value_enum());
value_semantics.push(ValueSemantic::PointerToMemoryBound);
}
MemoryCache::Static { base_ptr, bounds } => {
params.push(base_ptr.as_basic_value_enum());
value_semantics.push(ValueSemantic::MemoryBase);
params.push(bounds.as_basic_value_enum());
value_semantics.push(ValueSemantic::MemoryBound);
}
}
}
if module_info.tables.len() + module_info.imported_tables.len() > 0 {
let (ptr_to_base_ptr, ptr_to_bounds) = ctx.table_prepare(TableIndex::new(0), intrinsics);
params.push(ptr_to_base_ptr.as_basic_value_enum());
value_semantics.push(ValueSemantic::PointerToTableBase);
params.push(ptr_to_bounds.as_basic_value_enum());
value_semantics.push(ValueSemantic::PointerToMemoryBound);
}
if module_info.globals.len() + module_info.imported_globals.len() > 0 {
for i in 0..module_info.globals.len() + module_info.imported_globals.len() {
let cache = ctx.global_cache(GlobalIndex::new(i), intrinsics);
match cache {
GlobalCache::Const { value } => {
params.push(value);
value_semantics.push(ValueSemantic::Global(i));
}
GlobalCache::Mut { ptr_to_value } => {
params.push(ptr_to_value.as_basic_value_enum());
value_semantics.push(ValueSemantic::PointerToGlobal(i));
}
}
}
}
if module_info.imported_functions.len() > 0 {
// TODO: Optimize this
for i in 0..module_info.imported_functions.len() {
let (func_ptr, ctx_ptr) = ctx.imported_func(ImportedFuncIndex::new(i), intrinsics);
params.push(func_ptr.as_basic_value_enum());
value_semantics.push(ValueSemantic::ImportedFuncPointer(i));
params.push(ctx_ptr.as_basic_value_enum());
value_semantics.push(ValueSemantic::ImportedFuncCtx(i));
}
}
params.push(ctx.signal_mem().as_basic_value_enum());
value_semantics.push(ValueSemantic::SignalMem);
// TODO: sigindices
assert_eq!(params.len(), value_semantics.len() + 2);
builder.build_call(intrinsics.experimental_stackmap, &params, &state.var_name());
target.entries.push(StackmapEntry {
@ -487,6 +559,7 @@ impl FunctionCodeGenerator<CodegenError> for LLVMFunctionCodeGenerator {
let mut stackmaps = self.stackmaps.borrow_mut();
emit_stack_map(
&module_info,
&intrinsics,
&builder,
self.index,
@ -494,6 +567,7 @@ impl FunctionCodeGenerator<CodegenError> for LLVMFunctionCodeGenerator {
StackmapEntryKind::FunctionHeader,
&self.locals,
&state,
self.ctx.as_mut().unwrap(),
::std::usize::MAX,
);
finalize_opcode_stack_map(
@ -605,6 +679,7 @@ impl FunctionCodeGenerator<CodegenError> for LLVMFunctionCodeGenerator {
if let Some(offset) = opcode_offset {
let mut stackmaps = self.stackmaps.borrow_mut();
emit_stack_map(
&info,
intrinsics,
builder,
self.index,
@ -612,6 +687,7 @@ impl FunctionCodeGenerator<CodegenError> for LLVMFunctionCodeGenerator {
StackmapEntryKind::Loop,
&self.locals,
state,
ctx,
offset,
);
let signal_mem = ctx.signal_mem();
@ -892,6 +968,7 @@ impl FunctionCodeGenerator<CodegenError> for LLVMFunctionCodeGenerator {
if let Some(offset) = opcode_offset {
let mut stackmaps = self.stackmaps.borrow_mut();
emit_stack_map(
&info,
intrinsics,
builder,
self.index,
@ -899,6 +976,7 @@ impl FunctionCodeGenerator<CodegenError> for LLVMFunctionCodeGenerator {
StackmapEntryKind::Trappable,
&self.locals,
state,
ctx,
offset,
);
builder.build_call(intrinsics.trap, &[], "trap");
@ -908,7 +986,7 @@ impl FunctionCodeGenerator<CodegenError> for LLVMFunctionCodeGenerator {
self.index,
&mut *stackmaps,
StackmapEntryKind::Trappable,
offset + 1,
offset,
);
}
@ -1052,6 +1130,7 @@ impl FunctionCodeGenerator<CodegenError> for LLVMFunctionCodeGenerator {
if let Some(offset) = opcode_offset {
let mut stackmaps = self.stackmaps.borrow_mut();
emit_stack_map(
&info,
intrinsics,
builder,
self.index,
@ -1059,6 +1138,7 @@ impl FunctionCodeGenerator<CodegenError> for LLVMFunctionCodeGenerator {
StackmapEntryKind::Call,
&self.locals,
state,
ctx,
offset,
)
}
@ -1241,6 +1321,7 @@ impl FunctionCodeGenerator<CodegenError> for LLVMFunctionCodeGenerator {
if let Some(offset) = opcode_offset {
let mut stackmaps = self.stackmaps.borrow_mut();
emit_stack_map(
&info,
intrinsics,
builder,
self.index,
@ -1248,6 +1329,7 @@ impl FunctionCodeGenerator<CodegenError> for LLVMFunctionCodeGenerator {
StackmapEntryKind::Call,
&self.locals,
state,
ctx,
offset,
)
}
@ -2737,6 +2819,7 @@ impl ModuleCodeGenerator<LLVMFunctionCodeGenerator, LLVMBackend, CodegenError>
if cfg!(test) {
pass_manager.add_verifier_pass();
}
/*
pass_manager.add_lower_expect_intrinsic_pass();
pass_manager.add_scalar_repl_aggregates_pass();
pass_manager.add_instruction_combining_pass();
@ -2749,7 +2832,7 @@ impl ModuleCodeGenerator<LLVMFunctionCodeGenerator, LLVMBackend, CodegenError>
pass_manager.add_reassociate_pass();
pass_manager.add_cfg_simplification_pass();
pass_manager.add_bit_tracking_dce_pass();
pass_manager.add_slp_vectorize_pass();
pass_manager.add_slp_vectorize_pass();*/
pass_manager.run_on_module(&self.module);
// self.module.print_to_stderr();

View File

@ -582,12 +582,11 @@ impl<'a> CtxType<'a> {
})
}
pub fn table(
pub fn table_prepare(
&mut self,
index: TableIndex,
intrinsics: &Intrinsics,
builder: &Builder,
) -> (PointerValue, IntValue) {
) -> (PointerValue, PointerValue) {
let (cached_tables, info, ctx_ptr_value, cache_builder) = (
&mut self.cached_tables,
self.info,
@ -646,6 +645,16 @@ impl<'a> CtxType<'a> {
}
});
(ptr_to_base_ptr, ptr_to_bounds)
}
pub fn table(
&mut self,
index: TableIndex,
intrinsics: &Intrinsics,
builder: &Builder,
) -> (PointerValue, IntValue) {
let (ptr_to_base_ptr, ptr_to_bounds) = self.table_prepare(index, intrinsics);
(
builder
.build_load(ptr_to_base_ptr, "base_ptr")

View File

@ -8,6 +8,13 @@ use wasmer_runtime_core::state::{
FunctionStateMap, MachineStateDiff, MachineValue, ModuleStateMap, OffsetInfo, RegisterIndex,
SuspendOffset, WasmAbstractValue,
};
use wasmer_runtime_core::vm::Ctx;
use wasmer_runtime_core::{
module::ModuleInfo,
types::{GlobalIndex, TableIndex, LocalOrImport},
structures::TypedIndex,
vm,
};
#[derive(Default, Debug, Clone)]
pub struct StackmapRegistry {
@ -29,6 +36,19 @@ pub struct StackmapEntry {
/// Describes what a value recorded in an LLVM stackmap entry *means*, so that
/// the runtime can later reconstruct the abstract machine state (see
/// `StackmapEntry::populate_msm`, which maps each semantic to a `MachineValue`).
/// Indices in parenthesized variants are module-level indices (local count,
/// stack depth, global index, imported-function index, or signature index).
pub enum ValueSemantic {
// A WASM local variable, by local index.
WasmLocal(usize),
// A value on the WASM operand stack, by stack depth.
WasmStack(usize),
// The vmctx pointer itself.
Ctx,
// The interrupt-signal memory location reached through vmctx.
SignalMem,
// Pointer to the slot holding memory 0's base address.
PointerToMemoryBase,
PointerToMemoryBound, // 64-bit
// The loaded base address / bound of memory 0.
MemoryBase,
MemoryBound, // 64-bit
// Pointer to the slot holding a (mutable) global's value, by global index.
PointerToGlobal(usize),
// The loaded value of a (constant) global, by global index.
Global(usize),
// Pointer to the slot holding table 0's base address / element count.
PointerToTableBase,
PointerToTableBound,
// Function pointer / vmctx pointer of an imported function, by import index.
ImportedFuncPointer(usize),
ImportedFuncCtx(usize),
// A dynamic signature index, by sigindex.
DynamicSigindice(usize),
}
#[derive(Debug, Clone, Copy, Eq, PartialEq)]
@ -69,6 +89,7 @@ pub struct MachineStateDiff {
impl StackmapEntry {
pub fn populate_msm(
&self,
module_info: &ModuleInfo,
code_addr: usize,
llvm_map: &StackMap,
size_record: &StkSizeRecord,
@ -86,6 +107,7 @@ impl StackmapEntry {
.checked_sub(code_addr)
.unwrap();
let target_offset = func_base_addr + map_record.instruction_offset as usize;
assert!(self.is_start);
if msm.local_functions.len() == self.local_function_id {
assert_eq!(self.kind, StackmapEntryKind::FunctionHeader);
@ -131,6 +153,31 @@ impl StackmapEntry {
wasm_stack.push(WasmAbstractValue::Runtime);
MachineValue::WasmStack(x)
}
ValueSemantic::Ctx => MachineValue::Vmctx,
ValueSemantic::SignalMem => MachineValue::VmctxDeref(vec![Ctx::offset_interrupt_signal_mem() as usize, 0]),
ValueSemantic::PointerToMemoryBase => MachineValue::VmctxDeref(vec![Ctx::offset_memory_base() as usize]),
ValueSemantic::PointerToMemoryBound => MachineValue::VmctxDeref(vec![Ctx::offset_memory_bound() as usize]),
ValueSemantic::MemoryBase => MachineValue::VmctxDeref(vec![Ctx::offset_memory_base() as usize, 0]),
ValueSemantic::MemoryBound => MachineValue::VmctxDeref(vec![Ctx::offset_memory_bound() as usize, 0]),
ValueSemantic::PointerToGlobal(idx) => MachineValue::VmctxDeref(deref_global(module_info, idx, false)),
ValueSemantic::Global(idx) => MachineValue::VmctxDeref(deref_global(module_info, idx, true)),
ValueSemantic::PointerToTableBase => MachineValue::VmctxDeref(deref_table_base(module_info, 0, false)),
ValueSemantic::PointerToTableBound => MachineValue::VmctxDeref(deref_table_bound(module_info, 0, false)),
ValueSemantic::ImportedFuncPointer(idx) => MachineValue::VmctxDeref(vec![
Ctx::offset_imported_funcs() as usize,
vm::ImportedFunc::size() as usize * idx + vm::ImportedFunc::offset_func() as usize,
0,
]),
ValueSemantic::ImportedFuncCtx(idx) => MachineValue::VmctxDeref(vec![
Ctx::offset_imported_funcs() as usize,
vm::ImportedFunc::size() as usize * idx + vm::ImportedFunc::offset_vmctx() as usize,
0,
]),
ValueSemantic::DynamicSigindice(idx) => MachineValue::VmctxDeref(vec![
Ctx::offset_signatures() as usize,
idx * 4,
0,
]),
};
match loc.ty {
LocationType::Register => {
@ -488,3 +535,60 @@ impl StackMap {
Ok(map)
}
}
/// Builds the vmctx dereference path for global `idx`.
///
/// The returned sequence is consumed by `compute_vmctx_deref`: each element is
/// a byte offset applied after one pointer dereference. The path selects the
/// local or imported globals array on `Ctx`, indexes into the pointer array
/// (8 bytes per entry), and ends with a trailing `0` to land on the global's
/// value slot. When `deref_into_value` is set, one extra `0` step is appended
/// so the path yields the value itself rather than a pointer to it.
fn deref_global(info: &ModuleInfo, idx: usize, deref_into_value: bool) -> Vec<usize> {
    // Pick the Ctx field offset and the index within that array depending on
    // whether the global is defined locally or imported.
    let (field_offset, array_index) = match GlobalIndex::new(idx).local_or_import(info) {
        LocalOrImport::Local(local_idx) => (Ctx::offset_globals() as usize, local_idx.index()),
        LocalOrImport::Import(import_idx) => {
            (Ctx::offset_imported_globals() as usize, import_idx.index())
        }
    };
    let mut path: Vec<usize> = vec![field_offset, array_index * 8, 0];
    if deref_into_value {
        path.push(0);
    }
    path
}
/// Builds the vmctx dereference path to table `idx`'s base-pointer slot.
///
/// Consumed by `compute_vmctx_deref`: each element is a byte offset applied
/// after one pointer dereference. The path selects the local or imported
/// tables array on `Ctx`, indexes the pointer array (8 bytes per entry), and
/// the final `0` lands on the table's base field (offset 0 within the table
/// struct). With `deref_into_value`, an extra `0` step loads the base pointer
/// itself instead of pointing at its slot.
fn deref_table_base(info: &ModuleInfo, idx: usize, deref_into_value: bool) -> Vec<usize> {
    // Resolve whether this table lives in the local or imported tables array.
    let (field_offset, array_index) = match TableIndex::new(idx).local_or_import(info) {
        LocalOrImport::Local(local_idx) => (Ctx::offset_tables() as usize, local_idx.index()),
        LocalOrImport::Import(import_idx) => {
            (Ctx::offset_imported_tables() as usize, import_idx.index())
        }
    };
    let mut path: Vec<usize> = vec![field_offset, array_index * 8, 0];
    if deref_into_value {
        path.push(0);
    }
    path
}
/// Builds the vmctx dereference path to table `idx`'s bound (count) slot.
///
/// Identical in structure to `deref_table_base`, except the final step uses
/// offset `8` — the bound field sits 8 bytes after the base field inside the
/// table struct. With `deref_into_value`, an extra `0` step loads the bound
/// value itself instead of pointing at its slot.
fn deref_table_bound(info: &ModuleInfo, idx: usize, deref_into_value: bool) -> Vec<usize> {
    // Resolve whether this table lives in the local or imported tables array.
    let (field_offset, array_index) = match TableIndex::new(idx).local_or_import(info) {
        LocalOrImport::Local(local_idx) => (Ctx::offset_tables() as usize, local_idx.index()),
        LocalOrImport::Import(import_idx) => {
            (Ctx::offset_imported_tables() as usize, import_idx.index())
        }
    };
    // Final offset 8 selects the bound field rather than the base field.
    let mut path: Vec<usize> = vec![field_offset, array_index * 8, 8];
    if deref_into_value {
        path.push(0);
    }
    path
}

View File

@ -188,6 +188,8 @@ extern "C" fn signal_trap_handler(
let should_unwind = allocate_and_run(TRAP_STACK_SIZE, || {
let mut is_suspend_signal = false;
println!("SIGNAL: {:?} {:?}", Signal::from_c_int(signum), fault.faulting_addr);
match Signal::from_c_int(signum) {
Ok(SIGTRAP) => {
// breakpoint

View File

@ -1,5 +1,6 @@
#![deny(unused_imports, unused_variables, unused_unsafe, unreachable_patterns)]
#![cfg_attr(nightly, feature(unwind_attributes))]
#![feature(core_intrinsics)]
#[cfg(test)]
#[macro_use]

View File

@ -39,6 +39,7 @@ pub struct MachineStateDiff {
pub enum MachineValue {
Undefined,
Vmctx,
VmctxDeref(Vec<usize>),
PreserveRegister(RegisterIndex),
CopyStackBPRelative(i32), // relative to Base Pointer, in byte offset
ExplicitShadow, // indicates that all values above this are above the shadow region
@ -361,6 +362,14 @@ pub mod x64 {
use crate::vm::Ctx;
use std::any::Any;
/// Walks a dereference path (as produced by the `deref_*` helpers /
/// `ValueSemantic` resolution) starting from the vmctx pointer and returns the
/// resulting address as a u64.
///
/// Each element of `seq` performs one step: dereference the current pointer,
/// then advance by that many bytes. Note the walk starts at `&vmctx` — the
/// address of the local holding the vmctx pointer — so the first iteration's
/// dereference loads `vmctx` itself before applying the first offset. A
/// trailing `0` in `seq` therefore means "one more load".
///
/// # Safety
/// Caller must guarantee that every intermediate pointer along `seq` is valid
/// and readable; no bounds or null checks are performed.
unsafe fn compute_vmctx_deref(vmctx: *const Ctx, seq: &[usize]) -> u64 {
// Start at the address of the local so the first step's load yields vmctx.
let mut ptr = &vmctx as *const *const Ctx as *const u8;
for x in seq {
// Load the pointer stored at `ptr`, then offset it by `x` bytes.
ptr = (*(ptr as *const *const u8)).offset(*x as isize);
}
ptr as usize as u64
}
pub fn new_machine_state() -> MachineState {
MachineState {
stack_values: vec![],
@ -427,6 +436,10 @@ pub mod x64 {
stack_offset -= 1;
stack[stack_offset] = vmctx as *mut Ctx as usize as u64;
}
MachineValue::VmctxDeref(ref seq) => {
stack_offset -= 1;
stack[stack_offset] = compute_vmctx_deref(vmctx as *const Ctx, seq);
}
MachineValue::PreserveRegister(index) => {
stack_offset -= 1;
stack[stack_offset] = known_registers[index.0].unwrap_or(0);
@ -481,7 +494,6 @@ pub mod x64 {
}
},
MachineValue::WasmLocal(x) => {
stack_offset -= 1;
match fsm.locals[x] {
WasmAbstractValue::Const(x) => {
assert!(x <= ::std::u32::MAX as u64);
@ -494,6 +506,9 @@ pub mod x64 {
}
}
}
MachineValue::VmctxDeref(ref seq) => {
stack[stack_offset] |= compute_vmctx_deref(vmctx as *const Ctx, seq) & (::std::u32::MAX as u64);
}
MachineValue::Undefined => {}
_ => unimplemented!("TwoHalves.0"),
}
@ -510,7 +525,6 @@ pub mod x64 {
}
},
MachineValue::WasmLocal(x) => {
stack_offset -= 1;
match fsm.locals[x] {
WasmAbstractValue::Const(x) => {
assert!(x <= ::std::u32::MAX as u64);
@ -523,6 +537,9 @@ pub mod x64 {
}
}
}
MachineValue::VmctxDeref(ref seq) => {
stack[stack_offset] |= (compute_vmctx_deref(vmctx as *const Ctx, seq) & (::std::u32::MAX as u64)) << 32;
}
MachineValue::Undefined => {}
_ => unimplemented!("TwoHalves.1"),
}
@ -539,6 +556,9 @@ pub mod x64 {
MachineValue::Vmctx => {
known_registers[i] = Some(vmctx as *mut Ctx as usize as u64);
}
MachineValue::VmctxDeref(ref seq) => {
known_registers[i] = Some(compute_vmctx_deref(vmctx as *const Ctx, seq));
}
MachineValue::WasmStack(x) => match state.wasm_stack[x] {
WasmAbstractValue::Const(x) => {
known_registers[i] = Some(x);
@ -563,6 +583,7 @@ pub mod x64 {
stack_offset -= 1;
stack[stack_offset] = (code_base + activate_offset) as u64; // return address
println!("activating at {:?}", (code_base + activate_offset) as *const u8);
}
stack_offset -= 1;
@ -673,6 +694,7 @@ pub mod x64 {
catch_unsafe_unwind(
|| {
::std::intrinsics::breakpoint();
run_on_alternative_stack(
stack.as_mut_ptr().offset(stack.len() as isize),
stack.as_mut_ptr().offset(stack_offset as isize),
@ -765,6 +787,7 @@ pub mod x64 {
match *v {
MachineValue::Undefined => {}
MachineValue::Vmctx => {}
MachineValue::VmctxDeref(_) => {}
MachineValue::WasmStack(idx) => {
if let Some(v) = known_registers[i] {
wasm_stack[idx] = Some(v);
@ -809,6 +832,9 @@ pub mod x64 {
MachineValue::Vmctx => {
stack = stack.offset(1);
}
MachineValue::VmctxDeref(_) => {
stack = stack.offset(1);
}
MachineValue::PreserveRegister(idx) => {
known_registers[idx.0] = Some(*stack);
stack = stack.offset(1);
@ -834,6 +860,7 @@ pub mod x64 {
MachineValue::WasmLocal(idx) => {
wasm_locals[idx] = Some(v & 0xffffffffu64);
}
MachineValue::VmctxDeref(_) => {}
MachineValue::Undefined => {}
_ => unimplemented!("TwoHalves.0 (read)"),
}
@ -844,6 +871,7 @@ pub mod x64 {
MachineValue::WasmLocal(idx) => {
wasm_locals[idx] = Some(v >> 32);
}
MachineValue::VmctxDeref(_) => {}
MachineValue::Undefined => {}
_ => unimplemented!("TwoHalves.1 (read)"),
}