use std::collections::BTreeMap;
use std::ops::Bound::{Included, Unbounded};
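/// Index of a machine register in the backend's unified register numbering
/// (for x86-64, GPRs occupy indices 0..16 and XMM registers 16..24; see
/// `X64Register::to_index`).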
#[derive(Copy, Clone, Debug, Eq, PartialEq, Hash)]
pub struct RegisterIndex(pub usize);
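/// Abstract value of a WebAssembly stack slot or local: either a constant known
/// at compile time or a value only available at runtime.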
#[derive(Copy, Clone, Debug, Eq, PartialEq, Hash)]
pub enum WasmAbstractValue {
    Runtime,
    Const(u64),
}
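/// Snapshot of the machine state at one point in a compiled function: the
/// layout of native stack slots, the contents of registers, and the abstract
/// WebAssembly value stack.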
#[derive(Clone, Debug)]
pub struct MachineState {
    pub stack_values: Vec<MachineValue>,
    pub register_values: Vec<MachineValue>,
    pub wasm_stack: Vec<WasmAbstractValue>,
    pub wasm_stack_private_depth: usize,
    pub wasm_inst_offset: usize,
}
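/// Incremental change relative to a previous `MachineStateDiff` (indexed by
/// `last` into `FunctionStateMap::diffs`); the full `MachineState` can be
/// recovered with `MachineStateDiff::build_state`.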
#[derive(Clone, Debug, Default)]
pub struct MachineStateDiff {
    pub last: Option<usize>,
    pub stack_push: Vec<MachineValue>,
    pub stack_pop: usize,
    pub reg_diff: Vec<(RegisterIndex, MachineValue)>,
    pub wasm_stack_push: Vec<WasmAbstractValue>,
    pub wasm_stack_pop: usize,
    pub wasm_stack_private_depth: usize, // absolute value; not a diff.
    pub wasm_inst_offset: usize,         // absolute value; not a diff.
}
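/// What a native stack slot or register holds at a given point in the function.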
#[derive(Copy, Clone, Debug, Eq, PartialEq, Hash)]
pub enum MachineValue {
    Undefined,
    Vmctx,
    PreserveRegister(RegisterIndex),
    CopyStackBPRelative(i32), // relative to Base Pointer, in byte offset
    ExplicitShadow,           // indicates that all values above this are above the shadow region
    WasmStack(usize),
    WasmLocal(usize),
}
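/// Per-function state map: the initial machine state, the recorded state diffs,
/// and lookup tables from native code offsets to the diff describing the state
/// at loops, call sites, and trappable instructions.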
#[derive(Clone, Debug)]
pub struct FunctionStateMap {
    pub initial: MachineState,
    pub local_function_id: usize,
    pub locals: Vec<WasmAbstractValue>,
    pub shadow_size: usize, // for single-pass backend, 32 bytes on x86-64
    pub diffs: Vec<MachineStateDiff>,
    pub wasm_offset_to_target_offset: Vec<usize>,
    pub loop_offsets: BTreeMap<usize, usize>,      /* offset -> diff_id */
    pub call_offsets: BTreeMap<usize, usize>,      /* offset -> diff_id */
    pub trappable_offsets: BTreeMap<usize, usize>, /* offset -> diff_id */
}
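/// Module-wide state map, keyed by each local function's start offset relative
/// to the code base; `total_size` is the total size of the generated code.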
#[derive(Clone, Debug)]
pub struct ModuleStateMap {
    pub local_functions: BTreeMap<usize, FunctionStateMap>,
    pub total_size: usize,
}
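/// A single recovered WebAssembly call frame; `None` entries are stack slots or
/// locals whose values could not be recovered.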
#[derive(Clone, Debug, Serialize, Deserialize)]
pub struct WasmFunctionStateDump {
    pub local_function_id: usize,
    pub wasm_inst_offset: usize,
    pub stack: Vec<Option<u64>>,
    pub locals: Vec<Option<u64>>,
}
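/// The recovered WebAssembly call frames, innermost frame first.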
#[derive(Clone, Debug, Serialize, Deserialize)]
pub struct ExecutionStateImage {
    pub frames: Vec<WasmFunctionStateDump>,
}
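/// Serializable snapshot of a running instance: linear memory contents, local
/// global values, and the recovered execution state.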
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct InstanceImage {
    pub memory: Option<Vec<u8>>,
    pub globals: Vec<u64>,
    pub execution_state: ExecutionStateImage,
}
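// The lookups below take an absolute instruction pointer plus the code base
// address and, if the address falls into this module, return the containing
// function's state map together with the machine state reconstructed for that
// call site or trappable instruction.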
impl ModuleStateMap {
    fn lookup_call_ip(
        &self,
        ip: usize,
        base: usize,
    ) -> Option<(&FunctionStateMap, MachineState)> {
        if ip < base || ip - base >= self.total_size {
            None
        } else {
            //println!("lookup ip: {} in {:?}", ip - base, self.local_functions);
            let (_, fsm) = self
                .local_functions
                .range((Unbounded, Included(&(ip - base))))
                .last()
                .unwrap();

            match fsm.call_offsets.get(&(ip - base)) {
                Some(x) => Some((fsm, fsm.diffs[*x].build_state(fsm))),
                None => None,
            }
        }
    }

    fn lookup_trappable_ip(
        &self,
        ip: usize,
        base: usize,
    ) -> Option<(&FunctionStateMap, MachineState)> {
        if ip < base || ip - base >= self.total_size {
            None
        } else {
            //println!("lookup ip: {} in {:?}", ip - base, self.local_functions);
            let (_, fsm) = self
                .local_functions
                .range((Unbounded, Included(&(ip - base))))
                .last()
                .unwrap();

            match fsm.trappable_offsets.get(&(ip - base)) {
                Some(x) => Some((fsm, fsm.diffs[*x].build_state(fsm))),
                None => None,
            }
        }
    }
}
impl FunctionStateMap {
    pub fn new(
        initial: MachineState,
        local_function_id: usize,
        shadow_size: usize,
        locals: Vec<WasmAbstractValue>,
    ) -> FunctionStateMap {
        FunctionStateMap {
            initial,
            local_function_id,
            shadow_size,
            locals,
            diffs: vec![],
            wasm_offset_to_target_offset: Vec::new(),
            loop_offsets: BTreeMap::new(),
            call_offsets: BTreeMap::new(),
            trappable_offsets: BTreeMap::new(),
        }
    }
}
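// `MachineState::diff` records only what changed relative to an older state, and
// `MachineStateDiff::build_state` replays the chain of diffs (following `last`)
// on top of `FunctionStateMap::initial` to recover the full state. Rough sketch
// of the intended flow, with hypothetical variable names:
//
//     let diff = new_state.diff(&old_state);            // pushed into fsm.diffs
//     let state = fsm.diffs[diff_id].build_state(&fsm); // full MachineState again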
impl MachineState {
    pub fn diff(&self, old: &MachineState) -> MachineStateDiff {
        let first_diff_stack_depth: usize = self
            .stack_values
            .iter()
            .zip(old.stack_values.iter())
            .enumerate()
            .find(|&(_, (&a, &b))| a != b)
            .map(|x| x.0)
            .unwrap_or(old.stack_values.len().min(self.stack_values.len()));
        assert_eq!(self.register_values.len(), old.register_values.len());
        let reg_diff: Vec<_> = self
            .register_values
            .iter()
            .zip(old.register_values.iter())
            .enumerate()
            .filter(|&(_, (&a, &b))| a != b)
            .map(|(i, (&a, _))| (RegisterIndex(i), a))
            .collect();
        let first_diff_wasm_stack_depth: usize = self
            .wasm_stack
            .iter()
            .zip(old.wasm_stack.iter())
            .enumerate()
            .find(|&(_, (&a, &b))| a != b)
            .map(|x| x.0)
            .unwrap_or(old.wasm_stack.len().min(self.wasm_stack.len()));
        MachineStateDiff {
            last: None,
            stack_push: self.stack_values[first_diff_stack_depth..].to_vec(),
            stack_pop: old.stack_values.len() - first_diff_stack_depth,
            reg_diff: reg_diff,
            wasm_stack_push: self.wasm_stack[first_diff_wasm_stack_depth..].to_vec(),
            wasm_stack_pop: old.wasm_stack.len() - first_diff_wasm_stack_depth,
            wasm_stack_private_depth: self.wasm_stack_private_depth,
            wasm_inst_offset: self.wasm_inst_offset,
        }
    }
}
impl MachineStateDiff {
    pub fn build_state(&self, m: &FunctionStateMap) -> MachineState {
        let mut chain: Vec<&MachineStateDiff> = vec![];
        chain.push(self);
        let mut current = self.last;
        while let Some(x) = current {
            let that = &m.diffs[x];
            current = that.last;
            chain.push(that);
        }
        chain.reverse();
        let mut state = m.initial.clone();
        for x in chain {
            for _ in 0..x.stack_pop {
                state.stack_values.pop().unwrap();
            }
            for v in &x.stack_push {
                state.stack_values.push(*v);
            }
            for &(index, v) in &x.reg_diff {
                state.register_values[index.0] = v;
            }
            for _ in 0..x.wasm_stack_pop {
                state.wasm_stack.pop().unwrap();
            }
            for v in &x.wasm_stack_push {
                state.wasm_stack.push(*v);
            }
        }
        state.wasm_stack_private_depth = self.wasm_stack_private_depth;
        state.wasm_inst_offset = self.wasm_inst_offset;
        state
    }
}
impl ExecutionStateImage {
    pub fn from_bytes(input: &[u8]) -> Option<ExecutionStateImage> {
        use bincode::deserialize;
        match deserialize(input) {
            Ok(x) => Some(x),
            Err(_) => None,
        }
    }
}
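// x86-64-specific support: rebuilding a native stack from an `InstanceImage` and
// resuming on it, and walking a native stack back into an `ExecutionStateImage`.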
#[cfg(all(unix, target_arch = "x86_64"))]
pub mod x64 {
    extern "C" {
        fn run_on_wasm_stack(stack_end: *mut u64, stack_begin: *mut u64) -> u64;
    }

    use super::*;
    use crate::vm::Ctx;
    use crate::types::LocalGlobalIndex;
    use crate::structures::TypedIndex;
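    /// An empty machine state: no stack values and all 16 GPRs plus 8 XMM
    /// registers marked `Undefined`.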
    pub fn new_machine_state() -> MachineState {
        MachineState {
            stack_values: vec![],
            register_values: vec![MachineValue::Undefined; 16 + 8],
            wasm_stack: vec![],
            wasm_stack_private_depth: 0,
            wasm_inst_offset: ::std::usize::MAX,
        }
    }
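    /// Deserializes a `bincode`-encoded `InstanceImage` and forwards to
    /// `invoke_call_return_on_stack`.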
    pub unsafe fn invoke_call_return_on_stack_raw_image(
        msm: &ModuleStateMap,
        code_base: usize,
        image: &[u8],
        vmctx: &mut Ctx,
    ) -> u64 {
        use bincode::deserialize;
        let image: InstanceImage = deserialize(image).unwrap();
        invoke_call_return_on_stack(msm, code_base, &image, vmctx)
    }
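    /// Rebuilds a native call stack from `image`, frame by frame from the
    /// outermost call inward, using each function's `FunctionStateMap` to lay
    /// out stack slots and registers, then restores memory and globals and
    /// switches onto the rebuilt stack via `run_on_wasm_stack`.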
    #[warn(unused_variables)]
    pub unsafe fn invoke_call_return_on_stack(
        msm: &ModuleStateMap,
        code_base: usize,
        image: &InstanceImage,
        vmctx: &mut Ctx,
    ) -> u64 {
        let mut stack: Vec<u64> = vec![0; 1048576 * 8 / 8]; // 8MB stack
        let mut stack_offset: usize = stack.len();

        stack_offset -= 3; // placeholder for call return

        let mut last_stack_offset: u64 = 0; // rbp

        let mut known_registers: [Option<u64>; 24] = [None; 24];

        let local_functions_vec: Vec<&FunctionStateMap> =
            msm.local_functions.iter().map(|(_, v)| v).collect();

        // Bottom to top
        for f in image.execution_state.frames.iter().rev() {
            let fsm = local_functions_vec[f.local_function_id];
            let call_begin_offset = fsm.wasm_offset_to_target_offset[f.wasm_inst_offset];

            // Offset of the instruction right after the call, plus the diff id
            // describing the machine state at that point.
            let (after_call_inst, diff_id) = fsm
                .call_offsets
                .range((Included(&call_begin_offset), Unbounded))
                .next()
                .map(|(k, v)| (*k, *v))
                .expect("instruction offset not found in call offsets");
            let diff = &fsm.diffs[diff_id];
            let state = diff.build_state(fsm);

            stack_offset -= 1;
            stack[stack_offset] =
                stack.as_ptr().offset(last_stack_offset as isize) as usize as u64; // push rbp
            last_stack_offset = stack_offset as _;

            let mut got_explicit_shadow = false;

            for v in state.stack_values.iter() {
                match *v {
                    MachineValue::Undefined => stack_offset -= 1,
                    MachineValue::Vmctx => {
                        stack_offset -= 1;
                        stack[stack_offset] = vmctx as *mut Ctx as usize as u64;
                    }
                    MachineValue::PreserveRegister(index) => {
                        stack_offset -= 1;
                        stack[stack_offset] = known_registers[index.0].unwrap_or(0);
                    }
                    MachineValue::CopyStackBPRelative(byte_offset) => {
                        assert!(byte_offset % 8 == 0);
                        let target_offset = (byte_offset / 8) as isize;
                        let v = stack[(last_stack_offset as isize + target_offset) as usize];
                        stack_offset -= 1;
                        stack[stack_offset] = v;
                    }
                    MachineValue::ExplicitShadow => {
                        assert!(fsm.shadow_size % 8 == 0);
                        stack_offset -= fsm.shadow_size / 8;
                        got_explicit_shadow = true;
                    }
                    MachineValue::WasmStack(x) => {
                        stack_offset -= 1;
                        match state.wasm_stack[x] {
                            WasmAbstractValue::Const(x) => {
                                stack[stack_offset] = x;
                            }
                            WasmAbstractValue::Runtime => {
                                stack[stack_offset] = f.stack[x].unwrap();
                            }
                        }
                    }
                    MachineValue::WasmLocal(x) => {
                        stack_offset -= 1;
                        match fsm.locals[x] {
                            WasmAbstractValue::Const(x) => {
                                stack[stack_offset] = x;
                            }
                            WasmAbstractValue::Runtime => {
                                stack[stack_offset] = f.locals[x].unwrap();
                            }
                        }
                    }
                }
            }
            assert!(got_explicit_shadow);

            for (i, v) in state.register_values.iter().enumerate() {
                match *v {
                    MachineValue::Undefined => {}
                    MachineValue::Vmctx => {
                        known_registers[i] = Some(vmctx as *mut Ctx as usize as u64);
                    }
                    MachineValue::WasmStack(x) => match state.wasm_stack[x] {
                        WasmAbstractValue::Const(x) => {
                            known_registers[i] = Some(x);
                        }
                        WasmAbstractValue::Runtime => {
                            known_registers[i] = Some(f.stack[x].unwrap());
                        }
                    },
                    MachineValue::WasmLocal(x) => match fsm.locals[x] {
                        WasmAbstractValue::Const(x) => {
                            known_registers[i] = Some(x);
                        }
                        WasmAbstractValue::Runtime => {
                            known_registers[i] = Some(f.locals[x].unwrap());
                        }
                    },
                    _ => unreachable!(),
                }
            }

            assert!((stack.len() - stack_offset) % 2 == 0); // 16-byte alignment

            stack_offset -= 1;
            stack[stack_offset] = (code_base + after_call_inst) as u64; // return address
        }

        // Saved rbp and callee-saved registers (r15, r14, r13, r12, rbx) for the
        // stack switch performed by `run_on_wasm_stack`.
        stack_offset -= 1;
        stack[stack_offset] = known_registers[X64Register::GPR(GPR::R15).to_index().0].unwrap_or(0);

        stack_offset -= 1;
        stack[stack_offset] = known_registers[X64Register::GPR(GPR::R14).to_index().0].unwrap_or(0);

        stack_offset -= 1;
        stack[stack_offset] = known_registers[X64Register::GPR(GPR::R13).to_index().0].unwrap_or(0);

        stack_offset -= 1;
        stack[stack_offset] = known_registers[X64Register::GPR(GPR::R12).to_index().0].unwrap_or(0);

        stack_offset -= 1;
        stack[stack_offset] = known_registers[X64Register::GPR(GPR::RBX).to_index().0].unwrap_or(0);

        stack_offset -= 1;
        stack[stack_offset] = stack.as_ptr().offset(last_stack_offset as isize) as usize as u64; // rbp

        // Restore linear memory, growing it first if the image is larger than the
        // current memory bound.
        if let Some(ref memory) = image.memory {
            assert!(vmctx.internal.memory_bound <= memory.len());

            if vmctx.internal.memory_bound < memory.len() {
                let grow: unsafe extern "C" fn(ctx: &mut Ctx, memory_index: usize, delta: usize) =
                    ::std::mem::transmute((*vmctx.internal.intrinsics).memory_grow);
                grow(vmctx, 0, (memory.len() - vmctx.internal.memory_bound) / 65536);
                assert_eq!(vmctx.internal.memory_bound, memory.len());
            }

            ::std::slice::from_raw_parts_mut(
                vmctx.internal.memory_base,
                vmctx.internal.memory_bound,
            )
            .copy_from_slice(memory);
        }

        // Restore local globals from the image.
        let globals_len = (*vmctx.module).info.globals.len();
        for i in 0..globals_len {
            (*(*vmctx.local_backing).globals[LocalGlobalIndex::new(i)].vm_local_global()).data =
                image.globals[i];
        }

        run_on_wasm_stack(
            stack.as_mut_ptr().offset(stack.len() as isize),
            stack.as_mut_ptr().offset(stack_offset as isize),
        )
    }
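    /// Captures the instance's current linear memory and local globals, together
    /// with the given execution state, into a serializable `InstanceImage`.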
    pub fn build_instance_image(
        vmctx: &mut Ctx,
        execution_state: ExecutionStateImage,
    ) -> InstanceImage {
        unsafe {
            let memory = if vmctx.internal.memory_base.is_null() {
                None
            } else {
                Some(
                    ::std::slice::from_raw_parts(
                        vmctx.internal.memory_base,
                        vmctx.internal.memory_bound,
                    )
                    .to_vec(),
                )
            };

            // FIXME: Imported globals
            let globals_len = (*vmctx.module).info.globals.len();
            let globals: Vec<u64> = (0..globals_len)
                .map(|i| {
                    (*vmctx.local_backing).globals[LocalGlobalIndex::new(i)]
                        .get()
                        .to_u64()
                })
                .collect();

            InstanceImage {
                memory: memory,
                globals: globals,
                execution_state: execution_state,
            }
        }
    }
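    /// Walks a native stack upward, starting either at `initial_address` or at
    /// the return address found at `stack`, and recovers one
    /// `WasmFunctionStateDump` per frame until the return address can no longer
    /// be resolved in the module state map.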
    #[warn(unused_variables)]
    pub unsafe fn read_stack(
        msm: &ModuleStateMap,
        code_base: usize,
        mut stack: *const u64,
        initially_known_registers: [Option<u64>; 24],
        mut initial_address: Option<u64>,
    ) -> ExecutionStateImage {
        let mut known_registers: [Option<u64>; 24] = initially_known_registers;
        let mut results: Vec<WasmFunctionStateDump> = vec![];

        for _ in 0.. {
            let ret_addr = initial_address.take().unwrap_or_else(|| {
                let x = *stack;
                stack = stack.offset(1);
                x
            });
            let (fsm, state) = match msm
                .lookup_call_ip(ret_addr as usize, code_base)
                .or_else(|| msm.lookup_trappable_ip(ret_addr as usize, code_base))
            {
                Some(x) => x,
                _ => return ExecutionStateImage { frames: results },
            };

            let mut wasm_stack: Vec<Option<u64>> = state
                .wasm_stack
                .iter()
                .map(|x| match *x {
                    WasmAbstractValue::Const(x) => Some(x),
                    WasmAbstractValue::Runtime => None,
                })
                .collect();
            let mut wasm_locals: Vec<Option<u64>> = fsm
                .locals
                .iter()
                .map(|x| match *x {
                    WasmAbstractValue::Const(x) => Some(x),
                    WasmAbstractValue::Runtime => None,
                })
                .collect();

            // This must be before the next loop because that modifies `known_registers`.
            for (i, v) in state.register_values.iter().enumerate() {
                match *v {
                    MachineValue::Undefined => {}
                    MachineValue::Vmctx => {}
                    MachineValue::WasmStack(idx) => {
                        if let Some(v) = known_registers[i] {
                            wasm_stack[idx] = Some(v);
                        } else {
                            eprintln!(
                                "BUG: Register {} for WebAssembly stack slot {} has unknown value.",
                                i, idx
                            );
                        }
                    }
                    MachineValue::WasmLocal(idx) => {
                        if let Some(v) = known_registers[i] {
                            wasm_locals[idx] = Some(v);
                        }
                    }
                    _ => unreachable!(),
                }
            }

            let mut found_shadow = false;
            for v in state.stack_values.iter() {
                match *v {
                    MachineValue::ExplicitShadow => {
                        found_shadow = true;
                        break;
                    }
                    _ => {}
                }
            }
            if !found_shadow {
                stack = stack.offset((fsm.shadow_size / 8) as isize);
            }

            for v in state.stack_values.iter().rev() {
                match *v {
                    MachineValue::ExplicitShadow => {
                        stack = stack.offset((fsm.shadow_size / 8) as isize);
                    }
                    MachineValue::Undefined => {
                        stack = stack.offset(1);
                    }
                    MachineValue::Vmctx => {
                        stack = stack.offset(1);
                    }
                    MachineValue::PreserveRegister(idx) => {
                        known_registers[idx.0] = Some(*stack);
                        stack = stack.offset(1);
                    }
                    MachineValue::CopyStackBPRelative(_) => {
                        stack = stack.offset(1);
                    }
                    MachineValue::WasmStack(idx) => {
                        wasm_stack[idx] = Some(*stack);
                        stack = stack.offset(1);
                    }
                    MachineValue::WasmLocal(idx) => {
                        wasm_locals[idx] = Some(*stack);
                        stack = stack.offset(1);
                    }
                }
            }
            stack = stack.offset(1); // RBP

            // The topmost `wasm_stack_private_depth` values are private to the
            // backend; drop them from the recovered wasm stack.
            wasm_stack.truncate(
                wasm_stack
                    .len()
                    .checked_sub(state.wasm_stack_private_depth)
                    .unwrap(),
            );

            let wfs = WasmFunctionStateDump {
                local_function_id: fsm.local_function_id,
                wasm_inst_offset: state.wasm_inst_offset,
                stack: wasm_stack,
                locals: wasm_locals,
            };

            results.push(wfs);
        }

        unreachable!();
    }
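    // Register numbering used by `X64Register::to_index`: GPRs map to indices
    // 0-15 in declaration order and XMM registers to 16-23.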
    #[repr(u8)]
    #[derive(Copy, Clone, Debug, Eq, PartialEq, Ord, PartialOrd, Hash)]
    pub enum GPR {
        RAX,
        RCX,
        RDX,
        RBX,
        RSP,
        RBP,
        RSI,
        RDI,
        R8,
        R9,
        R10,
        R11,
        R12,
        R13,
        R14,
        R15,
    }

    #[repr(u8)]
    #[derive(Copy, Clone, Debug, Eq, PartialEq, Ord, PartialOrd, Hash)]
    pub enum XMM {
        XMM0,
        XMM1,
        XMM2,
        XMM3,
        XMM4,
        XMM5,
        XMM6,
        XMM7,
    }

    pub enum X64Register {
        GPR(GPR),
        XMM(XMM),
    }

    impl X64Register {
        pub fn to_index(&self) -> RegisterIndex {
            match *self {
                X64Register::GPR(x) => RegisterIndex(x as usize),
                X64Register::XMM(x) => RegisterIndex(x as usize + 16),
            }
        }
    }
}