use std::collections::BTreeMap;
use std::ops::Bound::{Included, Unbounded};

#[derive(Copy, Clone, Debug, Eq, PartialEq, Hash)]
pub struct RegisterIndex(pub usize);

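/// An abstract WebAssembly value as tracked by the state map: either a
/// constant known at code-generation time or a value only known at runtime.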
#[derive(Copy, Clone, Debug, Eq, PartialEq, Hash)]
pub enum WasmAbstractValue {
    Runtime,
    Const(u64),
}

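/// A snapshot of the machine state at a point in generated code: what each
/// native stack slot and register holds, plus the abstract Wasm operand stack.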
#[derive(Clone, Debug)]
pub struct MachineState {
    pub stack_values: Vec<MachineValue>,
    pub register_values: Vec<MachineValue>,
    pub wasm_stack: Vec<WasmAbstractValue>,
    pub wasm_stack_private_depth: usize,
    pub wasm_inst_offset: usize,
}

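/// A delta from a previous `MachineState`. `last` points at the preceding
/// diff in `FunctionStateMap::diffs`, so a full state can be rebuilt by
/// replaying the chain (see `MachineStateDiff::build_state`).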
#[derive(Clone, Debug, Default)]
pub struct MachineStateDiff {
    pub last: Option<usize>,
    pub stack_push: Vec<MachineValue>,
    pub stack_pop: usize,
    pub reg_diff: Vec<(RegisterIndex, MachineValue)>,
    pub wasm_stack_push: Vec<WasmAbstractValue>,
    pub wasm_stack_pop: usize,
    pub wasm_stack_private_depth: usize, // absolute value; not a diff.
    pub wasm_inst_offset: usize,         // absolute value; not a diff.
}

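/// Describes what a single native stack slot or register contains at a given
/// point in generated code.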
#[derive(Clone, Debug, Eq, PartialEq, Hash)]
pub enum MachineValue {
    Undefined,
    Vmctx,
    PreserveRegister(RegisterIndex),
    CopyStackBPRelative(i32), // relative to Base Pointer, in byte offset
    ExplicitShadow,           // indicates that all values above this are above the shadow region
    WasmStack(usize),
    WasmLocal(usize),
    TwoHalves(Box<(MachineValue, MachineValue)>), // 32-bit values. TODO: optimize: add another type for inner "half" value to avoid boxing?
}

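/// Per-function state map recorded by a backend: the initial machine state,
/// the chain of state diffs, and offset tables mapping machine-code offsets
/// of suspend points (loops, calls, trappable instructions) back to diffs.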
#[derive(Clone, Debug)]
pub struct FunctionStateMap {
    pub initial: MachineState,
    pub local_function_id: usize,
    pub locals: Vec<WasmAbstractValue>,
    pub shadow_size: usize, // for single-pass backend, 32 bytes on x86-64
    pub diffs: Vec<MachineStateDiff>,
    pub wasm_function_header_target_offset: Option<SuspendOffset>,
    pub wasm_offset_to_target_offset: BTreeMap<usize, SuspendOffset>,
    pub loop_offsets: BTreeMap<usize, OffsetInfo>, /* suspend_offset -> info */
    pub call_offsets: BTreeMap<usize, OffsetInfo>, /* suspend_offset -> info */
    pub trappable_offsets: BTreeMap<usize, OffsetInfo>, /* suspend_offset -> info */
}

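/// Machine-code offset of a suspend point, tagged by its kind: a loop, a call
/// site, or a trappable instruction.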
#[derive(Clone, Copy, Debug)]
pub enum SuspendOffset {
    Loop(usize),
    Call(usize),
    Trappable(usize),
}

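/// Information attached to a suspend offset: the index of the corresponding
/// `MachineStateDiff` and the code offset used as the resume/return address
/// when a frame is restored.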
#[derive(Clone, Debug)]
pub struct OffsetInfo {
    pub diff_id: usize,
    pub activate_offset: usize,
}

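/// State maps for all local functions of a module, keyed by each function's
/// offset within the generated code, together with the total code size.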
#[derive(Clone, Debug)]
pub struct ModuleStateMap {
    pub local_functions: BTreeMap<usize, FunctionStateMap>,
    pub total_size: usize,
}

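/// The Wasm-level view of one recovered stack frame: the local function it
/// belongs to, the instruction offset, and the stack/local values (`None`
/// where a value could not be determined).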
#[derive(Clone, Debug, Serialize, Deserialize)]
pub struct WasmFunctionStateDump {
    pub local_function_id: usize,
    pub wasm_inst_offset: usize,
    pub stack: Vec<Option<u64>>,
    pub locals: Vec<Option<u64>>,
}

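/// The recovered Wasm call stack, as a list of per-frame dumps.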
#[derive(Clone, Debug, Serialize, Deserialize)]
pub struct ExecutionStateImage {
    pub frames: Vec<WasmFunctionStateDump>,
}

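/// A serializable image of a running instance: linear memory contents, global
/// values, and the execution state needed to resume it.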
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct InstanceImage {
    pub memory: Option<Vec<u8>>,
    pub globals: Vec<u64>,
    pub execution_state: ExecutionStateImage,
}

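// The lookup_* methods below map a return address inside the generated code
// back to the function that contains it and the reconstructed machine state
// at that point. They are used by `x64::read_stack` when walking the native
// stack.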
impl ModuleStateMap {
    fn lookup_call_ip(&self, ip: usize, base: usize) -> Option<(&FunctionStateMap, MachineState)> {
        if ip < base || ip - base >= self.total_size {
            None
        } else {
            let (_, fsm) = self
                .local_functions
                .range((Unbounded, Included(&(ip - base))))
                .last()
                .unwrap();

            match fsm.call_offsets.get(&(ip - base)) {
                Some(x) => {
                    if x.diff_id < fsm.diffs.len() {
                        Some((fsm, fsm.diffs[x.diff_id].build_state(fsm)))
                    } else {
                        None
                    }
                }
                None => None,
            }
        }
    }

    fn lookup_trappable_ip(
        &self,
        ip: usize,
        base: usize,
    ) -> Option<(&FunctionStateMap, MachineState)> {
        if ip < base || ip - base >= self.total_size {
            None
        } else {
            let (_, fsm) = self
                .local_functions
                .range((Unbounded, Included(&(ip - base))))
                .last()
                .unwrap();

            match fsm.trappable_offsets.get(&(ip - base)) {
                Some(x) => {
                    if x.diff_id < fsm.diffs.len() {
                        Some((fsm, fsm.diffs[x.diff_id].build_state(fsm)))
                    } else {
                        None
                    }
                }
                None => None,
            }
        }
    }

    fn lookup_loop_ip(&self, ip: usize, base: usize) -> Option<(&FunctionStateMap, MachineState)> {
        if ip < base || ip - base >= self.total_size {
            None
        } else {
            let (_, fsm) = self
                .local_functions
                .range((Unbounded, Included(&(ip - base))))
                .last()
                .unwrap();

            match fsm.loop_offsets.get(&(ip - base)) {
                Some(x) => {
                    if x.diff_id < fsm.diffs.len() {
                        Some((fsm, fsm.diffs[x.diff_id].build_state(fsm)))
                    } else {
                        None
                    }
                }
                None => None,
            }
        }
    }
}

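// Illustrative sketch (not part of the original code): a code generator would
// typically create one map per local function, e.g.
//
//     let mut fsm = FunctionStateMap::new(x64::new_machine_state(), function_id, 32, locals);
//
// where `function_id` and `locals` are assumed to come from the compiler, and
// 32 is the x86-64 shadow size used by the single-pass backend.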
impl FunctionStateMap {
    pub fn new(
        initial: MachineState,
        local_function_id: usize,
        shadow_size: usize,
        locals: Vec<WasmAbstractValue>,
    ) -> FunctionStateMap {
        FunctionStateMap {
            initial,
            local_function_id,
            shadow_size,
            locals,
            diffs: vec![],
            wasm_function_header_target_offset: None,
            wasm_offset_to_target_offset: BTreeMap::new(),
            loop_offsets: BTreeMap::new(),
            call_offsets: BTreeMap::new(),
            trappable_offsets: BTreeMap::new(),
        }
    }
}

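// `MachineState::diff` computes the delta between two snapshots;
// `MachineStateDiff::build_state` (below) replays a chain of such deltas on
// top of `FunctionStateMap::initial` to recover a full `MachineState`.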
impl MachineState {
    pub fn diff(&self, old: &MachineState) -> MachineStateDiff {
        let first_diff_stack_depth: usize = self
            .stack_values
            .iter()
            .zip(old.stack_values.iter())
            .enumerate()
            .find(|&(_, (a, b))| a != b)
            .map(|x| x.0)
            .unwrap_or(old.stack_values.len().min(self.stack_values.len()));
        assert_eq!(self.register_values.len(), old.register_values.len());
        let reg_diff: Vec<_> = self
            .register_values
            .iter()
            .zip(old.register_values.iter())
            .enumerate()
            .filter(|&(_, (a, b))| a != b)
            .map(|(i, (a, _))| (RegisterIndex(i), a.clone()))
            .collect();
        let first_diff_wasm_stack_depth: usize = self
            .wasm_stack
            .iter()
            .zip(old.wasm_stack.iter())
            .enumerate()
            .find(|&(_, (a, b))| a != b)
            .map(|x| x.0)
            .unwrap_or(old.wasm_stack.len().min(self.wasm_stack.len()));
        MachineStateDiff {
            last: None,
            stack_push: self.stack_values[first_diff_stack_depth..].to_vec(),
            stack_pop: old.stack_values.len() - first_diff_stack_depth,
            reg_diff: reg_diff,
            wasm_stack_push: self.wasm_stack[first_diff_wasm_stack_depth..].to_vec(),
            wasm_stack_pop: old.wasm_stack.len() - first_diff_wasm_stack_depth,
            wasm_stack_private_depth: self.wasm_stack_private_depth,
            wasm_inst_offset: self.wasm_inst_offset,
        }
    }
}

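// Illustrative sketch (not part of the original code): recording a snapshot
// and rebuilding it later. Here `fsm` is assumed to be a `FunctionStateMap`
// whose `initial` state equals `old_state`.
//
//     let d = new_state.diff(&old_state);
//     fsm.diffs.push(d);
//     let rebuilt = fsm.diffs.last().unwrap().build_state(&fsm);
//     // `rebuilt` now matches `new_state`.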
impl MachineStateDiff {
    pub fn build_state(&self, m: &FunctionStateMap) -> MachineState {
        let mut chain: Vec<&MachineStateDiff> = vec![];
        chain.push(self);
        let mut current = self.last;
        while let Some(x) = current {
            let that = &m.diffs[x];
            current = that.last;
            chain.push(that);
        }
        chain.reverse();
        let mut state = m.initial.clone();
        for x in chain {
            for _ in 0..x.stack_pop {
                state.stack_values.pop().unwrap();
            }
            for v in &x.stack_push {
                state.stack_values.push(v.clone());
            }
            for &(index, ref v) in &x.reg_diff {
                state.register_values[index.0] = v.clone();
            }
            for _ in 0..x.wasm_stack_pop {
                state.wasm_stack.pop().unwrap();
            }
            for v in &x.wasm_stack_push {
                state.wasm_stack.push(*v);
            }
        }
        state.wasm_stack_private_depth = self.wasm_stack_private_depth;
        state.wasm_inst_offset = self.wasm_inst_offset;
        state
    }
}

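// Backtrace printing is opt-in: `print_backtrace_if_needed` only prints when
// the `WASMER_BACKTRACE` environment variable is set to `1`.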
impl ExecutionStateImage {
    pub fn print_backtrace_if_needed(&self) {
        use std::env;

        if let Ok(x) = env::var("WASMER_BACKTRACE") {
            if x == "1" {
                eprintln!("{}", self.colored_output());
                return;
            }
        }

        eprintln!("Run with `WASMER_BACKTRACE=1` environment variable to display a backtrace.");
    }

    pub fn colored_output(&self) -> String {
        use colored::*;

        fn join_strings(x: impl Iterator<Item = String>, sep: &str) -> String {
            let mut ret = String::new();
            let mut first = true;

            for s in x {
                if first {
                    first = false;
                } else {
                    ret += sep;
                }
                ret += &s;
            }

            ret
        }

        fn format_optional_u64_sequence(x: &[Option<u64>]) -> String {
            if x.len() == 0 {
                "(empty)".into()
            } else {
                join_strings(
                    x.iter().enumerate().map(|(i, x)| {
                        format!(
                            "[{}] = {}",
                            i,
                            x.map(|x| format!("{}", x))
                                .unwrap_or_else(|| "?".to_string())
                                .bold()
                                .cyan()
                        )
                    }),
                    ", ",
                )
            }
        }

        let mut ret = String::new();

        if self.frames.len() == 0 {
            ret += &"Unknown fault address, cannot read stack.".yellow();
            ret += "\n";
        } else {
            ret += &"Backtrace:".bold();
            ret += "\n";
            for (i, f) in self.frames.iter().enumerate() {
                ret += &format!("* Frame {} @ Local function {}", i, f.local_function_id).bold();
                ret += "\n";
                ret += &format!(
                    "  {} {}\n",
                    "Offset:".bold().yellow(),
                    format!("{}", f.wasm_inst_offset).bold().cyan(),
                );
                ret += &format!(
                    "  {} {}\n",
                    "Locals:".bold().yellow(),
                    format_optional_u64_sequence(&f.locals)
                );
                ret += &format!(
                    "  {} {}\n\n",
                    "Stack:".bold().yellow(),
                    format_optional_u64_sequence(&f.stack)
                );
            }
        }

        ret
    }
}

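// `InstanceImage` round-trips through `bincode`; `from_bytes` returns `None`
// when the input cannot be deserialized.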
impl InstanceImage {
    pub fn from_bytes(input: &[u8]) -> Option<InstanceImage> {
        use bincode::deserialize;
        match deserialize(input) {
            Ok(x) => Some(x),
            Err(_) => None,
        }
    }

    pub fn to_bytes(&self) -> Vec<u8> {
        use bincode::serialize;
        serialize(self).unwrap()
    }
}

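/// x86-64-specific helpers for capturing and restoring machine state
/// (Unix only).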
#[cfg(all(unix, target_arch = "x86_64"))]
pub mod x64 {
    use super::*;
    use crate::codegen::BreakpointMap;
    use crate::fault::{catch_unsafe_unwind, run_on_alternative_stack};
    use crate::structures::TypedIndex;
    use crate::types::LocalGlobalIndex;
    use crate::vm::Ctx;
    use std::any::Any;

    pub fn new_machine_state() -> MachineState {
        MachineState {
            stack_values: vec![],
            register_values: vec![MachineValue::Undefined; 16 + 8],
            wasm_stack: vec![],
            wasm_stack_private_depth: 0,
            wasm_inst_offset: ::std::usize::MAX,
        }
    }

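    /// Rebuilds a native call stack from `image` inside an 8 MB buffer and
    /// resumes execution on it via `run_on_alternative_stack`, catching
    /// unwinds with `catch_unsafe_unwind`.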
    #[warn(unused_variables)]
    pub unsafe fn invoke_call_return_on_stack(
        msm: &ModuleStateMap,
        code_base: usize,
        image: InstanceImage,
        vmctx: &mut Ctx,
        breakpoints: Option<BreakpointMap>,
    ) -> Result<u64, Box<dyn Any>> {
        let mut stack: Vec<u64> = vec![0; 1048576 * 8 / 8]; // 8MB stack
        let mut stack_offset: usize = stack.len();

        stack_offset -= 3; // placeholder for call return

        let mut last_stack_offset: u64 = 0; // rbp

        let mut known_registers: [Option<u64>; 24] = [None; 24];

        let local_functions_vec: Vec<&FunctionStateMap> =
            msm.local_functions.iter().map(|(_, v)| v).collect();

        // Bottom to top
        for f in image.execution_state.frames.iter().rev() {
            let fsm = local_functions_vec[f.local_function_id];
            let suspend_offset = if f.wasm_inst_offset == ::std::usize::MAX {
                fsm.wasm_function_header_target_offset
            } else {
                fsm.wasm_offset_to_target_offset
                    .get(&f.wasm_inst_offset)
                    .map(|x| *x)
            }
            .expect("instruction is not a critical point");

            let (activate_offset, diff_id) = match suspend_offset {
                SuspendOffset::Loop(x) => fsm.loop_offsets.get(&x),
                SuspendOffset::Call(x) => fsm.call_offsets.get(&x),
                SuspendOffset::Trappable(x) => fsm.trappable_offsets.get(&x),
            }
            .map(|x| (x.activate_offset, x.diff_id))
            .expect("offset cannot be found in table");

            let diff = &fsm.diffs[diff_id];
            let state = diff.build_state(fsm);

            stack_offset -= 1;
            stack[stack_offset] = stack.as_ptr().offset(last_stack_offset as isize) as usize as u64; // push rbp
            last_stack_offset = stack_offset as _;

            let mut got_explicit_shadow = false;

            for v in state.stack_values.iter() {
                match *v {
                    MachineValue::Undefined => stack_offset -= 1,
                    MachineValue::Vmctx => {
                        stack_offset -= 1;
                        stack[stack_offset] = vmctx as *mut Ctx as usize as u64;
                    }
                    MachineValue::PreserveRegister(index) => {
                        stack_offset -= 1;
                        stack[stack_offset] = known_registers[index.0].unwrap_or(0);
                    }
                    MachineValue::CopyStackBPRelative(byte_offset) => {
                        assert!(byte_offset % 8 == 0);
                        let target_offset = (byte_offset / 8) as isize;
                        let v = stack[(last_stack_offset as isize + target_offset) as usize];
                        stack_offset -= 1;
                        stack[stack_offset] = v;
                    }
                    MachineValue::ExplicitShadow => {
                        assert!(fsm.shadow_size % 8 == 0);
                        stack_offset -= fsm.shadow_size / 8;
                        got_explicit_shadow = true;
                    }
                    MachineValue::WasmStack(x) => {
                        stack_offset -= 1;
                        match state.wasm_stack[x] {
                            WasmAbstractValue::Const(x) => {
                                stack[stack_offset] = x;
                            }
                            WasmAbstractValue::Runtime => {
                                stack[stack_offset] = f.stack[x].unwrap();
                            }
                        }
                    }
                    MachineValue::WasmLocal(x) => {
                        stack_offset -= 1;
                        match fsm.locals[x] {
                            WasmAbstractValue::Const(x) => {
                                stack[stack_offset] = x;
                            }
                            WasmAbstractValue::Runtime => {
                                stack[stack_offset] = f.locals[x].unwrap();
                            }
                        }
                    }
                    MachineValue::TwoHalves(ref inner) => {
                        stack_offset -= 1;
                        // TODO: Cleanup
                        match inner.0 {
                            MachineValue::WasmStack(x) => {
                                match state.wasm_stack[x] {
                                    WasmAbstractValue::Const(x) => {
                                        assert!(x <= ::std::u32::MAX as u64);
                                        stack[stack_offset] |= x;
                                    }
                                    WasmAbstractValue::Runtime => {
                                        let v = f.stack[x].unwrap();
                                        assert!(v <= ::std::u32::MAX as u64);
                                        stack[stack_offset] |= v;
                                    }
                                }
                            }
                            MachineValue::WasmLocal(x) => {
                                stack_offset -= 1;
                                match fsm.locals[x] {
                                    WasmAbstractValue::Const(x) => {
                                        assert!(x <= ::std::u32::MAX as u64);
                                        stack[stack_offset] |= x;
                                    }
                                    WasmAbstractValue::Runtime => {
                                        let v = f.locals[x].unwrap();
                                        assert!(v <= ::std::u32::MAX as u64);
                                        stack[stack_offset] |= v;
                                    }
                                }
                            }
                            MachineValue::Undefined => {}
                            _ => unimplemented!("TwoHalves.0"),
                        }
                        match inner.1 {
                            MachineValue::WasmStack(x) => {
                                match state.wasm_stack[x] {
                                    WasmAbstractValue::Const(x) => {
                                        assert!(x <= ::std::u32::MAX as u64);
                                        stack[stack_offset] |= x << 32;
                                    }
                                    WasmAbstractValue::Runtime => {
                                        let v = f.stack[x].unwrap();
                                        assert!(v <= ::std::u32::MAX as u64);
                                        stack[stack_offset] |= v << 32;
                                    }
                                }
                            }
                            MachineValue::WasmLocal(x) => {
                                stack_offset -= 1;
                                match fsm.locals[x] {
                                    WasmAbstractValue::Const(x) => {
                                        assert!(x <= ::std::u32::MAX as u64);
                                        stack[stack_offset] |= x << 32;
                                    }
                                    WasmAbstractValue::Runtime => {
                                        let v = f.locals[x].unwrap();
                                        assert!(v <= ::std::u32::MAX as u64);
                                        stack[stack_offset] |= v << 32;
                                    }
                                }
                            }
                            MachineValue::Undefined => {}
                            _ => unimplemented!("TwoHalves.1"),
                        }
                    }
                }
            }
            if !got_explicit_shadow {
                assert!(fsm.shadow_size % 8 == 0);
                stack_offset -= fsm.shadow_size / 8;
            }
            for (i, v) in state.register_values.iter().enumerate() {
                match *v {
                    MachineValue::Undefined => {}
                    MachineValue::Vmctx => {
                        known_registers[i] = Some(vmctx as *mut Ctx as usize as u64);
                    }
                    MachineValue::WasmStack(x) => match state.wasm_stack[x] {
                        WasmAbstractValue::Const(x) => {
                            known_registers[i] = Some(x);
                        }
                        WasmAbstractValue::Runtime => {
                            known_registers[i] = Some(f.stack[x].unwrap());
                        }
                    },
                    MachineValue::WasmLocal(x) => match fsm.locals[x] {
                        WasmAbstractValue::Const(x) => {
                            known_registers[i] = Some(x);
                        }
                        WasmAbstractValue::Runtime => {
                            known_registers[i] = Some(f.locals[x].unwrap());
                        }
                    },
                    _ => unreachable!(),
                }
            }

            // no need to check 16-byte alignment here because it's possible that we're not at a call entry.

            stack_offset -= 1;
            stack[stack_offset] = (code_base + activate_offset) as u64; // return address
        }

        stack_offset -= 1;
        stack[stack_offset] = known_registers[X64Register::GPR(GPR::R15).to_index().0].unwrap_or(0);

        stack_offset -= 1;
        stack[stack_offset] = known_registers[X64Register::GPR(GPR::R14).to_index().0].unwrap_or(0);

        stack_offset -= 1;
        stack[stack_offset] = known_registers[X64Register::GPR(GPR::R13).to_index().0].unwrap_or(0);

        stack_offset -= 1;
        stack[stack_offset] = known_registers[X64Register::GPR(GPR::R12).to_index().0].unwrap_or(0);

        stack_offset -= 1;
        stack[stack_offset] = known_registers[X64Register::GPR(GPR::R11).to_index().0].unwrap_or(0);

        stack_offset -= 1;
        stack[stack_offset] = known_registers[X64Register::GPR(GPR::R10).to_index().0].unwrap_or(0);

        stack_offset -= 1;
        stack[stack_offset] = known_registers[X64Register::GPR(GPR::R9).to_index().0].unwrap_or(0);

        stack_offset -= 1;
        stack[stack_offset] = known_registers[X64Register::GPR(GPR::R8).to_index().0].unwrap_or(0);

        stack_offset -= 1;
        stack[stack_offset] = known_registers[X64Register::GPR(GPR::RSI).to_index().0].unwrap_or(0);

        stack_offset -= 1;
        stack[stack_offset] = known_registers[X64Register::GPR(GPR::RDI).to_index().0].unwrap_or(0);

        stack_offset -= 1;
        stack[stack_offset] = known_registers[X64Register::GPR(GPR::RDX).to_index().0].unwrap_or(0);

        stack_offset -= 1;
        stack[stack_offset] = known_registers[X64Register::GPR(GPR::RCX).to_index().0].unwrap_or(0);

        stack_offset -= 1;
        stack[stack_offset] = known_registers[X64Register::GPR(GPR::RBX).to_index().0].unwrap_or(0);

        stack_offset -= 1;
        stack[stack_offset] = known_registers[X64Register::GPR(GPR::RAX).to_index().0].unwrap_or(0);

        stack_offset -= 1;
        stack[stack_offset] = stack.as_ptr().offset(last_stack_offset as isize) as usize as u64; // rbp

        stack_offset -= 1;
        stack[stack_offset] =
            known_registers[X64Register::XMM(XMM::XMM7).to_index().0].unwrap_or(0);

        stack_offset -= 1;
        stack[stack_offset] =
            known_registers[X64Register::XMM(XMM::XMM6).to_index().0].unwrap_or(0);

        stack_offset -= 1;
        stack[stack_offset] =
            known_registers[X64Register::XMM(XMM::XMM5).to_index().0].unwrap_or(0);

        stack_offset -= 1;
        stack[stack_offset] =
            known_registers[X64Register::XMM(XMM::XMM4).to_index().0].unwrap_or(0);

        stack_offset -= 1;
        stack[stack_offset] =
            known_registers[X64Register::XMM(XMM::XMM3).to_index().0].unwrap_or(0);

        stack_offset -= 1;
        stack[stack_offset] =
            known_registers[X64Register::XMM(XMM::XMM2).to_index().0].unwrap_or(0);

        stack_offset -= 1;
        stack[stack_offset] =
            known_registers[X64Register::XMM(XMM::XMM1).to_index().0].unwrap_or(0);

        stack_offset -= 1;
        stack[stack_offset] =
            known_registers[X64Register::XMM(XMM::XMM0).to_index().0].unwrap_or(0);

        if let Some(ref memory) = image.memory {
            assert!(vmctx.internal.memory_bound <= memory.len());

            if vmctx.internal.memory_bound < memory.len() {
                let grow: unsafe extern "C" fn(ctx: &mut Ctx, memory_index: usize, delta: usize) =
                    ::std::mem::transmute((*vmctx.internal.intrinsics).memory_grow);
                grow(
                    vmctx,
                    0,
                    (memory.len() - vmctx.internal.memory_bound) / 65536,
                );
                assert_eq!(vmctx.internal.memory_bound, memory.len());
            }

            ::std::slice::from_raw_parts_mut(
                vmctx.internal.memory_base,
                vmctx.internal.memory_bound,
            )
            .copy_from_slice(memory);
        }

        let globals_len = (*vmctx.module).info.globals.len();
        for i in 0..globals_len {
            (*(*vmctx.local_backing).globals[LocalGlobalIndex::new(i)].vm_local_global()).data =
                image.globals[i];
        }

        drop(image); // free up host memory

        catch_unsafe_unwind(
            || {
                run_on_alternative_stack(
                    stack.as_mut_ptr().offset(stack.len() as isize),
                    stack.as_mut_ptr().offset(stack_offset as isize),
                )
            },
            breakpoints,
        )
    }

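    /// Captures the current instance state (linear memory, globals, and the
    /// given execution state) into an `InstanceImage`.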
    pub fn build_instance_image(
        vmctx: &mut Ctx,
        execution_state: ExecutionStateImage,
    ) -> InstanceImage {
        unsafe {
            let memory = if vmctx.internal.memory_base.is_null() {
                None
            } else {
                Some(
                    ::std::slice::from_raw_parts(
                        vmctx.internal.memory_base,
                        vmctx.internal.memory_bound,
                    )
                    .to_vec(),
                )
            };

            // FIXME: Imported globals
            let globals_len = (*vmctx.module).info.globals.len();
            let globals: Vec<u64> = (0..globals_len)
                .map(|i| {
                    (*vmctx.local_backing).globals[LocalGlobalIndex::new(i)]
                        .get()
                        .to_u64()
                })
                .collect();

            InstanceImage {
                memory: memory,
                globals: globals,
                execution_state: execution_state,
            }
        }
    }

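    /// Walks a native stack, using the module state map to translate each
    /// frame back into a `WasmFunctionStateDump`. The walk stops at the first
    /// return address that is not covered by the state map.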
    #[warn(unused_variables)]
    pub unsafe fn read_stack(
        msm: &ModuleStateMap,
        code_base: usize,
        mut stack: *const u64,
        initially_known_registers: [Option<u64>; 24],
        mut initial_address: Option<u64>,
    ) -> ExecutionStateImage {
        let mut known_registers: [Option<u64>; 24] = initially_known_registers;
        let mut results: Vec<WasmFunctionStateDump> = vec![];

        for _ in 0.. {
            let ret_addr = initial_address.take().unwrap_or_else(|| {
                let x = *stack;
                stack = stack.offset(1);
                x
            });
            let (fsm, state) = match msm
                .lookup_call_ip(ret_addr as usize, code_base)
                .or_else(|| msm.lookup_trappable_ip(ret_addr as usize, code_base))
                .or_else(|| msm.lookup_loop_ip(ret_addr as usize, code_base))
            {
                Some(x) => x,
                _ => return ExecutionStateImage { frames: results },
            };

            let mut wasm_stack: Vec<Option<u64>> = state
                .wasm_stack
                .iter()
                .map(|x| match *x {
                    WasmAbstractValue::Const(x) => Some(x),
                    WasmAbstractValue::Runtime => None,
                })
                .collect();
            let mut wasm_locals: Vec<Option<u64>> = fsm
                .locals
                .iter()
                .map(|x| match *x {
                    WasmAbstractValue::Const(x) => Some(x),
                    WasmAbstractValue::Runtime => None,
                })
                .collect();

            // This must be before the next loop because that modifies `known_registers`.
            for (i, v) in state.register_values.iter().enumerate() {
                match *v {
                    MachineValue::Undefined => {}
                    MachineValue::Vmctx => {}
                    MachineValue::WasmStack(idx) => {
                        if let Some(v) = known_registers[i] {
                            wasm_stack[idx] = Some(v);
                        } else {
                            eprintln!(
                                "BUG: Register {} for WebAssembly stack slot {} has unknown value.",
                                i, idx
                            );
                        }
                    }
                    MachineValue::WasmLocal(idx) => {
                        if let Some(v) = known_registers[i] {
                            wasm_locals[idx] = Some(v);
                        }
                    }
                    _ => unreachable!(),
                }
            }

            let mut found_shadow = false;
            for v in state.stack_values.iter() {
                match *v {
                    MachineValue::ExplicitShadow => {
                        found_shadow = true;
                        break;
                    }
                    _ => {}
                }
            }
            if !found_shadow {
                stack = stack.offset((fsm.shadow_size / 8) as isize);
            }

            for v in state.stack_values.iter().rev() {
                match *v {
                    MachineValue::ExplicitShadow => {
                        stack = stack.offset((fsm.shadow_size / 8) as isize);
                    }
                    MachineValue::Undefined => {
                        stack = stack.offset(1);
                    }
                    MachineValue::Vmctx => {
                        stack = stack.offset(1);
                    }
                    MachineValue::PreserveRegister(idx) => {
                        known_registers[idx.0] = Some(*stack);
                        stack = stack.offset(1);
                    }
                    MachineValue::CopyStackBPRelative(_) => {
                        stack = stack.offset(1);
                    }
                    MachineValue::WasmStack(idx) => {
                        wasm_stack[idx] = Some(*stack);
                        stack = stack.offset(1);
                    }
                    MachineValue::WasmLocal(idx) => {
                        wasm_locals[idx] = Some(*stack);
                        stack = stack.offset(1);
                    }
                    MachineValue::TwoHalves(ref inner) => {
                        let v = *stack;
                        stack = stack.offset(1);
                        match inner.0 {
                            MachineValue::WasmStack(idx) => {
                                wasm_stack[idx] = Some(v & 0xffffffffu64);
                            }
                            MachineValue::WasmLocal(idx) => {
                                wasm_locals[idx] = Some(v & 0xffffffffu64);
                            }
                            MachineValue::Undefined => {}
                            _ => unimplemented!("TwoHalves.0 (read)"),
                        }
                        match inner.1 {
                            MachineValue::WasmStack(idx) => {
                                wasm_stack[idx] = Some(v >> 32);
                            }
                            MachineValue::WasmLocal(idx) => {
                                wasm_locals[idx] = Some(v >> 32);
                            }
                            MachineValue::Undefined => {}
                            _ => unimplemented!("TwoHalves.1 (read)"),
                        }
                    }
                }
            }
            stack = stack.offset(1); // RBP

            wasm_stack.truncate(
                wasm_stack
                    .len()
                    .checked_sub(state.wasm_stack_private_depth)
                    .unwrap(),
            );

            let wfs = WasmFunctionStateDump {
                local_function_id: fsm.local_function_id,
                wasm_inst_offset: state.wasm_inst_offset,
                stack: wasm_stack,
                locals: wasm_locals,
            };
            results.push(wfs);
        }

        unreachable!();
    }

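    /// x86-64 general-purpose registers. The discriminant is used directly as
    /// the register index (see `X64Register::to_index`).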
    #[repr(u8)]
    #[derive(Copy, Clone, Debug, Eq, PartialEq, Ord, PartialOrd, Hash)]
    pub enum GPR {
        RAX,
        RCX,
        RDX,
        RBX,
        RSP,
        RBP,
        RSI,
        RDI,
        R8,
        R9,
        R10,
        R11,
        R12,
        R13,
        R14,
        R15,
    }

    #[repr(u8)]
    #[derive(Copy, Clone, Debug, Eq, PartialEq, Ord, PartialOrd, Hash)]
    pub enum XMM {
        XMM0,
        XMM1,
        XMM2,
        XMM3,
        XMM4,
        XMM5,
        XMM6,
        XMM7,
    }

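    /// Either a general-purpose or an SSE register. `to_index` maps GPRs to
    /// indices 0..=15 and XMM registers to 16..=23.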
    #[derive(Copy, Clone, Debug, Eq, PartialEq)]
    pub enum X64Register {
        GPR(GPR),
        XMM(XMM),
    }

    impl X64Register {
        pub fn to_index(&self) -> RegisterIndex {
            match *self {
                X64Register::GPR(x) => RegisterIndex(x as usize),
                X64Register::XMM(x) => RegisterIndex(x as usize + 16),
            }
        }

        pub fn from_dwarf_regnum(x: u16) -> Option<X64Register> {
            Some(match x {
                0 => X64Register::GPR(GPR::RAX),
                1 => X64Register::GPR(GPR::RDX),
                2 => X64Register::GPR(GPR::RCX),
                3 => X64Register::GPR(GPR::RBX),
                4 => X64Register::GPR(GPR::RSI),
                5 => X64Register::GPR(GPR::RDI),
                6 => X64Register::GPR(GPR::RBP),
                7 => X64Register::GPR(GPR::RSP),
                8 => X64Register::GPR(GPR::R8),
                9 => X64Register::GPR(GPR::R9),
                10 => X64Register::GPR(GPR::R10),
                11 => X64Register::GPR(GPR::R11),
                12 => X64Register::GPR(GPR::R12),
                13 => X64Register::GPR(GPR::R13),
                14 => X64Register::GPR(GPR::R14),
                15 => X64Register::GPR(GPR::R15),

                17 => X64Register::XMM(XMM::XMM0),
                18 => X64Register::XMM(XMM::XMM1),
                19 => X64Register::XMM(XMM::XMM2),
                20 => X64Register::XMM(XMM::XMM3),
                21 => X64Register::XMM(XMM::XMM4),
                22 => X64Register::XMM(XMM::XMM5),
                23 => X64Register::XMM(XMM::XMM6),
                24 => X64Register::XMM(XMM::XMM7),
                _ => return None,
            })
        }
    }
}