use crate::emitter_x64::*;
use smallvec::SmallVec;
use std::collections::HashSet;
use wasmparser::Type as WpType;
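
/// A byte offset below RBP into the machine stack frame.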
struct MachineStackOffset(usize);

pub struct Machine {
    used_gprs: HashSet<GPR>,
    used_xmms: HashSet<XMM>,
    stack_offset: MachineStackOffset,
    save_area_offset: Option<MachineStackOffset>,
}
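
// Register conventions used by this machine model (as encoded in the methods
// below): RSI, RDI and R8-R11 hold values, RAX, RCX and RDX are internal
// temporaries, XMM3-XMM7 hold values and XMM0-XMM2 are temporaries.
// R12-R14 and RBX (callee-saved) are preferred for locals, and R15 is
// reserved for the vmctx pointer.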

impl Machine {
    pub fn new() -> Self {
        Machine {
            used_gprs: HashSet::new(),
            used_xmms: HashSet::new(),
            stack_offset: MachineStackOffset(0),
            save_area_offset: None,
        }
    }

    pub fn get_stack_offset(&self) -> usize {
        self.stack_offset.0
    }

    pub fn get_used_gprs(&self) -> Vec<GPR> {
        self.used_gprs.iter().cloned().collect()
    }

    pub fn get_used_xmms(&self) -> Vec<XMM> {
        self.used_xmms.iter().cloned().collect()
    }

    pub fn get_vmctx_reg() -> GPR {
        GPR::R15
    }

    /// Picks an unused general purpose register for local/stack/argument use.
    ///
    /// This method does not mark the register as used.
    pub fn pick_gpr(&self) -> Option<GPR> {
        use GPR::*;
        static REGS: &'static [GPR] = &[RSI, RDI, R8, R9, R10, R11];
        for r in REGS {
            if !self.used_gprs.contains(r) {
                return Some(*r);
            }
        }
        None
    }

    /// Picks an unused general purpose register for internal temporary use.
    ///
    /// This method does not mark the register as used.
    pub fn pick_temp_gpr(&self) -> Option<GPR> {
        use GPR::*;
        static REGS: &'static [GPR] = &[RAX, RCX, RDX];
        for r in REGS {
            if !self.used_gprs.contains(r) {
                return Some(*r);
            }
        }
        None
    }

    /// Acquires a temporary GPR.
    pub fn acquire_temp_gpr(&mut self) -> Option<GPR> {
        let gpr = self.pick_temp_gpr();
        if let Some(x) = gpr {
            self.used_gprs.insert(x);
        }
        gpr
    }

    /// Releases a temporary GPR.
    pub fn release_temp_gpr(&mut self, gpr: GPR) {
        assert_eq!(self.used_gprs.remove(&gpr), true);
    }
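
    // A typical pairing (sketch only; `a` stands for any available `Emitter`):
    //
    //     let tmp = self.acquire_temp_gpr().unwrap();
    //     a.emit_mov(Size::S64, Location::Imm32(0), Location::GPR(tmp));
    //     self.release_temp_gpr(tmp);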

    /// Picks an unused XMM register.
    ///
    /// This method does not mark the register as used.
    pub fn pick_xmm(&self) -> Option<XMM> {
        use XMM::*;
        static REGS: &'static [XMM] = &[XMM3, XMM4, XMM5, XMM6, XMM7];
        for r in REGS {
            if !self.used_xmms.contains(r) {
                return Some(*r);
            }
        }
        None
    }

    /// Picks an unused XMM register for internal temporary use.
    ///
    /// This method does not mark the register as used.
    pub fn pick_temp_xmm(&self) -> Option<XMM> {
        use XMM::*;
        static REGS: &'static [XMM] = &[XMM0, XMM1, XMM2];
        for r in REGS {
            if !self.used_xmms.contains(r) {
                return Some(*r);
            }
        }
        None
    }

    /// Acquires a temporary XMM register.
    pub fn acquire_temp_xmm(&mut self) -> Option<XMM> {
        let xmm = self.pick_temp_xmm();
        if let Some(x) = xmm {
            self.used_xmms.insert(x);
        }
        xmm
    }

    /// Releases a temporary XMM register.
    pub fn release_temp_xmm(&mut self, xmm: XMM) {
        assert_eq!(self.used_xmms.remove(&xmm), true);
    }

    /// Acquires locations from the machine state.
    ///
    /// If the returned locations are used for stack values, `release_locations` needs to be
    /// called on them; if they are used for locals, it does not.
    pub fn acquire_locations<E: Emitter>(
        &mut self,
        assembler: &mut E,
        tys: &[WpType],
        zeroed: bool,
    ) -> SmallVec<[Location; 1]> {
        let mut ret = smallvec![];
        let mut delta_stack_offset: usize = 0;

        for ty in tys {
            let loc = match *ty {
                WpType::F32 | WpType::F64 => self.pick_xmm().map(Location::XMM),
                WpType::I32 | WpType::I64 => self.pick_gpr().map(Location::GPR),
                _ => unreachable!(),
            };

            // Fall back to a new stack slot below RBP if no register is free.
            let loc = if let Some(x) = loc {
                x
            } else {
                self.stack_offset.0 += 8;
                delta_stack_offset += 8;
                Location::Memory(GPR::RBP, -(self.stack_offset.0 as i32))
            };
            if let Location::GPR(x) = loc {
                self.used_gprs.insert(x);
            } else if let Location::XMM(x) = loc {
                self.used_xmms.insert(x);
            }
            ret.push(loc);
        }

        if delta_stack_offset != 0 {
            assembler.emit_sub(
                Size::S64,
                Location::Imm32(delta_stack_offset as u32),
                Location::GPR(GPR::RSP),
            );
        }
        if zeroed {
            for i in 0..tys.len() {
                assembler.emit_mov(Size::S64, Location::Imm32(0), ret[i]);
            }
        }
        ret
    }
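
    // Sketch of the intended pairing for stack values (`a` is any `Emitter`):
    //
    //     let locs = self.acquire_locations(a, &[WpType::I64], false);
    //     // ... emit code that uses locs[0] ...
    //     self.release_locations(a, &locs);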

    /// Releases locations used for stack values.
    pub fn release_locations<E: Emitter>(&mut self, assembler: &mut E, locs: &[Location]) {
        let mut delta_stack_offset: usize = 0;

        for loc in locs.iter().rev() {
            match *loc {
                Location::GPR(ref x) => {
                    assert_eq!(self.used_gprs.remove(x), true);
                }
                Location::XMM(ref x) => {
                    assert_eq!(self.used_xmms.remove(x), true);
                }
                Location::Memory(GPR::RBP, x) => {
                    if x >= 0 {
                        unreachable!();
                    }
                    let offset = (-x) as usize;
                    if offset != self.stack_offset.0 {
                        unreachable!();
                    }
                    self.stack_offset.0 -= 8;
                    delta_stack_offset += 8;
                }
                _ => {}
            }
        }

        if delta_stack_offset != 0 {
            assembler.emit_add(
                Size::S64,
                Location::Imm32(delta_stack_offset as u32),
                Location::GPR(GPR::RSP),
            );
        }
    }

    /// Releases the register locations in `locs`, leaving any stack slots untouched.
    pub fn release_locations_only_regs(&mut self, locs: &[Location]) {
        for loc in locs.iter().rev() {
            match *loc {
                Location::GPR(ref x) => {
                    assert_eq!(self.used_gprs.remove(x), true);
                }
                Location::XMM(ref x) => {
                    assert_eq!(self.used_xmms.remove(x), true);
                }
                _ => {}
            }
        }
    }

    /// Releases the stack slots in `locs`, adjusting RSP and the tracked stack
    /// offset, without touching register usage.
    pub fn release_locations_only_stack<E: Emitter>(
        &mut self,
        assembler: &mut E,
        locs: &[Location],
    ) {
        let mut delta_stack_offset: usize = 0;

        for loc in locs.iter().rev() {
            match *loc {
                Location::Memory(GPR::RBP, x) => {
                    if x >= 0 {
                        unreachable!();
                    }
                    let offset = (-x) as usize;
                    if offset != self.stack_offset.0 {
                        unreachable!();
                    }
                    self.stack_offset.0 -= 8;
                    delta_stack_offset += 8;
                }
                _ => {}
            }
        }

        if delta_stack_offset != 0 {
            assembler.emit_add(
                Size::S64,
                Location::Imm32(delta_stack_offset as u32),
                Location::GPR(GPR::RSP),
            );
        }
    }
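
    // `release_locations_only_regs` and `release_locations_only_stack` together
    // do what `release_locations` does in one step; splitting them lets a caller
    // free registers at one point and pop the stack space at a later one.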

    /// Releases the stack slots in `locs` by adjusting RSP, but keeps the
    /// allocation state (`stack_offset` and the used-register sets) unchanged.
    pub fn release_locations_keep_state<E: Emitter>(&self, assembler: &mut E, locs: &[Location]) {
        let mut delta_stack_offset: usize = 0;
        // Track a local copy of the stack offset, since `self` is not mutated here.
        let mut stack_offset = self.stack_offset.0;

        for loc in locs.iter().rev() {
            match *loc {
                Location::Memory(GPR::RBP, x) => {
                    if x >= 0 {
                        unreachable!();
                    }
                    let offset = (-x) as usize;
                    if offset != stack_offset {
                        unreachable!();
                    }
                    stack_offset -= 8;
                    delta_stack_offset += 8;
                }
                _ => {}
            }
        }

        if delta_stack_offset != 0 {
            assembler.emit_add(
                Size::S64,
                Location::Imm32(delta_stack_offset as u32),
                Location::GPR(GPR::RSP),
            );
        }
    }

    /// Allocates locations for `n` locals, of which the first `n_params` are
    /// the function's parameters, then sets up the stack frame: stack slots are
    /// reserved, callee-saved registers used for locals and R15 (vmctx) are
    /// pushed, in-register parameters are moved into place, and non-parameter
    /// locals are zero-initialized.
    pub fn init_locals<E: Emitter>(
        &mut self,
        a: &mut E,
        n: usize,
        n_params: usize,
    ) -> Vec<Location> {
        // Use callee-saved registers for locals.
        fn get_local_location(idx: usize) -> Location {
            match idx {
                0 => Location::GPR(GPR::R12),
                1 => Location::GPR(GPR::R13),
                2 => Location::GPR(GPR::R14),
                3 => Location::GPR(GPR::RBX),
                _ => Location::Memory(GPR::RBP, -(((idx - 3) * 8) as i32)),
            }
        }

        let mut locations: Vec<Location> = vec![];
        let mut allocated: usize = 0;

        // Determine locations for parameters.
        for i in 0..n_params {
            let loc = Self::get_param_location(i + 1);
            locations.push(match loc {
                Location::GPR(_) => {
                    let old_idx = allocated;
                    allocated += 1;
                    get_local_location(old_idx)
                }
                Location::Memory(_, _) => loc,
                _ => unreachable!(),
            });
        }

        // Determine locations for normal locals.
        for _ in n_params..n {
            locations.push(get_local_location(allocated));
            allocated += 1;
        }

        // How many machine stack slots did all the locals use?
        let num_mem_slots = locations
            .iter()
            .filter(|&&loc| match loc {
                Location::Memory(_, _) => true,
                _ => false,
            })
            .count();

        // Move RSP down to reserve space for machine stack slots.
        if num_mem_slots > 0 {
            a.emit_sub(
                Size::S64,
                Location::Imm32((num_mem_slots * 8) as u32),
                Location::GPR(GPR::RSP),
            );
            self.stack_offset.0 += num_mem_slots * 8;
        }

        // Save callee-saved registers.
        for loc in locations.iter() {
            if let Location::GPR(_) = *loc {
                a.emit_push(Size::S64, *loc);
                self.stack_offset.0 += 8;
            }
        }

        // Save R15 for vmctx use.
        a.emit_push(Size::S64, Location::GPR(GPR::R15));
        self.stack_offset.0 += 8;

        // Record the offset of the save area.
        self.save_area_offset = Some(MachineStackOffset(self.stack_offset.0));

        // Load in-register parameters into the allocated locations.
        for i in 0..n_params {
            let loc = Self::get_param_location(i + 1);
            match loc {
                Location::GPR(_) => {
                    a.emit_mov(Size::S64, loc, locations[i]);
                }
                _ => break,
            }
        }

        // Load vmctx.
        a.emit_mov(
            Size::S64,
            Self::get_param_location(0),
            Location::GPR(GPR::R15),
        );

        // Initialize all normal locals to zero.
        for i in n_params..n {
            a.emit_mov(Size::S64, Location::Imm32(0), locations[i]);
        }

        locations
    }
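
    // Frame layout established above (and unwound by `finalize_locals`), from
    // higher to lower addresses below RBP: spilled local slots, pushed
    // callee-saved registers holding locals, then the pushed R15 (vmctx);
    // `save_area_offset` records the depth of that last slot.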

    /// Tears down the frame set up by `init_locals`: rewinds RSP to the save
    /// area and restores R15 and the callee-saved registers used for locals.
    pub fn finalize_locals<E: Emitter>(&mut self, a: &mut E, locations: &[Location]) {
        // Unwind stack to the "save area".
        a.emit_lea(
            Size::S64,
            Location::Memory(
                GPR::RBP,
                -(self.save_area_offset.as_ref().unwrap().0 as i32),
            ),
            Location::GPR(GPR::RSP),
        );

        // Restore R15 used by vmctx.
        a.emit_pop(Size::S64, Location::GPR(GPR::R15));

        // Restore callee-saved registers.
        for loc in locations.iter().rev() {
            if let Location::GPR(_) = *loc {
                a.emit_pop(Size::S64, *loc);
            }
        }
    }

    /// Location of argument `idx` under the System V AMD64 calling convention:
    /// the first six integer arguments are passed in registers, the rest on the
    /// caller's stack above the saved RBP and return address.
    pub fn get_param_location(idx: usize) -> Location {
        match idx {
            0 => Location::GPR(GPR::RDI),
            1 => Location::GPR(GPR::RSI),
            2 => Location::GPR(GPR::RDX),
            3 => Location::GPR(GPR::RCX),
            4 => Location::GPR(GPR::R8),
            5 => Location::GPR(GPR::R9),
            _ => Location::Memory(GPR::RBP, (16 + (idx - 6) * 8) as i32),
        }
    }
}