wasmer/lib/dynasm-backend/src/machine.rs

use crate::emitter_x64::*;
use std::collections::HashSet;
use wasmparser::Type as WpType;
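/// A byte offset into the machine stack, measured downward from RBP.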
struct MachineStackOffset(usize);
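/// Tracks the register and stack allocation state of the function currently being compiled.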
pub struct Machine {
used_gprs: HashSet<GPR>,
used_xmms: HashSet<XMM>,
stack_offset: MachineStackOffset,
save_area_offset: Option<MachineStackOffset>,
}
impl Machine {
pub fn new() -> Self {
Machine {
used_gprs: HashSet::new(),
used_xmms: HashSet::new(),
stack_offset: MachineStackOffset(0),
save_area_offset: None,
}
}
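    /// Returns the number of bytes currently allocated on the machine stack below RBP.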
pub fn get_stack_offset(&self) -> usize {
self.stack_offset.0
}
pub fn get_used_gprs(&self) -> Vec<GPR> {
self.used_gprs.iter().cloned().collect()
}
pub fn get_used_xmms(&self) -> Vec<XMM> {
self.used_xmms.iter().cloned().collect()
}
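    /// Returns the register reserved for the vmctx pointer (R15).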
pub fn get_vmctx_reg() -> GPR {
GPR::R15
}
/// Picks an unused general purpose register for local/stack/argument use.
///
/// This method does not mark the register as used.
pub fn pick_gpr(&self) -> Option<GPR> {
use GPR::*;
static REGS: &'static [GPR] = &[
RSI,
RDI,
R8,
R9,
];
for r in REGS {
if !self.used_gprs.contains(r) {
return Some(*r)
}
}
None
}
/// Picks an unused general purpose register for internal temporary use.
///
/// This method does not mark the register as used.
pub fn pick_temp_gpr(&self) -> Option<GPR> {
use GPR::*;
static REGS: &'static [GPR] = &[
RAX,
RCX,
RDX,
];
for r in REGS {
if !self.used_gprs.contains(r) {
return Some(*r)
}
}
None
}
/// Acquires a temporary GPR.
pub fn acquire_temp_gpr(&mut self) -> Option<GPR> {
let gpr = self.pick_temp_gpr();
if let Some(x) = gpr {
self.used_gprs.insert(x);
}
gpr
}
/// Releases a temporary GPR.
pub fn release_temp_gpr(&mut self, gpr: GPR) {
assert_eq!(self.used_gprs.remove(&gpr), true);
}
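    // A minimal usage sketch (illustrative only, not taken from the call sites): a
    // temporary GPR is typically acquired around a single emitted operation and
    // released as soon as the scratch value is no longer needed.
    //
    //     if let Some(tmp) = machine.acquire_temp_gpr() {
    //         // ... emit instructions that use `tmp` as scratch ...
    //         machine.release_temp_gpr(tmp);
    //     }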
/// Picks an unused XMM register.
///
/// This method does not mark the register as used.
pub fn pick_xmm(&self) -> Option<XMM> {
use XMM::*;
static REGS: &'static [XMM] = &[
XMM3,
XMM4,
XMM5,
XMM6,
XMM7,
];
for r in REGS {
if !self.used_xmms.contains(r) {
return Some(*r)
}
}
None
}
/// Picks an unused XMM register for internal temporary use.
///
/// This method does not mark the register as used.
pub fn pick_temp_xmm(&self) -> Option<XMM> {
use XMM::*;
static REGS: &'static [XMM] = &[
XMM0,
XMM1,
XMM2,
];
for r in REGS {
if !self.used_xmms.contains(r) {
return Some(*r)
}
}
None
}
/// Acquires a temporary XMM register.
pub fn acquire_temp_xmm(&mut self) -> Option<XMM> {
let xmm = self.pick_temp_xmm();
if let Some(x) = xmm {
self.used_xmms.insert(x);
}
xmm
}
/// Releases a temporary XMM register.
pub fn release_temp_xmm(&mut self, xmm: XMM) {
assert_eq!(self.used_xmms.remove(&xmm), true);
}
/// Acquires stack locations from the machine state.
pub fn acquire_stack_locations<E: Emitter>(
&mut self,
assembler: &mut E,
n: usize,
zeroed: bool,
) -> Vec<Location> {
let mut ret = vec![];
let mut delta_stack_offset: usize = 0;
for _ in 0..n {
let loc = {
self.stack_offset.0 += 8;
delta_stack_offset += 8;
Location::Memory(GPR::RBP, -(self.stack_offset.0 as i32))
};
ret.push(loc);
}
if delta_stack_offset != 0 {
assembler.emit_sub(Size::S64, Location::Imm32(delta_stack_offset as u32), Location::GPR(GPR::RSP));
}
if zeroed {
for i in 0..n {
assembler.emit_mov(Size::S64, Location::Imm32(0), ret[i]);
}
}
ret
}
/// Acquires locations from the machine state.
///
    /// If the returned locations are used for stack values, `release_locations` needs to be called on them;
    /// if they are used for locals, it does not.
pub fn acquire_locations<E: Emitter>(
&mut self,
assembler: &mut E,
tys: &[WpType],
zeroed: bool,
) -> Vec<Location> {
let mut ret = vec![];
let mut delta_stack_offset: usize = 0;
for ty in tys {
let loc = match *ty {
WpType::F32 | WpType::F64 => {
self.pick_xmm().map(Location::XMM).or_else(
|| self.pick_gpr().map(Location::GPR)
)
},
WpType::I32 | WpType::I64 => {
self.pick_gpr().map(Location::GPR)
},
_ => unreachable!()
};
let loc = if let Some(x) = loc {
x
} else {
self.stack_offset.0 += 8;
delta_stack_offset += 8;
Location::Memory(GPR::RBP, -(self.stack_offset.0 as i32))
};
if let Location::GPR(x) = loc {
self.used_gprs.insert(x);
} else if let Location::XMM(x) = loc {
self.used_xmms.insert(x);
}
ret.push(loc);
}
if delta_stack_offset != 0 {
assembler.emit_sub(Size::S64, Location::Imm32(delta_stack_offset as u32), Location::GPR(GPR::RSP));
}
if zeroed {
for i in 0..tys.len() {
assembler.emit_mov(Size::S64, Location::Imm32(0), ret[i]);
}
}
ret
}
    /// Releases locations used for stack values.
pub fn release_locations<E: Emitter>(
&mut self,
assembler: &mut E,
locs: &[Location]
) {
let mut delta_stack_offset: usize = 0;
for loc in locs.iter().rev() {
match *loc {
Location::GPR(ref x) => {
assert_eq!(self.used_gprs.remove(x), true);
},
Location::XMM(ref x) => {
assert_eq!(self.used_xmms.remove(x), true);
},
Location::Memory(GPR::RBP, x) => {
if x >= 0 {
unreachable!();
}
let offset = (-x) as usize;
if offset != self.stack_offset.0 {
unreachable!();
}
self.stack_offset.0 -= 8;
delta_stack_offset += 8;
},
_ => {}
}
}
if delta_stack_offset != 0 {
assembler.emit_add(Size::S64, Location::Imm32(delta_stack_offset as u32), Location::GPR(GPR::RSP));
}
}
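    /// Releases only the register part of `locs`, leaving the stack bookkeeping untouched.
    ///
    /// Together with `release_locations_only_stack`, this splits `release_locations` in two,
    /// presumably so callers can free registers and stack space at different points.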
pub fn release_locations_only_regs(
&mut self,
locs: &[Location]
) {
for loc in locs.iter().rev() {
match *loc {
Location::GPR(ref x) => {
assert_eq!(self.used_gprs.remove(x), true);
},
Location::XMM(ref x) => {
assert_eq!(self.used_xmms.remove(x), true);
},
_ => {}
}
}
}
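    /// Releases only the stack part of `locs`, adjusting RSP and the tracked stack offset.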
pub fn release_locations_only_stack<E: Emitter>(
&mut self,
assembler: &mut E,
locs: &[Location]
) {
let mut delta_stack_offset: usize = 0;
for loc in locs.iter().rev() {
match *loc {
Location::Memory(GPR::RBP, x) => {
if x >= 0 {
unreachable!();
}
let offset = (-x) as usize;
if offset != self.stack_offset.0 {
unreachable!();
}
self.stack_offset.0 -= 8;
delta_stack_offset += 8;
},
_ => {}
}
}
if delta_stack_offset != 0 {
assembler.emit_add(Size::S64, Location::Imm32(delta_stack_offset as u32), Location::GPR(GPR::RSP));
}
}
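    /// Emits the RSP adjustment for releasing `locs` but does not mutate the tracked
    /// allocation state (note the `&self` receiver).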
pub fn release_locations_keep_state<E: Emitter>(
&self,
assembler: &mut E,
locs: &[Location]
) {
let mut delta_stack_offset: usize = 0;
for loc in locs.iter().rev() {
match *loc {
Location::Memory(GPR::RBP, x) => {
if x >= 0 {
unreachable!();
}
let offset = (-x) as usize;
if offset != self.stack_offset.0 {
unreachable!();
}
delta_stack_offset += 8;
},
_ => {}
}
}
if delta_stack_offset != 0 {
assembler.emit_add(Size::S64, Location::Imm32(delta_stack_offset as u32), Location::GPR(GPR::RSP));
}
}
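    /// Emits the prologue for locals: assigns each of the `n` locals (the first `n_params`
    /// of which are parameters) a register or an RBP-relative stack slot, saves the registers
    /// it takes over, loads register parameters and the vmctx pointer, and zero-initializes
    /// the non-parameter locals. Returns the location chosen for each local.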
pub fn init_locals<E: Emitter>(&mut self, a: &mut E, n: usize, n_params: usize) -> Vec<Location> {
        // Assign the first five locals to registers (saved and restored by this function);
        // spill the rest to RBP-relative stack slots.
fn get_local_location(idx: usize) -> Location {
match idx {
0 => Location::GPR(GPR::R10),
1 => Location::GPR(GPR::R11),
2 => Location::GPR(GPR::R12),
3 => Location::GPR(GPR::R13),
4 => Location::GPR(GPR::R14),
_ => Location::Memory(GPR::RBP, -(((idx - 4) * 8) as i32)),
}
}
        let mut locations: Vec<Location> = vec![];
let mut allocated: usize = 0;
// Determine locations for parameters.
for i in 0..n_params {
let loc = Self::get_param_location(i + 1);
locations.push(match loc {
Location::GPR(x) => {
let old_idx = allocated;
allocated += 1;
get_local_location(old_idx)
},
Location::Memory(_, _) => loc,
_ => unreachable!(),
});
}
// Determine locations for normal locals.
for i in n_params..n {
locations.push(get_local_location(allocated));
allocated += 1;
}
// How many machine stack slots did all the locals use?
let num_mem_slots = locations.iter().filter(|&&loc| {
match loc {
Location::Memory(_, _) => true,
_ => false,
}
}).count();
// Move RSP down to reserve space for machine stack slots.
if num_mem_slots > 0 {
a.emit_sub(Size::S64, Location::Imm32((num_mem_slots * 8) as u32), Location::GPR(GPR::RSP));
self.stack_offset.0 += num_mem_slots * 8;
}
// Save callee-saved registers.
for loc in locations.iter() {
if let Location::GPR(x) = *loc {
a.emit_push(Size::S64, *loc);
self.stack_offset.0 += 8;
}
}
// Save R15 for vmctx use.
a.emit_push(Size::S64, Location::GPR(GPR::R15));
self.stack_offset.0 += 8;
        // Record the offset of the save area.
self.save_area_offset = Some(MachineStackOffset(self.stack_offset.0));
// Load in-register parameters into the allocated locations.
for i in 0..n_params {
let loc = Self::get_param_location(i + 1);
match loc {
Location::GPR(x) => {
a.emit_mov(Size::S64, loc, locations[i]);
},
_ => break
}
}
// Load vmctx.
a.emit_mov(Size::S64, Self::get_param_location(0), Location::GPR(GPR::R15));
// Initialize all normal locals to zero.
for i in n_params..n {
a.emit_mov(Size::S64, Location::Imm32(0), locations[i]);
}
locations
}
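    /// Emits the epilogue counterpart of `init_locals`: rewinds RSP to the save area and
    /// restores R15 and the registers that were used for locals.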
pub fn finalize_locals<E: Emitter>(&mut self, a: &mut E, locations: &[Location]) {
// Unwind stack to the "save area".
a.emit_lea(Size::S64, Location::Memory(GPR::RBP, -(self.save_area_offset.as_ref().unwrap().0 as i32)), Location::GPR(GPR::RSP));
// Restore R15 used by vmctx.
a.emit_pop(Size::S64, Location::GPR(GPR::R15));
// Restore callee-saved registers.
for loc in locations.iter().rev() {
if let Location::GPR(x) = *loc {
a.emit_pop(Size::S64, *loc);
}
}
}
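    /// Returns the location of the `idx`-th function parameter under the System V AMD64
    /// calling convention (the first six integer arguments in registers, the rest on the
    /// stack above the saved RBP). Index 0 is used for the vmctx pointer.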
pub fn get_param_location(
idx: usize
) -> Location {
match idx {
0 => Location::GPR(GPR::RDI),
1 => Location::GPR(GPR::RSI),
2 => Location::GPR(GPR::RDX),
3 => Location::GPR(GPR::RCX),
4 => Location::GPR(GPR::R8),
5 => Location::GPR(GPR::R9),
_ => Location::Memory(GPR::RBP, (16 + (idx - 6) * 8) as i32),
}
}
}
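
// A minimal sketch of how this allocator is driven by the code generator
// (illustrative only; `a` stands for some `Emitter` implementation and the
// local/parameter counts are made up):
//
//     let mut m = Machine::new();
//     let locals = m.init_locals(&mut a, /* n */ 3, /* n_params */ 1);
//     let tmp = m.acquire_locations(&mut a, &[WpType::I64], false);
//     // ... emit instructions that read and write `tmp[0]` and `locals` ...
//     m.release_locations(&mut a, &tmp);
//     m.finalize_locals(&mut a, &locals);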