use crate::emitter_x64::*;
use std::collections::HashSet;
use wasmparser::Type as WpType;

struct MachineStackOffset(usize);

pub struct Machine {
    used_gprs: HashSet<GPR>,
    used_xmms: HashSet<XMM>,
    stack_offset: MachineStackOffset,
    save_area_offset: Option<MachineStackOffset>,
}

impl Machine {
    pub fn new() -> Self {
        Machine {
            used_gprs: HashSet::new(),
            used_xmms: HashSet::new(),
            stack_offset: MachineStackOffset(0),
            save_area_offset: None,
        }
    }

    pub fn get_stack_offset(&self) -> usize {
        self.stack_offset.0
    }

    pub fn get_used_gprs(&self) -> Vec<GPR> {
        self.used_gprs.iter().cloned().collect()
    }

    pub fn get_used_xmms(&self) -> Vec<XMM> {
        self.used_xmms.iter().cloned().collect()
    }

    pub fn get_vmctx_reg() -> GPR {
        GPR::R15
    }

    /// Picks an unused general-purpose register for local/stack/argument use.
    ///
    /// This method does not mark the register as used.
    pub fn pick_gpr(&self) -> Option<GPR> {
        use GPR::*;
        static REGS: &'static [GPR] = &[RSI, RDI, R8, R9];
        for r in REGS {
            if !self.used_gprs.contains(r) {
                return Some(*r);
            }
        }
        None
    }

    /// Picks an unused general-purpose register for internal temporary use.
    ///
    /// This method does not mark the register as used.
    pub fn pick_temp_gpr(&self) -> Option<GPR> {
        use GPR::*;
        static REGS: &'static [GPR] = &[RAX, RCX, RDX];
        for r in REGS {
            if !self.used_gprs.contains(r) {
                return Some(*r);
            }
        }
        None
    }

    /// Acquires a temporary GPR.
    pub fn acquire_temp_gpr(&mut self) -> Option<GPR> {
        let gpr = self.pick_temp_gpr();
        if let Some(x) = gpr {
            self.used_gprs.insert(x);
        }
        gpr
    }

    /// Releases a temporary GPR.
    pub fn release_temp_gpr(&mut self, gpr: GPR) {
        assert!(self.used_gprs.remove(&gpr));
    }

    /// Picks an unused XMM register.
    ///
    /// This method does not mark the register as used.
    pub fn pick_xmm(&self) -> Option<XMM> {
        use XMM::*;
        static REGS: &'static [XMM] = &[XMM3, XMM4, XMM5, XMM6, XMM7];
        for r in REGS {
            if !self.used_xmms.contains(r) {
                return Some(*r);
            }
        }
        None
    }

    /// Picks an unused XMM register for internal temporary use.
    ///
    /// This method does not mark the register as used.
    pub fn pick_temp_xmm(&self) -> Option<XMM> {
        use XMM::*;
        static REGS: &'static [XMM] = &[XMM0, XMM1, XMM2];
        for r in REGS {
            if !self.used_xmms.contains(r) {
                return Some(*r);
            }
        }
        None
    }

    /// Acquires a temporary XMM register.
    pub fn acquire_temp_xmm(&mut self) -> Option<XMM> {
        let xmm = self.pick_temp_xmm();
        if let Some(x) = xmm {
            self.used_xmms.insert(x);
        }
        xmm
    }

    /// Releases a temporary XMM register.
    pub fn release_temp_xmm(&mut self, xmm: XMM) {
        assert!(self.used_xmms.remove(&xmm));
    }

    /// Acquires stack locations from the machine state.
    pub fn acquire_stack_locations<E: Emitter>(
        &mut self,
        assembler: &mut E,
        n: usize,
        zeroed: bool,
    ) -> Vec<Location> {
        let mut ret = vec![];
        let mut delta_stack_offset: usize = 0;

        for _ in 0..n {
            let loc = {
                self.stack_offset.0 += 8;
                delta_stack_offset += 8;
                Location::Memory(GPR::RBP, -(self.stack_offset.0 as i32))
            };
            ret.push(loc);
        }

        if delta_stack_offset != 0 {
            assembler.emit_sub(
                Size::S64,
                Location::Imm32(delta_stack_offset as u32),
                Location::GPR(GPR::RSP),
            );
        }
        if zeroed {
            for i in 0..n {
                assembler.emit_mov(Size::S64, Location::Imm32(0), ret[i]);
            }
        }
        ret
    }
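    /// Usage sketch (illustration only, not part of the original API): pairs
    /// `acquire_temp_gpr` with `release_temp_gpr` around a store into a
    /// freshly acquired stack slot. The method name and the constant `42` are
    /// hypothetical; `E: Emitter` is the same bound used elsewhere in this
    /// file.
    #[allow(dead_code)]
    fn sketch_spill_constant<E: Emitter>(&mut self, assembler: &mut E) {
        // Reserve one zeroed 8-byte slot below RBP.
        let slots = self.acquire_stack_locations(assembler, 1, true);
        // Temporaries come from the RAX/RCX/RDX pool and must be released
        // once the emitted instruction sequence no longer needs them.
        let tmp = self.acquire_temp_gpr().expect("temp GPR available");
        assembler.emit_mov(Size::S64, Location::Imm32(42), Location::GPR(tmp));
        assembler.emit_mov(Size::S64, Location::GPR(tmp), slots[0]);
        self.release_temp_gpr(tmp);
        self.release_locations(assembler, &slots);
    }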
    /// Acquires locations from the machine state.
    ///
    /// If the returned locations are used for a stack value, `release_location` needs to be
    /// called on them; if they are used for locals, it does not.
    pub fn acquire_locations<E: Emitter>(
        &mut self,
        assembler: &mut E,
        tys: &[WpType],
        zeroed: bool,
    ) -> Vec<Location> {
        let mut ret = vec![];
        let mut delta_stack_offset: usize = 0;

        for ty in tys {
            let loc = match *ty {
                WpType::F32 | WpType::F64 => self.pick_xmm().map(Location::XMM),
                WpType::I32 | WpType::I64 => self.pick_gpr().map(Location::GPR),
                _ => unreachable!(),
            };

            // Fall back to a stack slot if no register is available.
            let loc = if let Some(x) = loc {
                x
            } else {
                self.stack_offset.0 += 8;
                delta_stack_offset += 8;
                Location::Memory(GPR::RBP, -(self.stack_offset.0 as i32))
            };
            if let Location::GPR(x) = loc {
                self.used_gprs.insert(x);
            } else if let Location::XMM(x) = loc {
                self.used_xmms.insert(x);
            }
            ret.push(loc);
        }

        if delta_stack_offset != 0 {
            assembler.emit_sub(
                Size::S64,
                Location::Imm32(delta_stack_offset as u32),
                Location::GPR(GPR::RSP),
            );
        }
        if zeroed {
            for i in 0..tys.len() {
                assembler.emit_mov(Size::S64, Location::Imm32(0), ret[i]);
            }
        }
        ret
    }

    /// Releases locations used for a stack value.
    pub fn release_locations<E: Emitter>(&mut self, assembler: &mut E, locs: &[Location]) {
        let mut delta_stack_offset: usize = 0;

        for loc in locs.iter().rev() {
            match *loc {
                Location::GPR(ref x) => {
                    assert!(self.used_gprs.remove(x));
                }
                Location::XMM(ref x) => {
                    assert!(self.used_xmms.remove(x));
                }
                Location::Memory(GPR::RBP, x) => {
                    if x >= 0 {
                        unreachable!();
                    }
                    let offset = (-x) as usize;
                    if offset != self.stack_offset.0 {
                        unreachable!();
                    }
                    self.stack_offset.0 -= 8;
                    delta_stack_offset += 8;
                }
                _ => {}
            }
        }

        if delta_stack_offset != 0 {
            assembler.emit_add(
                Size::S64,
                Location::Imm32(delta_stack_offset as u32),
                Location::GPR(GPR::RSP),
            );
        }
    }

    pub fn release_locations_only_regs(&mut self, locs: &[Location]) {
        for loc in locs.iter().rev() {
            match *loc {
                Location::GPR(ref x) => {
                    assert!(self.used_gprs.remove(x));
                }
                Location::XMM(ref x) => {
                    assert!(self.used_xmms.remove(x));
                }
                _ => {}
            }
        }
    }

    pub fn release_locations_only_stack<E: Emitter>(
        &mut self,
        assembler: &mut E,
        locs: &[Location],
    ) {
        let mut delta_stack_offset: usize = 0;

        for loc in locs.iter().rev() {
            if let Location::Memory(GPR::RBP, x) = *loc {
                if x >= 0 {
                    unreachable!();
                }
                let offset = (-x) as usize;
                if offset != self.stack_offset.0 {
                    unreachable!();
                }
                self.stack_offset.0 -= 8;
                delta_stack_offset += 8;
            }
        }

        if delta_stack_offset != 0 {
            assembler.emit_add(
                Size::S64,
                Location::Imm32(delta_stack_offset as u32),
                Location::GPR(GPR::RSP),
            );
        }
    }

    /// Releases stack locations without updating the machine state, e.g. when
    /// emitting an early exit from a block whose state must stay valid for
    /// the code that follows.
    pub fn release_locations_keep_state<E: Emitter>(&self, assembler: &mut E, locs: &[Location]) {
        let mut delta_stack_offset: usize = 0;
        // Track the expected offsets in a local, since `self` is not mutated
        // here; otherwise the check below would only pass for the first slot.
        let mut stack_offset = self.stack_offset.0;

        for loc in locs.iter().rev() {
            if let Location::Memory(GPR::RBP, x) = *loc {
                if x >= 0 {
                    unreachable!();
                }
                let offset = (-x) as usize;
                if offset != stack_offset {
                    unreachable!();
                }
                stack_offset -= 8;
                delta_stack_offset += 8;
            }
        }

        if delta_stack_offset != 0 {
            assembler.emit_add(
                Size::S64,
                Location::Imm32(delta_stack_offset as u32),
                Location::GPR(GPR::RSP),
            );
        }
    }
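    /// Usage sketch (illustration only, not part of the original API): the
    /// typical round trip for WebAssembly stack values. Locations obtained
    /// from `acquire_locations` are handed back through `release_locations`,
    /// which walks them in reverse so the RBP-relative slots unwind in LIFO
    /// order.
    #[allow(dead_code)]
    fn sketch_value_stack_round_trip<E: Emitter>(&mut self, assembler: &mut E) {
        let locs = self.acquire_locations(assembler, &[WpType::I32, WpType::F64], true);
        // ... generated code would read/write `locs[0]` and `locs[1]` here ...
        self.release_locations(assembler, &locs);
    }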
    pub fn init_locals<E: Emitter>(
        &mut self,
        a: &mut E,
        n: usize,
        n_params: usize,
    ) -> Vec<Location> {
        // Use callee-saved registers for the first locals; spill the rest to
        // RBP-relative stack slots.
        fn get_local_location(idx: usize) -> Location {
            match idx {
                0 => Location::GPR(GPR::R10),
                1 => Location::GPR(GPR::R11),
                2 => Location::GPR(GPR::R12),
                3 => Location::GPR(GPR::R13),
                4 => Location::GPR(GPR::R14),
                _ => Location::Memory(GPR::RBP, -(((idx - 4) * 8) as i32)),
            }
        }

        let mut locations: Vec<Location> = vec![];
        let mut allocated: usize = 0;

        // Determine locations for parameters. Native argument 0 is reserved
        // for the vmctx pointer, so WebAssembly parameter `i` arrives as
        // native argument `i + 1`.
        for i in 0..n_params {
            let loc = Self::get_param_location(i + 1);
            locations.push(match loc {
                Location::GPR(_) => {
                    let old_idx = allocated;
                    allocated += 1;
                    get_local_location(old_idx)
                }
                Location::Memory(_, _) => loc,
                _ => unreachable!(),
            });
        }

        // Determine locations for normal locals.
        for _ in n_params..n {
            locations.push(get_local_location(allocated));
            allocated += 1;
        }

        // How many machine stack slots do all the locals use?
        let num_mem_slots = locations
            .iter()
            .filter(|&&loc| match loc {
                Location::Memory(_, _) => true,
                _ => false,
            })
            .count();

        // Move RSP down to reserve space for the machine stack slots.
        if num_mem_slots > 0 {
            a.emit_sub(
                Size::S64,
                Location::Imm32((num_mem_slots * 8) as u32),
                Location::GPR(GPR::RSP),
            );
            self.stack_offset.0 += num_mem_slots * 8;
        }

        // Save callee-saved registers.
        for loc in locations.iter() {
            if let Location::GPR(_) = *loc {
                a.emit_push(Size::S64, *loc);
                self.stack_offset.0 += 8;
            }
        }

        // Save R15 for vmctx use.
        a.emit_push(Size::S64, Location::GPR(GPR::R15));
        self.stack_offset.0 += 8;

        // Record the offset of the save area.
        self.save_area_offset = Some(MachineStackOffset(self.stack_offset.0));

        // Load in-register parameters into the allocated locations.
        for i in 0..n_params {
            let loc = Self::get_param_location(i + 1);
            match loc {
                Location::GPR(_) => {
                    a.emit_mov(Size::S64, loc, locations[i]);
                }
                _ => break,
            }
        }

        // Load vmctx into R15.
        a.emit_mov(
            Size::S64,
            Self::get_param_location(0),
            Location::GPR(GPR::R15),
        );

        // Initialize all normal locals to zero.
        for i in n_params..n {
            a.emit_mov(Size::S64, Location::Imm32(0), locations[i]);
        }

        locations
    }

    pub fn finalize_locals<E: Emitter>(&mut self, a: &mut E, locations: &[Location]) {
        // Unwind the stack to the "save area".
        a.emit_lea(
            Size::S64,
            Location::Memory(
                GPR::RBP,
                -(self.save_area_offset.as_ref().unwrap().0 as i32),
            ),
            Location::GPR(GPR::RSP),
        );

        // Restore R15 used by vmctx.
        a.emit_pop(Size::S64, Location::GPR(GPR::R15));

        // Restore callee-saved registers.
        for loc in locations.iter().rev() {
            if let Location::GPR(_) = *loc {
                a.emit_pop(Size::S64, *loc);
            }
        }
    }

    pub fn get_param_location(idx: usize) -> Location {
        match idx {
            0 => Location::GPR(GPR::RDI),
            1 => Location::GPR(GPR::RSI),
            2 => Location::GPR(GPR::RDX),
            3 => Location::GPR(GPR::RCX),
            4 => Location::GPR(GPR::R8),
            5 => Location::GPR(GPR::R9),
            _ => Location::Memory(GPR::RBP, (16 + (idx - 6) * 8) as i32),
        }
    }
}
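// A minimal sketch (not part of the original file) of the calling convention
// encoded by `get_param_location`: the first six integer arguments follow the
// System V AMD64 register order, and later arguments live above the saved RBP
// and return address, hence the `16 +` base offset. Assumes `Location` and
// `GPR` derive `PartialEq`/`Debug`, as they do in `emitter_x64`.
#[cfg(test)]
mod param_location_sketch {
    use super::*;

    #[test]
    fn params_follow_sysv_order() {
        assert_eq!(Machine::get_param_location(0), Location::GPR(GPR::RDI));
        assert_eq!(Machine::get_param_location(5), Location::GPR(GPR::R9));
        // The seventh argument (idx 6) is the first stack argument.
        assert_eq!(Machine::get_param_location(6), Location::Memory(GPR::RBP, 16));
        assert_eq!(Machine::get_param_location(7), Location::Memory(GPR::RBP, 24));
    }
}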