// wasmer/lib/clif-backend/src/resolver.rs
use crate::libcalls;
// 2019-01-17 21:09:05 +00:00
use crate::relocation::{Reloc, RelocSink, Relocation, RelocationType, TrapSink, VmCall};
// 2019-01-11 03:59:57 +00:00
use byteorder::{ByteOrder, LittleEndian};
// 2019-01-09 02:57:28 +00:00
use cranelift_codegen::{ir, isa, Context};
use std::mem;
use std::ptr::{write_unaligned, NonNull};
use wasmer_runtime::{
self,
backend::{self, Mmap, Protect},
error::{CompileError, CompileResult},
// 2019-01-17 01:59:12 +00:00
structures::Map,
types::LocalFuncIndex,
// 2019-01-09 02:57:28 +00:00
vm, vmcalls,
};
/// Intermediate product of compilation: holds the compiled code (not yet
/// executable) plus the bookkeeping needed to patch it in `finalize`.
#[allow(dead_code)]
pub struct FuncResolverBuilder {
    // Compiled code and per-function offsets; relocations not yet applied,
    // memory still read/write.
    resolver: FuncResolver,
    // Relocations recorded per local function during compilation,
    // applied by `finalize`.
    relocations: Map<LocalFuncIndex, Vec<Relocation>>,
    // Trap information recorded per local function during compilation.
    trap_sinks: Map<LocalFuncIndex, TrapSink>,
}
impl FuncResolverBuilder {
2019-01-09 02:57:28 +00:00
pub fn new(
isa: &isa::TargetIsa,
2019-01-17 01:59:12 +00:00
function_bodies: Map<LocalFuncIndex, ir::Function>,
) -> CompileResult<Self> {
let mut compiled_functions: Vec<Vec<u8>> = Vec::with_capacity(function_bodies.len());
let mut relocations = Map::with_capacity(function_bodies.len());
let mut trap_sinks = Map::with_capacity(function_bodies.len());
let mut ctx = Context::new();
let mut total_size = 0;
2019-01-17 01:59:12 +00:00
for (_, func) in function_bodies {
ctx.func = func;
let mut code_buf = Vec::new();
let mut reloc_sink = RelocSink::new();
let mut trap_sink = TrapSink::new();
2019-01-09 02:57:28 +00:00
ctx.compile_and_emit(isa, &mut code_buf, &mut reloc_sink, &mut trap_sink)
.map_err(|e| CompileError::InternalError { msg: e.to_string() })?;
ctx.clear();
// Round up each function's size to pointer alignment.
total_size += round_up(code_buf.len(), mem::size_of::<usize>());
compiled_functions.push(code_buf);
relocations.push(reloc_sink.func_relocs);
trap_sinks.push(trap_sink);
}
let mut memory = Mmap::with_size(total_size)
.map_err(|e| CompileError::InternalError { msg: e.to_string() })?;
unsafe {
memory
.protect(0..memory.size(), Protect::ReadWrite)
.map_err(|e| CompileError::InternalError { msg: e.to_string() })?;
}
// Normally, excess memory due to alignment and page-rounding would
// be filled with null-bytes. On x86 (and x86_64),
// "\x00\x00" disassembles to "add byte ptr [eax],al".
//
// If the instruction pointer falls out of its designated area,
// it would be better if it would immediately crash instead of
// continuing on and causing non-local issues.
//
// "\xCC" disassembles to "int3", which will immediately cause
// an interrupt that we can catch if we want.
for i in unsafe { memory.as_slice_mut() } {
*i = 0xCC;
}
let mut map = Map::with_capacity(compiled_functions.len());
let mut previous_end = 0;
for compiled in compiled_functions.iter() {
let new_end = previous_end + round_up(compiled.len(), mem::size_of::<usize>());
unsafe {
2019-01-09 02:57:28 +00:00
memory.as_slice_mut()[previous_end..previous_end + compiled.len()]
.copy_from_slice(&compiled[..]);
}
map.push(previous_end);
previous_end = new_end;
}
Ok(Self {
2019-01-17 01:59:12 +00:00
resolver: FuncResolver { map, memory },
relocations,
trap_sinks,
})
}
pub fn finalize(mut self) -> CompileResult<FuncResolver> {
for (index, relocs) in self.relocations.iter() {
for ref reloc in relocs {
let target_func_address: isize = match reloc.target {
2019-01-17 01:59:12 +00:00
RelocationType::Normal(local_func_index) => {
// This will always be an internal function
// because imported functions are not
// called in this way.
2019-01-17 01:59:12 +00:00
self.resolver.lookup(local_func_index).unwrap().as_ptr() as isize
}
RelocationType::LibCall(libcall) => match libcall {
ir::LibCall::CeilF32 => libcalls::ceilf32 as isize,
ir::LibCall::FloorF32 => libcalls::floorf32 as isize,
ir::LibCall::TruncF32 => libcalls::truncf32 as isize,
ir::LibCall::NearestF32 => libcalls::nearbyintf32 as isize,
ir::LibCall::CeilF64 => libcalls::ceilf64 as isize,
ir::LibCall::FloorF64 => libcalls::floorf64 as isize,
ir::LibCall::TruncF64 => libcalls::truncf64 as isize,
ir::LibCall::NearestF64 => libcalls::nearbyintf64 as isize,
ir::LibCall::Probestack => libcalls::__rust_probestack as isize,
_ => {
Err(CompileError::InternalError {
msg: format!("unexpected libcall: {}", libcall),
})?
}
},
RelocationType::Intrinsic(ref name) => {
Err(CompileError::InternalError {
msg: format!("unexpected intrinsic: {}", name),
})?
}
2019-01-17 21:09:05 +00:00
RelocationType::VmCall(vmcall) => match vmcall {
VmCall::LocalStaticMemoryGrow => vmcalls::local_static_memory_grow as _,
VmCall::LocalStaticMemorySize => vmcalls::local_static_memory_size as _,
VmCall::ImportedStaticMemoryGrow => {
vmcalls::imported_static_memory_grow as _
}
VmCall::ImportedStaticMemorySize => {
vmcalls::imported_static_memory_size as _
}
},
};
// We need the address of the current function
// because these calls are relative.
let func_addr = self.resolver.lookup(index).unwrap().as_ptr();
// Determine relocation type and apply relocation.
match reloc.reloc {
2019-01-10 01:32:02 +00:00
Reloc::Abs8 => {
let ptr_to_write = (target_func_address as u64)
.checked_add(reloc.addend as u64)
.unwrap();
2019-01-10 01:32:02 +00:00
let empty_space_offset = self.resolver.map[index] + reloc.offset as usize;
let ptr_slice = unsafe {
2019-01-11 03:59:57 +00:00
&mut self.resolver.memory.as_slice_mut()
[empty_space_offset..empty_space_offset + 8]
2019-01-10 01:32:02 +00:00
};
2019-01-11 03:59:57 +00:00
LittleEndian::write_u64(ptr_slice, ptr_to_write);
}
Reloc::X86PCRel4 => unsafe {
let reloc_address = func_addr.offset(reloc.offset as isize) as isize;
let reloc_addend = reloc.addend as isize;
// TODO: Handle overflow.
let reloc_delta_i32 =
(target_func_address - reloc_address + reloc_addend) as i32;
write_unaligned(reloc_address as *mut i32, reloc_delta_i32);
},
_ => Err(CompileError::InternalError {
msg: format!("unsupported reloc kind: {}", reloc.reloc),
})?,
}
}
}
unsafe {
2019-01-09 02:57:28 +00:00
self.resolver
.memory
.protect(0..self.resolver.memory.size(), Protect::ReadExec)
.map_err(|e| CompileError::InternalError { msg: e.to_string() })?;;
}
Ok(self.resolver)
}
}
/// Resolves a function index to a function address.
pub struct FuncResolver {
    // Byte offset of each local function's code within `memory`.
    map: Map<LocalFuncIndex, usize>,
    // Mmap region holding all compiled function bodies; made
    // read+execute by `FuncResolverBuilder::finalize`.
    memory: Mmap,
}
impl FuncResolver {
2019-01-16 18:26:10 +00:00
fn lookup(&self, local_func_index: LocalFuncIndex) -> Option<NonNull<vm::Func>> {
let offset = *self.map.get(local_func_index)?;
2019-01-09 02:57:28 +00:00
let ptr = unsafe { self.memory.as_ptr().add(offset) };
NonNull::new(ptr).map(|nonnull| nonnull.cast())
}
}
// Implements FuncResolver trait.
impl backend::FuncResolver for FuncResolver {
    /// Resolves a local function index to a callable code pointer by
    /// delegating to the internal offset-table lookup. The module
    /// parameter is unused by this backend.
    fn get(
        &self,
        _module: &wasmer_runtime::module::ModuleInner,
        index: LocalFuncIndex,
    ) -> Option<NonNull<vm::Func>> {
        self.lookup(index)
    }
}
/// Rounds `n` up to the nearest multiple of `multiple`.
///
/// `multiple` must be a power of two (callers pass
/// `mem::size_of::<usize>()`): the bit-mask trick below is only correct
/// under that assumption, so it is checked in debug builds.
#[inline]
fn round_up(n: usize, multiple: usize) -> usize {
    debug_assert!(
        multiple.is_power_of_two(),
        "round_up requires a power-of-two multiple"
    );
    (n + multiple - 1) & !(multiple - 1)
}