Change memory access api again

Lachlan Sneff 2019-02-04 23:07:58 -08:00
parent 563cda7ba2
commit 00db5410c5
10 changed files with 465 additions and 241 deletions

View File

@@ -3,7 +3,7 @@ use crate::{
export::{Context, Export},
global::Global,
import::ImportObject,
memory::{Memory, MemoryVariant},
memory::Memory,
module::{ImportName, ModuleInner},
structures::{BoxedMap, Map, SliceMap, TypedIndex},
table::Table,
@@ -17,7 +17,7 @@ use std::slice;
#[derive(Debug)]
pub struct LocalBacking {
pub(crate) memories: BoxedMap<LocalMemoryIndex, MemoryVariant>,
pub(crate) memories: BoxedMap<LocalMemoryIndex, Memory>,
pub(crate) tables: BoxedMap<LocalTableIndex, Table>,
pub(crate) globals: BoxedMap<LocalGlobalIndex, Global>,
@@ -57,26 +57,11 @@ impl LocalBacking {
}
}
fn generate_memories(module: &ModuleInner) -> BoxedMap<LocalMemoryIndex, MemoryVariant> {
fn generate_memories(module: &ModuleInner) -> BoxedMap<LocalMemoryIndex, Memory> {
let mut memories = Map::with_capacity(module.memories.len());
for (_, &desc) in &module.memories {
// If we use emscripten, we set a fixed initial and maximum
// let memory = if options.abi == InstanceABI::Emscripten {
// // We use MAX_PAGES, so at the end the result is:
// // (initial * Memory::PAGE_SIZE) == Memory::DEFAULT_HEAP_SIZE
// // However, it should be: (initial * Memory::PAGE_SIZE) == 16777216
// Memory::new(Memory::MAX_PAGES, None)
// } else {
// Memory::new(memory.minimum, memory.maximum.map(|m| m as u32))
// };
let memory_variant = if desc.shared {
MemoryVariant::Shared(Memory::new(desc).expect("unable to create memory"))
} else {
MemoryVariant::Unshared(Memory::new(desc).expect("unable to create memory"))
};
memories.push(memory_variant);
memories.push(Memory::new(desc).expect("unable to create memory"));
}
memories.into_boxed_map()
@@ -85,7 +70,7 @@ impl LocalBacking {
fn finalize_memories(
module: &ModuleInner,
imports: &ImportBacking,
memories: &mut SliceMap<LocalMemoryIndex, MemoryVariant>,
memories: &mut SliceMap<LocalMemoryIndex, Memory>,
) -> BoxedMap<LocalMemoryIndex, *mut vm::LocalMemory> {
// For each init that has some data...
for init in module
@@ -112,13 +97,13 @@ impl LocalBacking {
assert!(memory_desc.minimum.bytes().0 >= data_top);
let mem = &memories[local_memory_index];
match mem {
MemoryVariant::Unshared(unshared_mem) => unshared_mem.access_mut()
[init_base..init_base + init.data.len()]
.copy_from_slice(&init.data),
MemoryVariant::Shared(shared_mem) => shared_mem.access_mut()
[init_base..init_base + init.data.len()]
.copy_from_slice(&init.data),
for (mem_byte, data_byte) in mem
.view(init_base..init_base + init.data.len())
.unwrap()
.iter()
.zip(init.data.iter())
{
mem_byte.set(*data_byte);
}
}
LocalOrImport::Import(imported_memory_index) => {
@@ -139,10 +124,7 @@ impl LocalBacking {
memories
.iter_mut()
.map(|(_, mem)| match mem {
MemoryVariant::Unshared(unshared_mem) => unshared_mem.vm_local_memory(),
MemoryVariant::Shared(shared_mem) => shared_mem.vm_local_memory(),
})
.map(|(_, mem)| mem.vm_local_memory())
.collect::<Map<_, _>>()
.into_boxed_map()
}
@@ -298,7 +280,7 @@ impl LocalBacking {
#[derive(Debug)]
pub struct ImportBacking {
pub(crate) memories: BoxedMap<ImportedMemoryIndex, MemoryVariant>,
pub(crate) memories: BoxedMap<ImportedMemoryIndex, Memory>,
pub(crate) tables: BoxedMap<ImportedTableIndex, Table>,
pub(crate) globals: BoxedMap<ImportedGlobalIndex, Global>,
@@ -433,7 +415,7 @@ fn import_memories(
module: &ModuleInner,
imports: &ImportObject,
) -> LinkResult<(
BoxedMap<ImportedMemoryIndex, MemoryVariant>,
BoxedMap<ImportedMemoryIndex, Memory>,
BoxedMap<ImportedMemoryIndex, *mut vm::LocalMemory>,
)> {
let mut link_errors = vec![];
@@ -446,23 +428,16 @@ fn import_memories(
.get_namespace(&namespace)
.and_then(|namespace| namespace.get_export(&name));
match memory_import {
Some(Export::Memory(mut memory)) => {
let (descriptor, vm_local_memory) = match &mut memory {
MemoryVariant::Unshared(unshared_mem) => {
(unshared_mem.descriptor(), unshared_mem.vm_local_memory())
}
MemoryVariant::Shared(_) => unimplemented!(),
};
if expected_memory_desc.fits_in_imported(descriptor) {
Some(Export::Memory(memory)) => {
if expected_memory_desc.fits_in_imported(memory.descriptor()) {
memories.push(memory.clone());
vm_memories.push(vm_local_memory);
vm_memories.push(memory.vm_local_memory());
} else {
link_errors.push(LinkError::IncorrectMemoryDescriptor {
namespace: namespace.clone(),
name: name.clone(),
expected: *expected_memory_desc,
found: descriptor,
found: memory.descriptor(),
});
}
}

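The element-wise init copy above is the visible payoff of the new API: instead of borrowing a raw &mut [u8] through access_mut(), finalize_memories asks for a MemoryView<u8> and writes each byte through a Cell<u8>. A minimal sketch of the same pattern against the public API (the helper name write_bytes is hypothetical):

use wasmer_runtime_core::memory::Memory;

// Sketch: copy `data` into linear memory at `offset`, mirroring the
// element-wise loop in `finalize_memories` above.
fn write_bytes(memory: &Memory, offset: usize, data: &[u8]) {
    let view = memory
        .view::<u8, _>(offset..offset + data.len())
        .expect("init range out of bounds");
    for (cell, byte) in view.iter().zip(data) {
        cell.set(*byte); // Cell<u8>: interior mutability, no &mut borrow
    }
}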
View File

@@ -1,5 +1,5 @@
use crate::{
global::Global, instance::InstanceInner, memory::MemoryVariant, module::ExportIndex,
global::Global, instance::InstanceInner, memory::Memory, module::ExportIndex,
module::ModuleInner, table::Table, types::FuncSig, vm,
};
use hashbrown::hash_map;
@@ -18,7 +18,7 @@ pub enum Export {
ctx: Context,
signature: Arc<FuncSig>,
},
Memory(MemoryVariant),
Memory(Memory),
Table(Table),
Global(Global),
}

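With Export::Memory now carrying a plain Memory, host code no longer has to name a shared or unshared variant when exporting one. A sketch of wiring a host-created memory into an ImportObject, assuming the imports! macro syntax of this era of the API:

use wasmer_runtime_core::{
    import::ImportObject, imports, memory::Memory, types::MemoryDescriptor, units::Pages,
};

fn memory_imports() -> ImportObject {
    let desc = MemoryDescriptor {
        minimum: Pages(1),
        maximum: Some(Pages(1)),
        shared: false,
    };
    let memory = Memory::new(desc).expect("failed to create memory");
    // `Memory: IsExport` (see the impl in memory/mod.rs below), so it can
    // be placed in a namespace directly.
    imports! {
        "env" => {
            "memory" => memory,
        },
    }
}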
View File

@@ -5,7 +5,7 @@ use crate::{
export::{Context, Export, ExportIter, FuncPointer},
global::Global,
import::{ImportObject, LikeNamespace},
memory::MemoryVariant,
memory::Memory,
module::{ExportIndex, Module, ModuleInner},
table::Table,
typed_func::{Func, Safe, WasmTypeList},
@@ -372,7 +372,7 @@ impl InstanceInner {
(unsafe { FuncPointer::new(func_ptr) }, ctx, signature)
}
fn get_memory_from_index(&self, module: &ModuleInner, mem_index: MemoryIndex) -> MemoryVariant {
fn get_memory_from_index(&self, module: &ModuleInner, mem_index: MemoryIndex) -> Memory {
match mem_index.local_or_import(module) {
LocalOrImport::Local(local_mem_index) => self.backing.memories[local_mem_index].clone(),
LocalOrImport::Import(imported_mem_index) => {

View File

@@ -0,0 +1,262 @@
//! This is mostly copied from https://docs.rs/integer-atomics/1.0.2/src/integer_atomics/atomic.rs.html
//! Many thanks to "main()" for writing this.
use std::cell::UnsafeCell;
use std::mem;
use std::num::Wrapping;
use std::ops::{Add, BitAnd, BitOr, BitXor, Sub};
use std::panic::RefUnwindSafe;
use std::sync::atomic::{AtomicUsize, Ordering};
pub trait IntCast:
Copy
+ Eq
+ Add<Output = Self>
+ BitAnd<Output = Self>
+ BitOr<Output = Self>
+ BitXor<Output = Self>
+ Sub<Output = Self>
{
type Public: PartialEq + Copy;
fn from(u: usize) -> Self;
fn to(self) -> usize;
fn new(p: Self::Public) -> Self;
fn unwrap(self) -> Self::Public;
}
macro_rules! intcast {
($($type:ident)+) => {
$(
impl IntCast for Wrapping<$type> {
type Public = $type;
fn from(u: usize) -> Self {
Wrapping(u as $type)
}
fn to(self) -> usize {
self.0 as usize
}
fn new(p: $type) -> Self {
Wrapping(p)
}
fn unwrap(self) -> $type {
self.0
}
}
)+
}
}
intcast! { u8 i8 u16 i16 u32 i32 u64 i64 }
pub struct Atomic<T> {
v: UnsafeCell<Wrapping<T>>,
}
impl<T: Default + IntCast> Default for Atomic<T> {
fn default() -> Self {
Self::new(T::default().unwrap())
}
}
// TODO: impl Debug
unsafe impl<T> Sync for Atomic<T> {}
impl<T> RefUnwindSafe for Atomic<T> {}
fn inject<T>(a: usize, b: usize, offset: usize) -> usize {
let mask = ((1 << (mem::size_of::<T>() * 8)) - 1) << offset;
(a & !mask) | (b << offset)
}
// straight from libcore's atomic.rs
#[inline]
fn strongest_failure_ordering(order: Ordering) -> Ordering {
use self::Ordering::*;
match order {
Release => Relaxed,
Relaxed => Relaxed,
SeqCst => SeqCst,
Acquire => Acquire,
AcqRel => Acquire,
_ => unreachable!(),
}
}
impl<T: IntCast> Atomic<T> {
#[inline]
fn proxy(&self) -> (&AtomicUsize, usize) {
let ptr = self.v.get() as usize;
let aligned = ptr & !(mem::size_of::<usize>() - 1);
(
unsafe { &*(aligned as *const AtomicUsize) },
(ptr - aligned) * 8,
)
}
#[inline]
pub(super) fn new(v: T::Public) -> Self {
Atomic {
v: UnsafeCell::new(Wrapping(T::new(v))),
}
}
#[inline]
pub fn get_mut(&mut self) -> &mut T::Public {
unsafe { &mut *(self.v.get() as *mut T::Public) }
}
#[inline]
pub fn into_inner(self) -> T::Public {
self.v.into_inner().0.unwrap()
}
#[inline]
pub fn load(&self, order: Ordering) -> T::Public {
let (p, o) = self.proxy();
T::from(p.load(order) >> o).unwrap()
}
#[inline]
fn op<F: Fn(T) -> Option<T>>(&self, f: F, order: Ordering) -> T::Public {
self.op_new(f, order, strongest_failure_ordering(order))
}
#[inline]
fn op_new<F: Fn(T) -> Option<T>>(
&self,
f: F,
success: Ordering,
failure: Ordering,
) -> T::Public {
let (p, o) = self.proxy();
let mut old = p.load(Ordering::Relaxed);
loop {
let old_t = T::from(old >> o);
let new_t = match f(old_t) {
Some(x) => x,
None => return old_t.unwrap(),
};
match Self::op_weak(p, o, old, new_t, success, failure) {
Ok(()) => return T::from(old >> o).unwrap(),
Err(prev) => old = prev,
};
}
}
#[inline]
fn op_weak(
p: &AtomicUsize,
o: usize,
old: usize,
new_t: T,
success: Ordering,
failure: Ordering,
) -> Result<(), usize> {
let new = inject::<T>(old, new_t.to(), o);
p.compare_exchange_weak(old, new, success, failure)
.map(|_| ())
}
#[inline]
pub fn store(&self, val: T::Public, order: Ordering) {
self.op(|_| Some(T::new(val)), order);
}
#[inline]
pub fn swap(&self, val: T::Public, order: Ordering) -> T::Public {
self.op(|_| Some(T::new(val)), order)
}
#[inline]
pub fn compare_and_swap(
&self,
current: T::Public,
new: T::Public,
order: Ordering,
) -> T::Public {
self.op(
|x| {
if x == T::new(current) {
Some(T::new(new))
} else {
None
}
},
order,
)
}
#[inline]
pub fn compare_exchange(
&self,
current: T::Public,
new: T::Public,
success: Ordering,
failure: Ordering,
) -> Result<T::Public, T::Public> {
match self.op_new(
|x| {
if x == T::new(current) {
Some(T::new(new))
} else {
None
}
},
success,
failure,
) {
x if x == current => Ok(x),
x => Err(x),
}
}
#[inline]
pub fn compare_exchange_weak(
&self,
current: T::Public,
new: T::Public,
success: Ordering,
failure: Ordering,
) -> Result<T::Public, T::Public> {
let (p, o) = self.proxy();
let old = p.load(Ordering::Relaxed);
let old_t = T::from(old >> o).unwrap();
if old_t != current {
return Err(old_t);
}
Self::op_weak(p, o, old, T::new(new), success, failure)
.map(|()| current)
.map_err(|x| T::from(x >> o).unwrap())
}
#[inline]
pub fn fetch_add(&self, val: T::Public, order: Ordering) -> T::Public {
self.op(|x| Some(x + T::new(val)), order)
}
#[inline]
pub fn fetch_sub(&self, val: T::Public, order: Ordering) -> T::Public {
self.op(|x| Some(x - T::new(val)), order)
}
#[inline]
pub fn fetch_and(&self, val: T::Public, order: Ordering) -> T::Public {
self.op(|x| Some(x & T::new(val)), order)
}
#[inline]
pub fn fetch_or(&self, val: T::Public, order: Ordering) -> T::Public {
self.op(|x| Some(x | T::new(val)), order)
}
#[inline]
pub fn fetch_xor(&self, val: T::Public, order: Ordering) -> T::Public {
self.op(|x| Some(x ^ T::new(val)), order)
}
}

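The core trick in this file is to perform every narrow atomic operation on the aligned machine word that contains the value: proxy locates that word plus the value's bit offset, and inject splices a new narrow value into the old word. A standalone restatement of the splice step (inject itself is private to the module):

use std::mem;

// Splice `b` (a value occupying size_of::<T>() bytes) into word `a` at
// bit offset `offset`, leaving the other lanes of the word intact.
fn inject<T>(a: usize, b: usize, offset: usize) -> usize {
    let mask = ((1 << (mem::size_of::<T>() * 8)) - 1) << offset;
    (a & !mask) | (b << offset)
}

fn main() {
    let word: usize = 0xAABB_CCDD;
    // Replace the u8 lane at bit offset 8 (currently 0xCC) with 0x11.
    assert_eq!(inject::<u8>(word, 0x11, 8), 0xAABB_11DD);
}

The compare-exchange loop in op_new then retries whenever another thread changed any lane of the shared word, which is correct but can cause false contention between neighboring values.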
View File

@@ -4,7 +4,7 @@ use crate::{
import::IsExport,
memory::dynamic::DYNAMIC_GUARD_SIZE,
memory::static_::{SAFE_STATIC_GUARD_SIZE, SAFE_STATIC_HEAP_SIZE},
types::MemoryDescriptor,
types::{MemoryDescriptor, ValueType},
units::Pages,
vm,
};
@@ -12,79 +12,36 @@ use std::{
cell::{Cell, Ref, RefCell, RefMut},
fmt,
marker::PhantomData,
ops::{Deref, DerefMut},
mem,
ops::{Bound, Deref, DerefMut, Index, RangeBounds},
ptr,
rc::Rc,
slice,
};
pub use self::atomic::Atomic;
pub use self::dynamic::DynamicMemory;
pub use self::static_::{SharedStaticMemory, StaticMemory};
pub use self::view::{Atomically, MemoryView};
mod atomic;
mod dynamic;
mod static_;
mod view;
pub trait MemoryImpl<'a>: Clone {
type Access: Deref<Target = [u8]>;
type AccessMut: DerefMut<Target = [u8]>;
fn new(desc: MemoryDescriptor) -> Result<Self, CreationError>;
fn grow(&'a self, delta: Pages) -> Option<Pages>;
fn size(&'a self) -> Pages;
fn vm_local_memory(&'a self) -> *mut vm::LocalMemory;
fn access(&'a self) -> Self::Access;
fn access_mut(&'a self) -> Self::AccessMut;
#[derive(Clone)]
enum MemoryVariant {
Unshared(UnsharedMemory),
Shared(SharedMemory),
}
pub trait SharedPolicy
where
Self: Sized,
for<'a> Self::Memory: MemoryImpl<'a>,
{
const SHARED: bool;
type Memory;
fn transform_variant(variants: &MemoryVariant) -> &Memory<Self>;
}
pub struct Shared;
impl SharedPolicy for Shared {
const SHARED: bool = true;
type Memory = SharedMemory;
fn transform_variant(variants: &MemoryVariant) -> &Memory<Self> {
match variants {
MemoryVariant::Shared(shared_mem) => shared_mem,
MemoryVariant::Unshared(_) => {
panic!("cannot transform unshared memory to shared memory")
}
}
}
}
pub struct Unshared;
impl SharedPolicy for Unshared {
const SHARED: bool = false;
type Memory = UnsharedMemory;
fn transform_variant(variants: &MemoryVariant) -> &Memory<Self> {
match variants {
MemoryVariant::Unshared(unshared_mem) => unshared_mem,
MemoryVariant::Shared(_) => panic!("cannot transform shared memory to unshared memory"),
}
}
}
unsafe impl Send for Memory<Shared> {}
unsafe impl Sync for Memory<Shared> {}
pub struct Memory<S = Unshared>
where
S: SharedPolicy,
{
#[derive(Clone)]
pub struct Memory {
desc: MemoryDescriptor,
memory: S::Memory,
_phantom: PhantomData<S>,
variant: MemoryVariant,
}
impl<S> Memory<S>
where
S: SharedPolicy,
{
impl Memory {
/// Create a new `Memory` from a [`MemoryDescriptor`]
///
/// [`MemoryDescriptor`]: struct.MemoryDescriptor.html
@@ -103,22 +60,18 @@ where
/// shared: false,
/// };
///
/// let memory: Memory = Memory::new(descriptor)?;
/// let memory = Memory::new(descriptor)?;
/// # Ok(())
/// # }
/// ```
pub fn new(desc: MemoryDescriptor) -> Result<Memory<S>, CreationError> {
assert_eq!(
desc.shared,
S::SHARED,
"type parameter must match description"
);
pub fn new(desc: MemoryDescriptor) -> Result<Self, CreationError> {
let variant = if !desc.shared {
MemoryVariant::Unshared(UnsharedMemory::new(desc)?)
} else {
MemoryVariant::Shared(SharedMemory::new(desc)?)
};
Ok(Memory {
desc,
memory: S::Memory::new(desc)?,
_phantom: PhantomData,
})
Ok(Memory { desc, variant })
}
/// Return the [`MemoryDescriptor`] that this memory
@@ -131,51 +84,81 @@ where
/// Grow this memory by the specified number of pages.
pub fn grow(&self, delta: Pages) -> Option<Pages> {
self.memory.grow(delta)
match &self.variant {
MemoryVariant::Unshared(unshared_mem) => unshared_mem.grow(delta),
MemoryVariant::Shared(shared_mem) => shared_mem.grow(delta),
}
}
/// The size, in wasm pages, of this memory.
pub fn size(&self) -> Pages {
self.memory.size()
match &self.variant {
MemoryVariant::Unshared(unshared_mem) => unshared_mem.size(),
MemoryVariant::Shared(shared_mem) => shared_mem.size(),
}
}
pub fn access(&self) -> <S::Memory as MemoryImpl>::Access {
self.memory.access()
pub fn view<T: ValueType, R: RangeBounds<usize>>(&self, range: R) -> Option<MemoryView<T>> {
let vm::LocalMemory {
base,
bound,
memory: _,
} = unsafe { *self.vm_local_memory() };
let range_start = match range.start_bound() {
Bound::Included(start) => *start,
Bound::Excluded(start) => *start + 1,
Bound::Unbounded => 0,
};
let range_end = match range.end_bound() {
Bound::Included(end) => *end + 1,
Bound::Excluded(end) => *end,
Bound::Unbounded => bound as usize,
};
let length = range_end - range_start;
let size_in_bytes = mem::size_of::<T>() * length;
if range_end < range_start || range_start + size_in_bytes >= bound {
return None;
}
Some(unsafe { MemoryView::new(base as _, length as u32) })
}
pub fn access_mut(&self) -> <S::Memory as MemoryImpl>::AccessMut {
self.memory.access_mut()
pub fn shared(self) -> Option<SharedMemory> {
if self.desc.shared {
Some(SharedMemory { desc: self.desc })
} else {
None
}
}
pub(crate) fn vm_local_memory(&self) -> *mut vm::LocalMemory {
self.memory.vm_local_memory()
}
}
impl IsExport for Memory<Unshared> {
fn to_export(&self) -> Export {
Export::Memory(MemoryVariant::Unshared(self.clone()))
}
}
impl IsExport for Memory<Shared> {
fn to_export(&self) -> Export {
Export::Memory(MemoryVariant::Shared(self.clone()))
}
}
impl<S> Clone for Memory<S>
where
S: SharedPolicy,
{
fn clone(&self) -> Self {
Self {
desc: self.desc,
memory: self.memory.clone(),
_phantom: PhantomData,
match &self.variant {
MemoryVariant::Unshared(unshared_mem) => unshared_mem.vm_local_memory(),
MemoryVariant::Shared(shared_mem) => unimplemented!(),
}
}
}
impl IsExport for Memory {
fn to_export(&self) -> Export {
Export::Memory(self.clone())
}
}
impl fmt::Debug for Memory {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
f.debug_struct("Memory")
.field("desc", &self.desc)
.field("size", &self.size())
.finish()
}
}
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum MemoryType {
Dynamic,
@@ -201,24 +184,6 @@ impl MemoryType {
}
}
impl<S> fmt::Debug for Memory<S>
where
S: SharedPolicy,
{
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
f.debug_struct("Memory")
.field("desc", &self.desc)
.field("size", &self.size())
.finish()
}
}
#[derive(Debug, Clone)]
pub enum MemoryVariant {
Unshared(Memory<Unshared>),
Shared(Memory<Shared>),
}
enum UnsharedMemoryStorage {
Dynamic(Box<DynamicMemory>),
Static(Box<StaticMemory>),
@@ -233,11 +198,8 @@ struct UnsharedMemoryInternal {
local: Cell<vm::LocalMemory>,
}
impl<'a> MemoryImpl<'a> for UnsharedMemory {
type Access = Ref<'a, [u8]>;
type AccessMut = RefMut<'a, [u8]>;
fn new(desc: MemoryDescriptor) -> Result<Self, CreationError> {
impl UnsharedMemory {
pub fn new(desc: MemoryDescriptor) -> Result<Self, CreationError> {
let mut local = vm::LocalMemory {
base: ptr::null_mut(),
bound: 0,
@@ -262,7 +224,7 @@ impl<'a> MemoryImpl<'a> for UnsharedMemory {
})
}
fn grow(&self, delta: Pages) -> Option<Pages> {
pub fn grow(&self, delta: Pages) -> Option<Pages> {
let mut storage = self.internal.storage.borrow_mut();
let mut local = self.internal.local.get();
@@ -279,7 +241,7 @@ impl<'a> MemoryImpl<'a> for UnsharedMemory {
pages
}
fn size(&self) -> Pages {
pub fn size(&self) -> Pages {
let storage = self.internal.storage.borrow();
match &*storage {
@@ -288,29 +250,9 @@ impl<'a> MemoryImpl<'a> for UnsharedMemory {
}
}
fn vm_local_memory(&self) -> *mut vm::LocalMemory {
pub(crate) fn vm_local_memory(&self) -> *mut vm::LocalMemory {
self.internal.local.as_ptr()
}
fn access(&'a self) -> Ref<'a, [u8]> {
Ref::map(
self.internal.storage.borrow(),
|memory_storage| match memory_storage {
UnsharedMemoryStorage::Dynamic(dynamic_memory) => dynamic_memory.as_slice(),
UnsharedMemoryStorage::Static(static_memory) => static_memory.as_slice(),
},
)
}
fn access_mut(&'a self) -> RefMut<'a, [u8]> {
RefMut::map(
self.internal.storage.borrow_mut(),
|memory_storage| match memory_storage {
UnsharedMemoryStorage::Dynamic(dynamic_memory) => dynamic_memory.as_slice_mut(),
UnsharedMemoryStorage::Static(static_memory) => static_memory.as_slice_mut(),
},
)
}
}
impl Clone for UnsharedMemory {
@@ -321,33 +263,28 @@ impl Clone for UnsharedMemory {
}
}
pub struct SharedMemory {}
pub struct SharedMemory {
desc: MemoryDescriptor,
}
impl<'a> MemoryImpl<'a> for SharedMemory {
type Access = Vec<u8>;
type AccessMut = Vec<u8>;
impl SharedMemory {
fn new(desc: MemoryDescriptor) -> Result<Self, CreationError> {
Ok(Self { desc })
}
fn new(_desc: MemoryDescriptor) -> Result<Self, CreationError> {
pub fn grow(&self, _delta: Pages) -> Option<Pages> {
unimplemented!()
}
fn grow(&self, _delta: Pages) -> Option<Pages> {
pub fn size(&self) -> Pages {
unimplemented!()
}
fn size(&self) -> Pages {
pub unsafe fn as_slice(&self) -> &[u8] {
unimplemented!()
}
fn vm_local_memory(&self) -> *mut vm::LocalMemory {
unimplemented!()
}
fn access(&self) -> Vec<u8> {
unimplemented!()
}
fn access_mut(&self) -> Vec<u8> {
pub unsafe fn as_slice_mut(&self) -> &mut [u8] {
unimplemented!()
}
}

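Taken together, this file replaces Memory<S: SharedPolicy> with a single Memory that dispatches on a private MemoryVariant, and replaces access/access_mut with typed, range-checked views. A minimal sketch of the new surface (descriptor values borrowed from the example at the end of this commit):

use wasmer_runtime_core::{memory::Memory, types::MemoryDescriptor, units::Pages};

fn main() {
    let desc = MemoryDescriptor {
        minimum: Pages(1),
        maximum: Some(Pages(1)),
        shared: false,
    };
    let memory = Memory::new(desc).expect("failed to create memory");

    // A typed view over the first four i32 slots (16 bytes).
    let view = memory.view::<i32, _>(0..4).expect("out of bounds");
    view[0].set(42); // writes go through Cell<i32>, no &mut borrow
    assert_eq!(view[0].get(), 42);
}

Because views hand out Cells rather than slices, two live views may alias the same memory without violating Rust's borrow rules, which is what lets Memory be freely Clone.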
View File

@@ -0,0 +1,53 @@
use super::atomic::{Atomic, IntCast};
use crate::types::ValueType;
use std::{cell::Cell, marker::PhantomData, ops::Deref, slice};
pub trait Atomicity {}
pub struct Atomically;
impl Atomicity for Atomically {}
pub struct NonAtomically;
impl Atomicity for NonAtomically {}
pub struct MemoryView<'a, T: 'a, A = NonAtomically> {
ptr: *mut T,
length: usize,
_phantom: PhantomData<(&'a [Cell<T>], A)>,
}
impl<'a, T> MemoryView<'a, T, NonAtomically>
where
T: ValueType,
{
pub(super) unsafe fn new(ptr: *mut T, length: u32) -> Self {
Self {
ptr,
length: length as usize,
_phantom: PhantomData,
}
}
}
impl<'a, T> MemoryView<'a, T, NonAtomically> {
pub fn atomically(self) -> MemoryView<'a, T, Atomically> {
MemoryView {
ptr: self.ptr,
length: self.length,
_phantom: PhantomData,
}
}
}
impl<'a, T> Deref for MemoryView<'a, T, NonAtomically> {
type Target = [Cell<T>];
fn deref(&self) -> &[Cell<T>] {
unsafe { slice::from_raw_parts(self.ptr as *const Cell<T>, self.length) }
}
}
impl<'a, T: IntCast> Deref for MemoryView<'a, T, Atomically> {
type Target = [Atomic<T>];
fn deref(&self) -> &[Atomic<T>] {
unsafe { slice::from_raw_parts(self.ptr as *const Atomic<T>, self.length) }
}
}

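MemoryView is the only handle user code gets to the bytes: Memory::view constructs one, Deref exposes the elements as [Cell<T>], and atomically() re-types the same pointer as [Atomic<T>] for shared-memory use. A sketch of the intended atomic path; note that the atomic Deref requires T: IntCast, which as committed is only implemented for Wrapping<T>, so treat this as the intent rather than as guaranteed to compile against this exact revision:

use std::sync::atomic::Ordering;
use wasmer_runtime_core::memory::Memory;

// Sketch: treat the first u32 of guest memory as a shared counter.
fn bump(memory: &Memory) -> u32 {
    let view = memory.view::<u32, _>(0..1).expect("out of bounds");
    let atomic = view.atomically();
    // Deref target is [Atomic<u32>]; fetch_add returns the previous value.
    atomic[0].fetch_add(1, Ordering::SeqCst)
}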
View File

@@ -90,11 +90,28 @@ unsafe impl WasmExternType for f64 {
const TYPE: Type = Type::F64;
}
// pub trait IntegerAtomic
// where
// Self: Sized
// {
// type Primitive;
// fn add(&self, other: Self::Primitive) -> Self::Primitive;
// fn sub(&self, other: Self::Primitive) -> Self::Primitive;
// fn and(&self, other: Self::Primitive) -> Self::Primitive;
// fn or(&self, other: Self::Primitive) -> Self::Primitive;
// fn xor(&self, other: Self::Primitive) -> Self::Primitive;
// fn load(&self) -> Self::Primitive;
// fn store(&self, other: Self::Primitive) -> Self::Primitive;
// fn compare_exchange(&self, expected: Self::Primitive, new: Self::Primitive) -> Self::Primitive;
// fn swap(&self, other: Self::Primitive) -> Self::Primitive;
// }
pub enum ValueError {
BufferTooSmall,
}
pub trait ValueType: Copy + Clone
pub trait ValueType: Copy
where
Self: Sized,
{
@@ -128,24 +145,6 @@ macro_rules! convert_value_impl {
convert_value_impl!(u8, i8, u16, i16, u32, i32, u64, i64);
impl ValueType for f32 {
fn into_le(self, buffer: &mut [u8]) {
self.to_bits().into_le(buffer);
}
fn from_le(buffer: &[u8]) -> Result<Self, ValueError> {
Ok(f32::from_bits(<u32 as ValueType>::from_le(buffer)?))
}
}
impl ValueType for f64 {
fn into_le(self, buffer: &mut [u8]) {
self.to_bits().into_le(buffer);
}
fn from_le(buffer: &[u8]) -> Result<Self, ValueError> {
Ok(f64::from_bits(<u64 as ValueType>::from_le(buffer)?))
}
}
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum ElementType {
/// Any wasm function.

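ValueType is what gates which element types a MemoryView may carry: a value must round-trip through little-endian bytes via into_le/from_le. Note that this commit removes the f32/f64 impls, leaving only the integers covered by convert_value_impl!. A sketch of the contract:

use wasmer_runtime_core::types::{ValueError, ValueType};

fn round_trip() -> Result<(), ValueError> {
    let mut buf = [0u8; 4];
    // Serialize a u32 into the buffer in little-endian order...
    0xDEAD_BEEFu32.into_le(&mut buf);
    // ...and deserialize it back; fails with BufferTooSmall on short input.
    let back = <u32 as ValueType>::from_le(&buf)?;
    assert_eq!(back, 0xDEAD_BEEF);
    Ok(())
}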
View File

@@ -1,6 +1,6 @@
pub use crate::backing::{ImportBacking, LocalBacking};
use crate::{
memory::{Memory, SharedPolicy},
memory::Memory,
module::ModuleInner,
structures::TypedIndex,
types::{LocalOrImport, MemoryIndex},
@@ -114,22 +114,17 @@ impl Ctx {
/// first_memory.access()[0]
/// }
/// ```
pub fn memory<'a, S>(&'a self, mem_index: u32) -> &'a Memory<S>
where
S: SharedPolicy,
{
pub fn memory(&self, mem_index: u32) -> &Memory {
let module = unsafe { &*self.module };
let mem_index = MemoryIndex::new(mem_index as usize);
match mem_index.local_or_import(module) {
LocalOrImport::Local(local_mem_index) => unsafe {
let local_backing = &*self.local_backing;
let mem_variant = &local_backing.memories[local_mem_index];
S::transform_variant(mem_variant)
&local_backing.memories[local_mem_index]
},
LocalOrImport::Import(import_mem_index) => unsafe {
let import_backing = &*self.import_backing;
let mem_variant = &import_backing.memories[import_mem_index];
S::transform_variant(mem_variant)
&import_backing.memories[import_mem_index]
},
}
}

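Ctx::memory now returns a plain &Memory, so host functions no longer need a SharedPolicy type parameter to pick the variant. A sketch of an imported host function reading a guest string through the new view API (the function name and signature shape are assumptions about typical usage, not part of this commit):

use wasmer_runtime_core::vm::Ctx;

// Sketch: read `len` bytes at guest pointer `ptr` from memory 0.
fn print_str(ctx: &mut Ctx, ptr: u32, len: u32) {
    let memory = ctx.memory(0);
    let view = memory
        .view::<u8, _>(ptr as usize..(ptr as usize + len as usize))
        .expect("string out of bounds");
    let bytes: Vec<u8> = view.iter().map(|cell| cell.get()).collect();
    println!("{}", String::from_utf8_lossy(&bytes));
}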
View File

@@ -89,10 +89,13 @@ pub use wasmer_runtime_core::error;
pub use wasmer_runtime_core::Func;
pub use wasmer_runtime_core::{func, imports};
pub mod memory {
pub use wasmer_runtime_core::memory::{Atomic, Atomically, Memory, MemoryView};
}
pub mod wasm {
//! Various types exposed by the Wasmer Runtime.
pub use wasmer_runtime_core::global::Global;
pub use wasmer_runtime_core::memory::Memory;
pub use wasmer_runtime_core::table::Table;
pub use wasmer_runtime_core::types::{FuncSig, MemoryDescriptor, TableDescriptor, Type, Value};
}

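For users of the facade crate, the new memory module makes the view types importable directly from wasmer_runtime. A small sketch:

use std::cell::Cell;
use wasmer_runtime::memory::MemoryView;

// Sum the i32 slots of a view; MemoryView derefs to [Cell<i32>].
fn sum(view: &MemoryView<i32>) -> i32 {
    view.iter().map(Cell::get).sum()
}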
View File

@@ -16,7 +16,7 @@ fn main() -> Result<()> {
let wasm_binary = wat2wasm(IMPORT_MODULE.as_bytes()).expect("WAST not valid or malformed");
let inner_module = wasmer_runtime_core::compile_with(&wasm_binary, &CraneliftCompiler::new())?;
let memory: Memory = Memory::new(MemoryDescriptor {
let memory = Memory::new(MemoryDescriptor {
minimum: Pages(1),
maximum: Some(Pages(1)),
shared: false,