feat: add a historical wit-bindgen

This commit is contained in:
2023-01-01 00:25:48 +08:00
parent 01e8f5a959
commit aa50d63aec
419 changed files with 45283 additions and 1 deletions

View File

@@ -0,0 +1,40 @@
use crate::Region;
use thiserror::Error;
/// Errors that can occur while reading or validating guest values in wasm
/// linear memory on behalf of generated bindings.
#[derive(Debug, Error, PartialEq, Eq)]
pub enum GuestError {
    /// A flags value contained bits outside the declared set.
    #[error("Invalid flag value {0}")]
    InvalidFlagValue(&'static str),
    /// A discriminant did not correspond to any declared enum case.
    #[error("Invalid enum value {0}")]
    InvalidEnumValue(&'static str),
    /// A pointer/length computation overflowed.
    #[error("Pointer overflow")]
    PtrOverflow,
    /// A region extended past the end of guest memory.
    #[error("Pointer out of bounds: {0:?}")]
    PtrOutOfBounds(Region),
    /// A region did not satisfy the required alignment (second field).
    #[error("Pointer not aligned to {1}: {0:?}")]
    PtrNotAligned(Region, u32),
    /// A region conflicted with an already-outstanding borrow.
    #[error("Pointer already borrowed: {0:?}")]
    PtrBorrowed(Region),
    /// The borrow checker ran out of handle space.
    #[error("Borrow checker out of handles")]
    BorrowCheckerOutOfHandles,
    /// Two slices expected to be the same length were not.
    #[error("Slice length mismatch")]
    SliceLengthsDiffer,
    /// Wraps an error with the function/location it occurred in.
    #[error("In func {funcname}:{location}:")]
    InFunc {
        funcname: &'static str,
        location: &'static str,
        #[source]
        err: Box<GuestError>,
    },
    /// Wraps an error with the data type and field it occurred in.
    #[error("In data {typename}.{field}:")]
    InDataField {
        typename: String,
        field: String,
        #[source]
        err: Box<GuestError>,
    },
    /// Guest memory contained invalid UTF-8 where a string was expected.
    #[error("Invalid UTF-8 encountered: {0:?}")]
    InvalidUtf8(#[from] ::std::str::Utf8Error),
    /// An integer did not fit in the destination type.
    #[error("Int conversion error: {0:?}")]
    TryFromIntError(#[from] ::std::num::TryFromIntError),
}

View File

@@ -0,0 +1,194 @@
use crate::AllBytesValid;
use std::cmp::Ordering;
use std::fmt;
use std::mem;
use std::slice;
/// Helper type representing a 1-byte-aligned little-endian value in memory.
///
/// This type is used in slice types for Wasmtime host bindings. Guest types are
/// not guaranteed to be either aligned or in the native endianness. This type
/// wraps these types and provides explicit getters/setters to interact with the
/// underlying value in a safe host-agnostic manner.
// `repr(packed)` drops the alignment requirement to 1 so that a `&[u8]` view
// of wasm linear memory can be reinterpreted as `&[Le<T>]` no matter what
// `T`'s natural alignment is (see `from_slice` below).
#[repr(packed)]
pub struct Le<T>(T);
impl<T> Le<T>
where
    T: Endian,
{
    /// Creates a new `Le<T>` value where the internals are stored in a way
    /// that's safe to copy into wasm linear memory.
    pub fn new(t: T) -> Le<T> {
        Le(t.into_le())
    }
    /// Reads the value stored in this `Le<T>`.
    ///
    /// This will perform a correct read even if the underlying memory is
    /// unaligned, and it will also convert to the host's endianness for the
    /// right representation of `T`.
    pub fn get(&self) -> T {
        // Reading the packed field copies it by value, so no unaligned
        // reference is ever created here.
        self.0.from_le()
    }
    /// Writes the `val` to this slot.
    ///
    /// This will work correctly even if the underlying memory is unaligned and
    /// it will also automatically convert the `val` provided to an endianness
    /// appropriate for WebAssembly (little-endian).
    pub fn set(&mut self, val: T) {
        self.0 = val.into_le();
    }
    /// Reinterprets raw little-endian `bytes` as a shared slice of `Le<T>`.
    ///
    /// `bytes.len()` must be a multiple of `size_of::<Le<T>>()` (asserted).
    pub(crate) fn from_slice(bytes: &[u8]) -> &[Le<T>] {
        // SAFETY: The invariants we uphold here are:
        //
        // * the lifetime of the input is the same as the output, so we're only
        //   dealing with valid memory.
        // * the alignment of the input is the same as the output (1)
        // * the input isn't being truncated and we're consuming all of it (it
        //   must be a multiple of the size of `Le<T>`)
        // * all byte-patterns for `Le<T>` are valid. This is guaranteed by the
        //   `AllBytesValid` supertrait of `Endian`.
        unsafe {
            assert_eq!(mem::align_of::<Le<T>>(), 1);
            assert!(bytes.len() % mem::size_of::<Le<T>>() == 0);
            // Compile-time-ish proof that `Le<T>: AllBytesValid` holds.
            fn all_bytes_valid<T: AllBytesValid>() {}
            all_bytes_valid::<Le<T>>();
            slice::from_raw_parts(
                bytes.as_ptr().cast::<Le<T>>(),
                bytes.len() / mem::size_of::<Le<T>>(),
            )
        }
    }
    /// Mutable counterpart of `from_slice`.
    pub(crate) fn from_slice_mut(bytes: &mut [u8]) -> &mut [Le<T>] {
        // SAFETY: see `from_slice` above
        //
        // Note that both the input and the output are `mut`, helping to
        // maintain the guarantee of uniqueness.
        unsafe {
            assert_eq!(mem::align_of::<Le<T>>(), 1);
            assert!(bytes.len() % mem::size_of::<Le<T>>() == 0);
            slice::from_raw_parts_mut(
                bytes.as_mut_ptr().cast::<Le<T>>(),
                bytes.len() / mem::size_of::<Le<T>>(),
            )
        }
    }
}
// Manual `Clone`/`Copy` impls: a derived `Clone` would require `T: Clone`
// and clone through a reference to the packed field; copying the whole
// wrapper by value avoids both issues and only needs `T: Copy`.
impl<T: Copy> Clone for Le<T> {
    fn clone(&self) -> Self {
        *self
    }
}
impl<T: Copy> Copy for Le<T> {}
// Comparisons are performed on the decoded (native-endian) values so that
// equality of `Le<T>` always agrees with equality of `T` itself.
impl<T: Endian + PartialEq> PartialEq for Le<T> {
    fn eq(&self, other: &Le<T>) -> bool {
        T::eq(&self.get(), &other.get())
    }
}
// Also allow comparing directly against a plain `T` without wrapping it.
impl<T: Endian + PartialEq> PartialEq<T> for Le<T> {
    fn eq(&self, other: &T) -> bool {
        &self.get() == other
    }
}
impl<T: Endian + Eq> Eq for Le<T> {}
// Ordering likewise delegates to the decoded values, never to the raw
// little-endian byte representation.
impl<T: Endian + PartialOrd> PartialOrd for Le<T> {
    fn partial_cmp(&self, other: &Le<T>) -> Option<Ordering> {
        let (lhs, rhs) = (self.get(), other.get());
        lhs.partial_cmp(&rhs)
    }
}
impl<T: Endian + Ord> Ord for Le<T> {
    fn cmp(&self, other: &Le<T>) -> Ordering {
        let (lhs, rhs) = (self.get(), other.get());
        lhs.cmp(&rhs)
    }
}
// Debug-format the decoded value, so `Le<u32>` prints like a `u32`.
impl<T: Endian + fmt::Debug> fmt::Debug for Le<T> {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        fmt::Debug::fmt(&self.get(), f)
    }
}
// Conversion from a host value performs the little-endian encoding.
impl<T: Endian> From<T> for Le<T> {
    fn from(value: T) -> Le<T> {
        Le::new(value)
    }
}
// SAFETY: `Le<T>` is a `#[repr(packed)]` newtype over a single `T`, so if
// every byte pattern is a valid `T` then every byte pattern is a valid
// `Le<T>` as well.
unsafe impl<T: AllBytesValid> AllBytesValid for Le<T> {}
/// Trait used for the implementation of the `Le` type.
///
/// The `AllBytesValid` supertrait is what makes the raw-byte reinterpretation
/// in `Le::from_slice` sound.
pub trait Endian: AllBytesValid + Copy + Sized {
    /// Converts this value and any aggregate fields (if any) into little-endian
    /// byte order
    fn into_le(self) -> Self;
    /// Converts this value and any aggregate fields (if any) from
    /// little-endian byte order
    fn from_le(self) -> Self;
}
// Implements `Endian` for fixed-width numeric primitives via their
// `{to,from}_{le,ne}_bytes` conversions.
macro_rules! primitives {
    ($($t:ident)*) => ($(
        impl Endian for $t {
            #[inline]
            fn into_le(self) -> Self {
                // `to_le_bytes` performs the host -> little-endian swap;
                // `from_ne_bytes` then reinterprets those bytes unchanged,
                // so the stored bit pattern is little-endian.
                Self::from_ne_bytes(self.to_le_bytes())
            }
            #[inline]
            fn from_le(self) -> Self {
                // Inverse of `into_le`: the in-memory bytes are
                // little-endian, so decode them as such.
                Self::from_le_bytes(self.to_ne_bytes())
            }
        }
    )*)
}
// All numeric types used by canonical wasm ABIs.
primitives! {
    u8 i8
    u16 i16
    u32 i32
    u64 i64
    f32 f64
}
// Implements `Endian` for tuples of `Endian` types by converting each
// element in place.
macro_rules! tuples {
    ($(($($t:ident)*))*) => ($(
        #[allow(non_snake_case)]
        impl <$($t:Endian,)*> Endian for ($($t,)*) {
            fn into_le(self) -> Self {
                let ($($t,)*) = self;
                ($($t.into_le(),)*)
            }
            fn from_le(self) -> Self {
                let ($($t,)*) = self;
                ($($t.from_le(),)*)
            }
        }
    )*)
}
// Tuples up to arity 10, matching the `AllBytesValid` tuple impls.
tuples! {
    ()
    (T1)
    (T1 T2)
    (T1 T2 T3)
    (T1 T2 T3 T4)
    (T1 T2 T3 T4 T5)
    (T1 T2 T3 T4 T5 T6)
    (T1 T2 T3 T4 T5 T6 T7)
    (T1 T2 T3 T4 T5 T6 T7 T8)
    (T1 T2 T3 T4 T5 T6 T7 T8 T9)
    (T1 T2 T3 T4 T5 T6 T7 T8 T9 T10)
}

View File

@@ -0,0 +1,248 @@
pub use wit_bindgen_wasmtime_impl::{export, import};
#[cfg(feature = "async")]
pub use async_trait::async_trait;
#[cfg(feature = "tracing-lib")]
pub use tracing_lib as tracing;
#[doc(hidden)]
pub use {anyhow, bitflags, wasmtime};
mod error;
mod le;
mod region;
mod slab;
mod table;
pub use error::GuestError;
pub use le::{Endian, Le};
pub use region::{AllBytesValid, BorrowChecker, Region};
pub use table::*;
#[doc(hidden)]
// Runtime support module used by the code emitted from the `import`/`export`
// macros. Not part of the public API contract.
pub mod rt {
    use crate::slab::Slab;
    use crate::{Endian, Le};
    use std::mem;
    use wasmtime::*;
    /// Minimal abstraction over a readable/writable view of wasm linear
    /// memory used by generated bindings to load and store guest values.
    pub trait RawMem {
        /// Stores a single `val` at byte `offset`, encoded little-endian.
        fn store<T: Endian>(&mut self, offset: i32, val: T) -> Result<(), Trap>;
        /// Stores all of `vals` contiguously starting at byte `offset`.
        fn store_many<T: Endian>(&mut self, offset: i32, vals: &[T]) -> Result<(), Trap>;
        /// Loads a single `T` from byte `offset`, decoded little-endian.
        fn load<T: Endian>(&self, offset: i32) -> Result<T, Trap>;
    }
    impl RawMem for [u8] {
        fn store<T: Endian>(&mut self, offset: i32, val: T) -> Result<(), Trap> {
            // Bounds-check `offset .. offset + size_of::<T>()` before writing.
            let mem = self
                .get_mut(offset as usize..)
                .and_then(|m| m.get_mut(..mem::size_of::<T>()))
                .ok_or_else(|| Trap::new("out of bounds write"))?;
            Le::from_slice_mut(mem)[0].set(val);
            Ok(())
        }
        fn store_many<T: Endian>(&mut self, offset: i32, val: &[T]) -> Result<(), Trap> {
            let mem = self
                .get_mut(offset as usize..)
                .and_then(|m| {
                    // `checked_mul` guards against overflow of the total
                    // byte length of the write.
                    let len = mem::size_of::<T>().checked_mul(val.len())?;
                    m.get_mut(..len)
                })
                .ok_or_else(|| Trap::new("out of bounds write"))?;
            for (slot, val) in Le::from_slice_mut(mem).iter_mut().zip(val) {
                slot.set(*val);
            }
            Ok(())
        }
        fn load<T: Endian>(&self, offset: i32) -> Result<T, Trap> {
            // `size_of::<Le<T>>() == size_of::<T>()` since `Le` is a packed
            // newtype; bounds-check then decode.
            let mem = self
                .get(offset as usize..)
                .and_then(|m| m.get(..mem::size_of::<Le<T>>()))
                .ok_or_else(|| Trap::new("out of bounds read"))?;
            Ok(Le::from_slice(mem)[0].get())
        }
    }
    /// Converts a guest-provided `i32` into a `char`, trapping on values
    /// outside the Unicode scalar range.
    pub fn char_from_i32(val: i32) -> Result<char, Trap> {
        core::char::from_u32(val as u32).ok_or_else(|| Trap::new("char value out of valid range"))
    }
    /// Trap constructor for an out-of-range variant discriminant.
    pub fn invalid_variant(name: &str) -> Trap {
        let msg = format!("invalid discriminant for `{}`", name);
        Trap::new(msg)
    }
    /// Validates that `bits` only contains bits present in `all`, then wraps
    /// them with `mk`; traps (naming the flags type) otherwise.
    pub fn validate_flags<T, U>(
        bits: T,
        all: T,
        name: &str,
        mk: impl FnOnce(T) -> U,
    ) -> Result<U, Trap>
    where
        T: std::ops::Not<Output = T> + std::ops::BitAnd<Output = T> + From<u8> + PartialEq + Copy,
    {
        if bits & !all != 0u8.into() {
            let msg = format!("invalid flags specified for `{}`", name);
            Err(Trap::new(msg))
        } else {
            Ok(mk(bits))
        }
    }
    /// Looks up the wasm export `func` on `caller` and requires it to be a
    /// function, trapping with a descriptive message otherwise.
    pub fn get_func<T>(caller: &mut Caller<'_, T>, func: &str) -> Result<Func, wasmtime::Trap> {
        let func = caller
            .get_export(func)
            .ok_or_else(|| {
                let msg = format!("`{}` export not available", func);
                Trap::new(msg)
            })?
            .into_func()
            .ok_or_else(|| {
                let msg = format!("`{}` export not a function", func);
                Trap::new(msg)
            })?;
        Ok(func)
    }
    /// Looks up the wasm export `mem` on `caller` and requires it to be a
    /// linear memory, trapping with a descriptive message otherwise.
    pub fn get_memory<T>(caller: &mut Caller<'_, T>, mem: &str) -> Result<Memory, wasmtime::Trap> {
        let mem = caller
            .get_export(mem)
            .ok_or_else(|| {
                let msg = format!("`{}` export not available", mem);
                Trap::new(msg)
            })?
            .into_memory()
            .ok_or_else(|| {
                let msg = format!("`{}` export not a memory", mem);
                Trap::new(msg)
            })?;
        Ok(mem)
    }
    /// Maps an integer-narrowing failure to a trap.
    pub fn bad_int(_: std::num::TryFromIntError) -> Trap {
        let msg = "out-of-bounds integer conversion";
        Trap::new(msg)
    }
    /// Copies `len` little-endian `T` values out of `memory` starting at
    /// guest address `base` into a host `Vec`.
    pub fn copy_slice<T: Endian>(
        store: impl AsContextMut,
        memory: &Memory,
        base: i32,
        len: i32,
        _align: i32,
    ) -> Result<Vec<T>, Trap> {
        // Total byte size, checked for overflow before indexing.
        let size = (len as u32)
            .checked_mul(mem::size_of::<T>() as u32)
            .ok_or_else(|| Trap::new("array too large to fit in wasm memory"))?;
        let slice = memory
            .data(&store)
            .get(base as usize..)
            .and_then(|s| s.get(..size as usize))
            .ok_or_else(|| Trap::new("out of bounds read"))?;
        Ok(Le::from_slice(slice).iter().map(|s| s.get()).collect())
    }
    // Generates `as_i32`-style helpers plus the `AsI32`-style traits used by
    // generated code to coerce host values into core wasm value types.
    macro_rules! as_traits {
        ($(($name:ident $tr:ident $ty:ident ($($tys:ident)*)))*) => ($(
            pub fn $name<T: $tr>(t: T) -> $ty {
                t.$name()
            }
            pub trait $tr {
                fn $name(self) -> $ty;
            }
            // Blanket impl so references to `Copy` values also convert.
            impl<'a, T: Copy + $tr> $tr for &'a T {
                fn $name(self) -> $ty {
                    (*self).$name()
                }
            }
            $(
                impl $tr for $tys {
                    #[inline]
                    fn $name(self) -> $ty {
                        self as $ty
                    }
                }
            )*
        )*)
    }
    as_traits! {
        (as_i32 AsI32 i32 (char i8 u8 i16 u16 i32 u32))
        (as_i64 AsI64 i64 (i64 u64))
        (as_f32 AsF32 f32 (f32))
        (as_f64 AsF64 f64 (f64))
    }
    /// Maps guest-visible `u32` handles to `ResourceIndex` values.
    #[derive(Default, Debug)]
    pub struct IndexSlab {
        slab: Slab<ResourceIndex>,
    }
    impl IndexSlab {
        /// Allocates a new handle for `resource`.
        pub fn insert(&mut self, resource: ResourceIndex) -> u32 {
            self.slab.insert(resource)
        }
        /// Resolves a handle, trapping if it was never allocated or already
        /// removed.
        pub fn get(&self, slab_idx: u32) -> Result<ResourceIndex, Trap> {
            match self.slab.get(slab_idx) {
                Some(idx) => Ok(*idx),
                None => Err(Trap::new("invalid index specified for handle")),
            }
        }
        /// Deallocates a handle, returning the index it referred to.
        pub fn remove(&mut self, slab_idx: u32) -> Result<ResourceIndex, Trap> {
            match self.slab.remove(slab_idx) {
                Some(idx) => Ok(idx),
                None => Err(Trap::new("invalid index specified for handle")),
            }
        }
    }
    /// Reference-counted storage of guest resource values (`i32` handles
    /// owned by the wasm module).
    #[derive(Default, Debug)]
    pub struct ResourceSlab {
        slab: Slab<Resource>,
    }
    #[derive(Debug)]
    struct Resource {
        // The wasm-side value this resource wraps.
        wasm: i32,
        // Number of live host handles to this resource.
        refcnt: u32,
    }
    /// Opaque index into a `ResourceSlab`.
    #[derive(Debug, Copy, Clone)]
    pub struct ResourceIndex(u32);
    impl ResourceSlab {
        /// Stores `wasm` with an initial reference count of 1.
        pub fn insert(&mut self, wasm: i32) -> ResourceIndex {
            ResourceIndex(self.slab.insert(Resource { wasm, refcnt: 1 }))
        }
        /// Returns the wasm value for `idx`; panics if `idx` is stale.
        pub fn get(&self, idx: ResourceIndex) -> i32 {
            self.slab.get(idx.0).unwrap().wasm
        }
        /// Increments the reference count, trapping on overflow.
        pub fn clone(&mut self, idx: ResourceIndex) -> Result<(), Trap> {
            let resource = self.slab.get_mut(idx.0).unwrap();
            resource.refcnt = match resource.refcnt.checked_add(1) {
                Some(cnt) => cnt,
                None => return Err(Trap::new("resource index count overflow")),
            };
            Ok(())
        }
        /// Decrements the reference count. Returns `Some(wasm)` only when
        /// the count reaches zero and the slot is freed, so the caller knows
        /// to run the guest-side destructor.
        pub fn drop(&mut self, idx: ResourceIndex) -> Option<i32> {
            let resource = self.slab.get_mut(idx.0).unwrap();
            assert!(resource.refcnt > 0);
            resource.refcnt -= 1;
            if resource.refcnt != 0 {
                return None;
            }
            let resource = self.slab.remove(idx.0).unwrap();
            Some(resource.wasm)
        }
    }
}

View File

@@ -0,0 +1,312 @@
use crate::rt::RawMem;
use crate::{Endian, GuestError, Le};
use std::collections::HashSet;
use std::convert::TryInto;
use std::marker;
use std::mem;
use wasmtime::Trap;
// This is a pretty naive way to account for borrows. This datastructure
// could be made a lot more efficient with some effort.
pub struct BorrowChecker<'a> {
    /// Set of regions currently handed out as shared (`&[T]`) borrows. A
    /// `HashSet` is probably not ideal for this but it works; checking
    /// `is_borrowed` is an O(n) iteration, which could be avoided by
    /// organizing borrows by an ordering of `Region`.
    shared_borrows: HashSet<Region>,
    // Regions currently handed out as mutable (`&mut [T]`) borrows; these
    // may not overlap with any other borrow, shared or mutable.
    mut_borrows: HashSet<Region>,
    // Ties the raw `ptr`/`len` pair below to the lifetime of the
    // `&'a mut [u8]` originally passed to `new`.
    _marker: marker::PhantomData<&'a mut [u8]>,
    // Raw base pointer and length of the underlying memory slice.
    ptr: *mut u8,
    len: usize,
}
// These are not automatically implemented with our storage of `*mut u8`, so we
// need to manually declare that this type is threadsafe.
//
// SAFETY: `ptr`/`len` are only ever derived from the exclusive
// `&'a mut [u8]` handed to `BorrowChecker::new`, so no other alias exists
// for `'a`. NOTE(review): soundness of handing out `'a`-long borrows from a
// `&mut self` method still rests on the borrow-tracking sets — confirm with
// the `slice`/`slice_mut` safety comments below.
unsafe impl Send for BorrowChecker<'_> {}
unsafe impl Sync for BorrowChecker<'_> {}
/// Boxes an arbitrary error and converts it into a wasmtime `Trap`.
fn to_trap(err: impl std::error::Error + Send + Sync + 'static) -> Trap {
    let boxed: Box<dyn std::error::Error + Send + Sync> = Box::new(err);
    Trap::from(boxed)
}
impl<'a> BorrowChecker<'a> {
    /// Creates a borrow checker over `data`, taking logical ownership of
    /// that slice for the lifetime `'a`.
    pub fn new(data: &'a mut [u8]) -> BorrowChecker<'a> {
        BorrowChecker {
            ptr: data.as_mut_ptr(),
            len: data.len(),
            shared_borrows: Default::default(),
            mut_borrows: Default::default(),
            _marker: marker::PhantomData,
        }
    }
    /// Returns a shared slice of `len` elements of `T` at guest address
    /// `ptr`, recording the region as shared-borrowed for the remainder of
    /// this checker's life.
    pub fn slice<T: AllBytesValid>(&mut self, ptr: i32, len: i32) -> Result<&'a [T], Trap> {
        let (ret, r) = self.get_slice(ptr, len)?;
        // SAFETY: We're promoting the valid lifetime of `ret` from a temporary
        // borrow on `self` to `'a` on this `BorrowChecker`. At the same time
        // we're recording that this is a persistent shared borrow (until this
        // borrow checker is deleted), which disallows future mutable borrows
        // of the same data.
        let ret = unsafe { &*(ret as *const [T]) };
        self.shared_borrows.insert(r);
        Ok(ret)
    }
    /// Mutable counterpart of [`BorrowChecker::slice`]; the region is
    /// recorded as mutably borrowed and may not overlap any other borrow.
    pub fn slice_mut<T: AllBytesValid>(&mut self, ptr: i32, len: i32) -> Result<&'a mut [T], Trap> {
        let (ret, r) = self.get_slice_mut(ptr, len)?;
        // SAFETY: see `slice` for how we're extending the lifetime by
        // recording the borrow here. Note that the `mut_borrows` list is
        // checked on both shared and mutable borrows in the future since a
        // mutable borrow can't alias with anything.
        let ret = unsafe { &mut *(ret as *mut [T]) };
        self.mut_borrows.insert(r);
        Ok(ret)
    }
    // Validates and materializes a shared slice WITHOUT recording a
    // persistent borrow; used by `slice` (which then records it) and by
    // transient reads in the `RawMem` impl below.
    fn get_slice<T: AllBytesValid>(&self, ptr: i32, len: i32) -> Result<(&[T], Region), Trap> {
        let r = self.region::<T>(ptr, len)?;
        if self.is_mut_borrowed(r) {
            Err(to_trap(GuestError::PtrBorrowed(r)))
        } else {
            Ok((
                // SAFETY: invariants to uphold:
                //
                // * The lifetime of the input is valid for the lifetime of the
                //   output. In this case we're threading through the lifetime
                //   of `&self` to the output.
                // * The actual output is valid, which is guaranteed with the
                //   `AllBytesValid` bound.
                // * We uphold Rust's borrowing guarantees, namely that this
                //   borrow we're returning isn't overlapping with any mutable
                //   borrows.
                // * The region `r` we're returning accurately describes the
                //   slice we're returning in wasm linear memory.
                unsafe {
                    std::slice::from_raw_parts(
                        self.ptr.add(r.start as usize) as *const T,
                        len as usize,
                    )
                },
                r,
            ))
        }
    }
    // Mutable counterpart of `get_slice`: additionally rejects overlap with
    // outstanding shared borrows.
    fn get_slice_mut<T>(&mut self, ptr: i32, len: i32) -> Result<(&mut [T], Region), Trap> {
        let r = self.region::<T>(ptr, len)?;
        if self.is_mut_borrowed(r) || self.is_shared_borrowed(r) {
            Err(to_trap(GuestError::PtrBorrowed(r)))
        } else {
            Ok((
                // SAFETY: same as `get_slice`, except for that we're threading
                // through `&mut` properties as well.
                unsafe {
                    std::slice::from_raw_parts_mut(
                        self.ptr.add(r.start as usize) as *mut T,
                        len as usize,
                    )
                },
                r,
            ))
        }
    }
    // Computes and bounds-checks the byte `Region` covered by `len` elements
    // of `T` starting at guest address `ptr`.
    fn region<T>(&self, ptr: i32, len: i32) -> Result<Region, Trap> {
        // Only 1-aligned element types (e.g. `u8`, `Le<T>`) are supported.
        assert_eq!(std::mem::align_of::<T>(), 1);
        let r = Region {
            start: ptr as u32,
            // Overflow of the total byte length is a guest error, not a panic.
            len: (len as u32)
                .checked_mul(mem::size_of::<T>() as u32)
                .ok_or_else(|| to_trap(GuestError::PtrOverflow))?,
        };
        self.validate_contains(&r)?;
        Ok(r)
    }
    /// Like [`BorrowChecker::slice`] for `u8`, additionally validating the
    /// bytes are UTF-8.
    pub fn slice_str(&mut self, ptr: i32, len: i32) -> Result<&'a str, Trap> {
        let bytes = self.slice(ptr, len)?;
        std::str::from_utf8(bytes).map_err(to_trap)
    }
    // Ensures `region` lies entirely within `0..self.len`.
    fn validate_contains(&self, region: &Region) -> Result<(), Trap> {
        let end = region
            .start
            .checked_add(region.len)
            .ok_or_else(|| to_trap(GuestError::PtrOverflow))? as usize;
        if end <= self.len {
            Ok(())
        } else {
            Err(to_trap(GuestError::PtrOutOfBounds(*region)))
        }
    }
    // O(n) scans over the recorded borrow sets; see the struct docs for why.
    fn is_shared_borrowed(&self, r: Region) -> bool {
        self.shared_borrows.iter().any(|b| b.overlaps(r))
    }
    fn is_mut_borrowed(&self, r: Region) -> bool {
        self.mut_borrows.iter().any(|b| b.overlaps(r))
    }
    /// Returns the raw underlying memory as a wide pointer, bypassing all
    /// borrow tracking; the caller is responsible for aliasing.
    pub fn raw(&self) -> *mut [u8] {
        std::ptr::slice_from_raw_parts_mut(self.ptr, self.len)
    }
}
// `RawMem` on a `BorrowChecker` performs transient, borrow-checked loads and
// stores: overlap with outstanding borrows is rejected, but these accesses
// themselves are not recorded as persistent borrows.
impl RawMem for BorrowChecker<'_> {
    fn store<T: Endian>(&mut self, offset: i32, val: T) -> Result<(), Trap> {
        // A one-element `Le<T>` slice gives a bounds-checked, alignment-free
        // window over the bytes at `offset`.
        let (slice, _) = self.get_slice_mut::<Le<T>>(offset, 1)?;
        slice[0].set(val);
        Ok(())
    }
    fn store_many<T: Endian>(&mut self, offset: i32, val: &[T]) -> Result<(), Trap> {
        let (slice, _) = self.get_slice_mut::<Le<T>>(
            offset,
            // `val.len()` must fit in the `i32` element count.
            val.len()
                .try_into()
                .map_err(|_| to_trap(GuestError::PtrOverflow))?,
        )?;
        for (slot, val) in slice.iter_mut().zip(val) {
            slot.set(*val);
        }
        Ok(())
    }
    fn load<T: Endian>(&self, offset: i32) -> Result<T, Trap> {
        let (slice, _) = self.get_slice::<Le<T>>(offset, 1)?;
        Ok(slice[0].get())
    }
}
/// Unsafe trait representing types where every byte pattern is valid for their
/// representation.
///
/// This is the set of types which wasmtime can have a raw pointer to for
/// values which reside in wasm linear memory.
///
/// # Safety
///
/// Implementors must guarantee that any bit pattern of `size_of::<Self>()`
/// bytes is a valid value of `Self` (note: `bool`, `char`, and references do
/// NOT qualify).
pub unsafe trait AllBytesValid {}
// SAFETY: all fixed-width integer and float types accept every bit pattern.
unsafe impl AllBytesValid for u8 {}
unsafe impl AllBytesValid for u16 {}
unsafe impl AllBytesValid for u32 {}
unsafe impl AllBytesValid for u64 {}
unsafe impl AllBytesValid for i8 {}
unsafe impl AllBytesValid for i16 {}
unsafe impl AllBytesValid for i32 {}
unsafe impl AllBytesValid for i64 {}
unsafe impl AllBytesValid for f32 {}
unsafe impl AllBytesValid for f64 {}
// A tuple of `AllBytesValid` types is itself `AllBytesValid`.
// NOTE(review): this assumes any padding bytes in the tuple layout are also
// acceptable to leave arbitrary — confirm against how `Le`/`Endian` use
// tuple types.
macro_rules! tuples {
    ($(($($t:ident)*))*) => ($(
        unsafe impl <$($t:AllBytesValid,)*> AllBytesValid for ($($t,)*) {}
    )*)
}
// Tuples up to arity 10, matching the `Endian` tuple impls.
tuples! {
    ()
    (T1)
    (T1 T2)
    (T1 T2 T3)
    (T1 T2 T3 T4)
    (T1 T2 T3 T4 T5)
    (T1 T2 T3 T4 T5 T6)
    (T1 T2 T3 T4 T5 T6 T7)
    (T1 T2 T3 T4 T5 T6 T7 T8)
    (T1 T2 T3 T4 T5 T6 T7 T8 T9)
    (T1 T2 T3 T4 T5 T6 T7 T8 T9 T10)
}
/// Represents a contiguous region in memory, as a `start` byte offset plus a
/// byte length.
#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
pub struct Region {
    pub start: u32,
    pub len: u32,
}
impl Region {
    /// Checks if this `Region` overlaps with `rhs` `Region`.
    fn overlaps(&self, rhs: Region) -> bool {
        // A zero-length region occupies no bytes, so it can never overlap.
        if self.len == 0 || rhs.len == 0 {
            return false;
        }
        // Inclusive interval endpoints, widened to u64 so that
        // `start + len - 1` cannot wrap around.
        let (a_start, a_end) = (self.start as u64, self.start as u64 + (self.len - 1) as u64);
        let (b_start, b_end) = (rhs.start as u64, rhs.start as u64 + (rhs.len - 1) as u64);
        // Two non-empty inclusive intervals intersect iff each one starts no
        // later than the other ends.
        a_start <= b_end && b_start <= a_end
    }
}
#[cfg(test)]
mod test {
    use super::*;
    // Non-overlapping regions may be borrowed in any combination of
    // shared/mutable, in either order.
    #[test]
    fn nonoverlapping() {
        let mut bytes = [0; 100];
        let mut bc = BorrowChecker::new(&mut bytes);
        bc.slice::<u8>(0, 10).unwrap();
        bc.slice::<u8>(10, 10).unwrap();
        let mut bc = BorrowChecker::new(&mut bytes);
        bc.slice::<u8>(10, 10).unwrap();
        bc.slice::<u8>(0, 10).unwrap();
        let mut bc = BorrowChecker::new(&mut bytes);
        bc.slice_mut::<u8>(0, 10).unwrap();
        bc.slice_mut::<u8>(10, 10).unwrap();
        let mut bc = BorrowChecker::new(&mut bytes);
        bc.slice_mut::<u8>(10, 10).unwrap();
        bc.slice_mut::<u8>(0, 10).unwrap();
    }
    // A mutable borrow of a region overlapping an outstanding shared borrow
    // must fail, while another shared borrow of the same region is fine.
    #[test]
    fn overlapping() {
        let mut bytes = [0; 100];
        let mut bc = BorrowChecker::new(&mut bytes);
        bc.slice::<u8>(0, 10).unwrap();
        bc.slice_mut::<u8>(9, 10).unwrap_err();
        bc.slice::<u8>(9, 10).unwrap();
        let mut bc = BorrowChecker::new(&mut bytes);
        bc.slice::<u8>(0, 10).unwrap();
        bc.slice_mut::<u8>(2, 5).unwrap_err();
        bc.slice::<u8>(2, 5).unwrap();
        let mut bc = BorrowChecker::new(&mut bytes);
        bc.slice::<u8>(9, 10).unwrap();
        bc.slice_mut::<u8>(0, 10).unwrap_err();
        bc.slice::<u8>(0, 10).unwrap();
        let mut bc = BorrowChecker::new(&mut bytes);
        bc.slice::<u8>(2, 5).unwrap();
        bc.slice_mut::<u8>(0, 10).unwrap_err();
        bc.slice::<u8>(0, 10).unwrap();
        let mut bc = BorrowChecker::new(&mut bytes);
        bc.slice::<u8>(2, 5).unwrap();
        bc.slice::<u8>(10, 5).unwrap();
        bc.slice::<u8>(15, 5).unwrap();
        bc.slice_mut::<u8>(0, 10).unwrap_err();
        bc.slice::<u8>(0, 10).unwrap();
    }
    // Zero-length borrows never conflict, even "mutable" ones at the same
    // address or within a shared-borrowed region.
    #[test]
    fn zero_length() {
        let mut bytes = [0; 100];
        let mut bc = BorrowChecker::new(&mut bytes);
        bc.slice_mut::<u8>(0, 0).unwrap();
        bc.slice_mut::<u8>(0, 0).unwrap();
        bc.slice::<u8>(0, 1).unwrap();
    }
}

View File

@@ -0,0 +1,72 @@
use std::fmt;
use std::mem;
/// A simple arena handing out `u32` indices for inserted values.
///
/// Vacant slots form an intrusive free list and are recycled in LIFO order,
/// so indices of removed items are reused by later insertions.
pub struct Slab<T> {
    /// Backing storage; each slot is either occupied or a free-list link.
    storage: Vec<Entry<T>>,
    /// Head of the free list (equals `storage.len()` when nothing is free).
    next: usize,
}
enum Entry<T> {
    /// An occupied slot.
    Full(T),
    /// A vacant slot, recording the index of the next vacant slot.
    Empty { next: usize },
}
impl<T> Slab<T> {
    /// Inserts `item`, returning the index it can later be fetched or
    /// removed with.
    pub fn insert(&mut self, item: T) -> u32 {
        // If the free list is exhausted, append a fresh vacant slot whose
        // link points just past the end of the vector.
        if self.next == self.storage.len() {
            self.storage.push(Entry::Empty {
                next: self.next + 1,
            });
        }
        let ret = self.next as u32;
        // Occupy the head of the free list and advance `next` to the link
        // recorded in the vacated entry.
        let entry = Entry::Full(item);
        self.next = match mem::replace(&mut self.storage[self.next], entry) {
            Entry::Empty { next } => next,
            Entry::Full(_) => unreachable!("free list pointed at an occupied slot"),
        };
        ret
    }
    /// Returns a shared borrow of the item at `idx`, or `None` if that
    /// index is not currently occupied.
    pub fn get(&self, idx: u32) -> Option<&T> {
        match self.storage.get(idx as usize)? {
            Entry::Full(b) => Some(b),
            Entry::Empty { .. } => None,
        }
    }
    /// Mutable counterpart of [`Slab::get`].
    pub fn get_mut(&mut self, idx: u32) -> Option<&mut T> {
        match self.storage.get_mut(idx as usize)? {
            Entry::Full(b) => Some(b),
            Entry::Empty { .. } => None,
        }
    }
    /// Removes and returns the item at `idx`, or `None` if the slot was
    /// vacant (the slot's free-list link is left untouched in that case).
    pub fn remove(&mut self, idx: u32) -> Option<T> {
        let slot = self.storage.get_mut(idx as usize)?;
        // Optimistically vacate the slot, linking it to the current head.
        match mem::replace(slot, Entry::Empty { next: self.next }) {
            Entry::Full(b) => {
                // The removed slot becomes the new free-list head.
                self.next = idx as usize;
                Some(b)
            }
            Entry::Empty { next } => {
                // Already vacant: restore the original link.
                *slot = Entry::Empty { next };
                None
            }
        }
    }
}
impl<T> Default for Slab<T> {
    /// An empty slab: no storage, with `next` pointing just past the end so
    /// the first insertion allocates slot 0.
    fn default() -> Slab<T> {
        Slab { storage: Vec::new(), next: 0 }
    }
}
// Opaque debug representation: element values are intentionally not printed,
// which also means no `T: fmt::Debug` bound is required (the previous bound
// was unused). This lets any `Slab<T>` be formatted, e.g. from a containing
// `#[derive(Debug)]` type.
impl<T> fmt::Debug for Slab<T> {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        f.debug_struct("Slab").finish()
    }
}

View File

@@ -0,0 +1,144 @@
use std::convert::TryFrom;
use std::fmt;
use std::mem;
/// A sparse container mapping `u32` handles to heap-allocated values.
///
/// Vacant slots form an intrusive free list, so indices of removed items are
/// recycled by later insertions.
pub struct Table<T> {
    /// Slot storage; `Empty` entries link together the free list.
    elems: Vec<Slot<T>>,
    /// Head of the free list (equals `elems.len()` when nothing is free).
    next: usize,
}
/// Error returned by [`Table::remove`].
#[derive(Debug)]
pub enum RemoveError {
    /// The given index does not currently hold an item.
    NotAllocated,
}
enum Slot<T> {
    /// Vacant slot, recording the index of the next vacant slot.
    Empty { next_empty: usize },
    /// Occupied slot; boxed so the value's address is stable.
    Full { item: Box<T> },
}
impl<T> Table<T> {
    /// Creates a new empty table
    pub fn new() -> Table<T> {
        Table {
            elems: Vec::new(),
            next: 0,
        }
    }
    /// Inserts an item into this table, returning the index that it was
    /// inserted at.
    pub fn insert(&mut self, item: T) -> u32 {
        // If the free list is exhausted, append a fresh vacant slot whose
        // link points just past the end of the vector.
        if self.next == self.elems.len() {
            let next_empty = self.next + 1;
            self.elems.push(Slot::Empty { next_empty });
        }
        let index = self.next;
        let ret = u32::try_from(index).unwrap();
        // Occupy the head of the free list and advance `next` to the link
        // recorded in the vacated slot (mirrors `Slab::insert`).
        let filled = Slot::Full {
            item: Box::new(item),
        };
        self.next = match mem::replace(&mut self.elems[index], filled) {
            Slot::Empty { next_empty } => next_empty,
            Slot::Full { .. } => unreachable!("free list pointed at an occupied slot"),
        };
        ret
    }
    /// Borrows an item from this table.
    ///
    /// Returns `None` if the index is not allocated at this time. Otherwise
    /// returns `Some` with a borrow of the item from this table.
    pub fn get(&self, item: u32) -> Option<&T> {
        let index = usize::try_from(item).unwrap();
        match self.elems.get(index)? {
            Slot::Empty { .. } => None,
            Slot::Full { item } => Some(item),
        }
    }
    /// Removes an item from this table.
    ///
    /// On success it returns back the original item.
    pub fn remove(&mut self, item: u32) -> Result<T, RemoveError> {
        let index = usize::try_from(item).unwrap();
        // Optimistically vacate the slot, linking it to the current
        // free-list head; on the error path the original link is restored.
        let new_empty = Slot::Empty {
            next_empty: self.next,
        };
        let slot = self.elems.get_mut(index).ok_or(RemoveError::NotAllocated)?;
        match mem::replace(slot, new_empty) {
            Slot::Full { item } => {
                // The removed slot becomes the new free-list head.
                self.next = index;
                Ok(*item)
            }
            // The slot was already vacant: put the original link back where
            // we found it and report the error.
            Slot::Empty { next_empty } => {
                *slot = Slot::Empty { next_empty };
                Err(RemoveError::NotAllocated)
            }
        }
    }
}
impl<T> Default for Table<T> {
fn default() -> Table<T> {
Table::new()
}
}
// Debug output intentionally omits element values (so no `T: Debug` bound
// is needed); only the backing capacity is reported.
impl<T> fmt::Debug for Table<T> {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        f.debug_struct("Table")
            .field("capacity", &self.elems.capacity())
            .finish()
    }
}
impl fmt::Display for RemoveError {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        // Match exhaustively so adding a variant forces an update here.
        match *self {
            RemoveError::NotAllocated => write!(f, "invalid handle index"),
        }
    }
}
impl std::error::Error for RemoveError {}
#[cfg(test)]
mod tests {
    use super::*;
    // Exercises insertion order, lookup, double-remove failure, and
    // LIFO-style reuse of freed indices.
    #[test]
    fn simple() {
        let mut table = Table::new();
        // Fresh table hands out sequential indices.
        assert_eq!(table.insert(0), 0);
        assert_eq!(table.insert(100), 1);
        assert_eq!(table.insert(200), 2);
        assert_eq!(*table.get(0).unwrap(), 0);
        assert_eq!(*table.get(1).unwrap(), 100);
        assert_eq!(*table.get(2).unwrap(), 200);
        assert!(table.get(100).is_none());
        // Removing frees the slot; the next insert reuses it.
        assert!(table.remove(0).is_ok());
        assert!(table.get(0).is_none());
        assert_eq!(table.insert(1), 0);
        assert!(table.get(0).is_some());
        table.get(1).unwrap();
        // A second remove of the same index must fail.
        assert!(table.remove(1).is_ok());
        assert!(table.remove(1).is_err());
        assert!(table.remove(2).is_ok());
        assert!(table.remove(0).is_ok());
        // Freed indices come back most-recently-freed first.
        assert_eq!(table.insert(100), 0);
        assert_eq!(table.insert(100), 2);
        assert_eq!(table.insert(100), 1);
        assert_eq!(table.insert(100), 3);
    }
}
}