feat: add dependency

2023-01-20 22:36:19 +08:00
parent 68e8d103b4
commit cf8e579f27
644 changed files with 150099 additions and 14 deletions

View File

@@ -0,0 +1,15 @@
[package]
name = "boa_gc"
description = "Garbage collector for the Boa JavaScript engine."
keywords = ["javascript", "js", "garbage", "memory"]
categories = ["command-line-utilities"]
version.workspace = true
edition.workspace = true
authors.workspace = true
license.workspace = true
repository.workspace = true
rust-version.workspace = true

[dependencies]
boa_profiler.workspace = true
boa_macros.workspace = true

View File

@@ -0,0 +1,571 @@
//! A garbage collected cell implementation
use crate::trace::{Finalize, Trace};
use std::{
cell::{Cell, UnsafeCell},
cmp::Ordering,
fmt::{self, Debug, Display},
hash::Hash,
ops::{Deref, DerefMut},
};
/// `BorrowFlag` represents the internal state of a `GcCell` and
/// keeps track of the number of current borrows.
#[derive(Copy, Clone)]
pub(crate) struct BorrowFlag(usize);
/// `BorrowState` represents the various states of a `BorrowFlag`
///
/// - Reading: the value is currently being read/borrowed.
/// - Writing: the value is currently being written/borrowed mutably.
/// - Unused: the value is not currently borrowed.
#[derive(Copy, Clone, Debug, Eq, PartialEq)]
pub(crate) enum BorrowState {
Reading,
Writing,
Unused,
}
const ROOT: usize = 1;
const WRITING: usize = !1;
const UNUSED: usize = 0;
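//
// Bit layout of a `BorrowFlag` (illustrative):
//
//   bits 1.. | bit 0
//   borrows  | root flag
//
// Each shared borrow adds `0b10`, so the bits above the root bit act as a
// reader counter; `WRITING` (`!1`) sets all of those bits at once, a value
// that the reader counter can never legitimately reach.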
/// The initial `BorrowFlag`: rooted, with no outstanding borrows.
pub(crate) const BORROWFLAG_INIT: BorrowFlag = BorrowFlag(ROOT);
impl BorrowFlag {
/// Returns the current `BorrowState` of the `BorrowFlag`.
pub(crate) const fn borrowed(self) -> BorrowState {
match self.0 & !ROOT {
UNUSED => BorrowState::Unused,
WRITING => BorrowState::Writing,
_ => BorrowState::Reading,
}
}
/// Checks whether the root bit is set.
pub(crate) const fn rooted(self) -> bool {
self.0 & ROOT > 0
}
/// Set the `BorrowFlag`'s state to writing.
pub(crate) const fn set_writing(self) -> Self {
// Set every bit other than the root bit, which is preserved
Self(self.0 | WRITING)
}
/// Sets the `BorrowFlag`'s state to unused, preserving the root bit.
pub(crate) const fn set_unused(self) -> Self {
// Clear every bit other than the root bit, which is preserved
Self(self.0 & ROOT)
}
/// Increments the counter for a new borrow.
///
/// # Panics
/// - This method will panic if the current `BorrowState` is writing.
/// - This method will panic after incrementing if the borrow count overflows.
pub(crate) fn add_reading(self) -> Self {
assert!(self.borrowed() != BorrowState::Writing);
// Add 1 to the integer starting at the second binary digit. As our
// `BorrowState` is not writing, we know that overflow cannot happen, so
// this is equivalent to the following, more complicated, expression:
//
// BorrowFlag((self.0 & ROOT) | (((self.0 >> 1) + 1) << 1))
let flags = Self(self.0 + 0b10);
// This will fail if the borrow count overflows, which shouldn't happen,
// but let's be safe
assert!(flags.borrowed() == BorrowState::Reading);
flags
}
/// Decrements the counter to remove a borrow.
///
/// # Panics
/// - This method will panic if the current `BorrowState` is not reading.
pub(crate) fn sub_reading(self) -> Self {
assert!(self.borrowed() == BorrowState::Reading);
// Subtract 1 from the integer starting at the second binary digit. As
// our `BorrowState` is not writing or unused, we know that overflow or
// underflow cannot happen, so this is equivalent to the following, more
// complicated, expression:
//
// BorrowFlag((self.0 & ROOT) | (((self.0 >> 1) - 1) << 1))
Self(self.0 - 0b10)
}
/// Set the root flag on the `BorrowFlag`.
pub(crate) fn set_rooted(self, rooted: bool) -> Self {
// Preserve the non-root bits
Self((self.0 & !ROOT) | (usize::from(rooted)))
}
}
impl Debug for BorrowFlag {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
f.debug_struct("BorrowFlag")
.field("Rooted", &self.rooted())
.field("State", &self.borrowed())
.finish()
}
}
/// A mutable memory location with dynamically checked borrow rules
/// that can be used inside of a garbage-collected pointer.
///
/// This object is a `RefCell` that can be used inside of a `Gc<T>`.
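///
/// A short sketch of the dynamic borrow rules:
///
/// ```
/// use boa_gc::{Gc, GcCell};
///
/// let cell = Gc::new(GcCell::new(5));
/// // A mutable borrow excludes all other borrows until it is dropped.
/// *cell.borrow_mut() += 1;
/// assert_eq!(*cell.borrow(), 6);
/// ```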
pub struct GcCell<T: ?Sized + 'static> {
pub(crate) flags: Cell<BorrowFlag>,
pub(crate) cell: UnsafeCell<T>,
}
impl<T: Trace> GcCell<T> {
/// Creates a new `GcCell` containing `value`.
pub const fn new(value: T) -> Self {
Self {
flags: Cell::new(BORROWFLAG_INIT),
cell: UnsafeCell::new(value),
}
}
/// Consumes the `GcCell`, returning the wrapped value.
pub fn into_inner(self) -> T {
self.cell.into_inner()
}
}
impl<T: Trace + ?Sized> GcCell<T> {
/// Immutably borrows the wrapped value.
///
/// The borrow lasts until the returned `GcCellRef` exits scope.
/// Multiple immutable borrows can be taken out at the same time.
///
/// # Panics
///
/// Panics if the value is currently mutably borrowed.
pub fn borrow(&self) -> GcCellRef<'_, T> {
match self.try_borrow() {
Ok(value) => value,
Err(e) => panic!("{}", e),
}
}
/// Mutably borrows the wrapped value.
///
/// The borrow lasts until the returned `GcCellRefMut` exits scope.
/// The value cannot be borrowed while this borrow is active.
///
/// # Panics
///
/// Panics if the value is currently borrowed.
pub fn borrow_mut(&self) -> GcCellRefMut<'_, T> {
match self.try_borrow_mut() {
Ok(value) => value,
Err(e) => panic!("{}", e),
}
}
/// Immutably borrows the wrapped value, returning an error if the value is currently mutably
/// borrowed.
///
/// The borrow lasts until the returned `GcCellRef` exits scope. Multiple immutable borrows can be
/// taken out at the same time.
///
/// This is the non-panicking variant of [`borrow`](#method.borrow).
///
/// # Errors
///
/// Returns an `Err` if the value is currently mutably borrowed.
pub fn try_borrow(&self) -> Result<GcCellRef<'_, T>, BorrowError> {
if self.flags.get().borrowed() == BorrowState::Writing {
return Err(BorrowError);
}
self.flags.set(self.flags.get().add_reading());
// SAFETY: the borrow flag is set to reading above, so no unique reference to the
// value can exist, making it safe to create a shared reference.
unsafe {
Ok(GcCellRef {
flags: &self.flags,
value: &*self.cell.get(),
})
}
}
/// Mutably borrows the wrapped value, returning an error if the value is currently borrowed.
///
/// The borrow lasts until the returned `GcCellRefMut` exits scope.
/// The value cannot be borrowed while this borrow is active.
///
/// This is the non-panicking variant of [`borrow_mut`](#method.borrow_mut).
///
/// # Errors
///
/// Returns an `Err` if the value is currently borrowed.
pub fn try_borrow_mut(&self) -> Result<GcCellRefMut<'_, T>, BorrowMutError> {
if self.flags.get().borrowed() != BorrowState::Unused {
return Err(BorrowMutError);
}
self.flags.set(self.flags.get().set_writing());
// SAFETY: the borrow flag is set to writing above, so this is the only reference
// to the value. The contents are also rooted below for the duration of the borrow,
// so they cannot be dropped.
unsafe {
// Force the val_ref's contents to be rooted for the duration of the
// mutable borrow
if !self.flags.get().rooted() {
(*self.cell.get()).root();
}
Ok(GcCellRefMut {
gc_cell: self,
value: &mut *self.cell.get(),
})
}
}
}
/// An error returned by [`GcCell::try_borrow`](struct.GcCell.html#method.try_borrow).
#[derive(Debug, Clone, Copy, Eq, PartialEq, Ord, PartialOrd, Default, Hash)]
pub struct BorrowError;
impl Display for BorrowError {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
Display::fmt("GcCell<T> already mutably borrowed", f)
}
}
/// An error returned by [`GcCell::try_borrow_mut`](struct.GcCell.html#method.try_borrow_mut).
#[derive(Debug, Clone, Copy, Eq, PartialEq, Ord, PartialOrd, Default, Hash)]
pub struct BorrowMutError;
impl Display for BorrowMutError {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
Display::fmt("GcCell<T> already borrowed", f)
}
}
impl<T: Trace + ?Sized> Finalize for GcCell<T> {}
// SAFETY: `GcCell` maintains its own `BorrowState` and rootedness. `GcCell`'s implementation
// only continues `Trace`-based methods while the cell state is not writing.
// Tracing the cell while it is being written to, or incorrectly implementing `Trace`
// on `GcCell`'s value, may cause Undefined Behavior.
unsafe impl<T: Trace + ?Sized> Trace for GcCell<T> {
unsafe fn trace(&self) {
match self.flags.get().borrowed() {
BorrowState::Writing => (),
// SAFETY: Please see GcCell's Trace impl Safety note.
_ => unsafe { (*self.cell.get()).trace() },
}
}
unsafe fn weak_trace(&self) {
match self.flags.get().borrowed() {
BorrowState::Writing => (),
// SAFETY: Please see GcCell's Trace impl Safety note.
_ => unsafe { (*self.cell.get()).weak_trace() },
}
}
unsafe fn root(&self) {
assert!(!self.flags.get().rooted(), "Can't root a GcCell twice!");
self.flags.set(self.flags.get().set_rooted(true));
match self.flags.get().borrowed() {
BorrowState::Writing => (),
// SAFETY: Please see GcCell's Trace impl Safety note.
_ => unsafe { (*self.cell.get()).root() },
}
}
unsafe fn unroot(&self) {
assert!(self.flags.get().rooted(), "Can't unroot a GcCell twice!");
self.flags.set(self.flags.get().set_rooted(false));
match self.flags.get().borrowed() {
BorrowState::Writing => (),
// SAFETY: Please see GcCell's Trace impl Safety note.
_ => unsafe { (*self.cell.get()).unroot() },
}
}
fn run_finalizer(&self) {
Finalize::finalize(self);
match self.flags.get().borrowed() {
BorrowState::Writing => (),
// SAFETY: Please see GcCell's Trace impl Safety note.
_ => unsafe { (*self.cell.get()).run_finalizer() },
}
}
}
/// A wrapper type for an immutably borrowed value from a `GcCell<T>`.
pub struct GcCellRef<'a, T: ?Sized + 'static> {
pub(crate) flags: &'a Cell<BorrowFlag>,
pub(crate) value: &'a T,
}
impl<'a, T: ?Sized> GcCellRef<'a, T> {
/// Copies a `GcCellRef`.
///
/// The `GcCell` is already immutably borrowed, so this cannot fail.
///
/// This is an associated function that needs to be used as
/// `GcCellRef::clone(...)`. A `Clone` implementation or a method
/// would interfere with the use of `c.borrow().clone()` to clone
/// the contents of a `GcCell`.
#[allow(clippy::should_implement_trait)]
#[must_use]
pub fn clone(orig: &GcCellRef<'a, T>) -> GcCellRef<'a, T> {
orig.flags.set(orig.flags.get().add_reading());
GcCellRef {
flags: orig.flags,
value: orig.value,
}
}
/// Makes a new `GcCellRef` from a component of the borrowed data.
///
/// The `GcCell` is already immutably borrowed, so this cannot fail.
///
/// This is an associated function that needs to be used as `GcCellRef::map(...)`.
/// A method would interfere with methods of the same name on the contents
/// of a `GcCellRef` used through `Deref`.
pub fn map<U, F>(orig: Self, f: F) -> GcCellRef<'a, U>
where
U: ?Sized,
F: FnOnce(&T) -> &U,
{
let ret = GcCellRef {
flags: orig.flags,
value: f(orig.value),
};
// We have to tell the compiler not to call the destructor of GcCellRef,
// because it will update the borrow flags.
std::mem::forget(orig);
ret
}
/// Splits a `GcCellRef` into multiple `GcCellRef`s for different components of the borrowed data.
///
/// The `GcCell` is already immutably borrowed, so this cannot fail.
///
/// This is an associated function that needs to be used as `GcCellRef::map_split(...)`.
/// A method would interfere with methods of the same name on the contents of a `GcCellRef` used through `Deref`.
pub fn map_split<U, V, F>(orig: Self, f: F) -> (GcCellRef<'a, U>, GcCellRef<'a, V>)
where
U: ?Sized,
V: ?Sized,
F: FnOnce(&T) -> (&U, &V),
{
let (a, b) = f(orig.value);
orig.flags.set(orig.flags.get().add_reading());
let ret = (
GcCellRef {
flags: orig.flags,
value: a,
},
GcCellRef {
flags: orig.flags,
value: b,
},
);
// We have to tell the compiler not to call the destructor of GcCellRef,
// because it will update the borrow flags.
std::mem::forget(orig);
ret
}
}
impl<T: ?Sized> Deref for GcCellRef<'_, T> {
type Target = T;
fn deref(&self) -> &T {
self.value
}
}
impl<T: ?Sized> Drop for GcCellRef<'_, T> {
fn drop(&mut self) {
debug_assert!(self.flags.get().borrowed() == BorrowState::Reading);
self.flags.set(self.flags.get().sub_reading());
}
}
impl<T: ?Sized + Debug> Debug for GcCellRef<'_, T> {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
Debug::fmt(&**self, f)
}
}
impl<T: ?Sized + Display> Display for GcCellRef<'_, T> {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
Display::fmt(&**self, f)
}
}
/// A wrapper type for a mutably borrowed value from a `GcCell<T>`.
pub struct GcCellRefMut<'a, T: Trace + ?Sized + 'static, U: ?Sized = T> {
pub(crate) gc_cell: &'a GcCell<T>,
pub(crate) value: &'a mut U,
}
impl<'a, T: Trace + ?Sized, U: ?Sized> GcCellRefMut<'a, T, U> {
/// Makes a new `GcCellRefMut` for a component of the borrowed data, e.g., an enum
/// variant.
///
/// The `GcCellRefMut` is already mutably borrowed, so this cannot fail.
///
/// This is an associated function that needs to be used as
/// `GcCellRefMut::map(...)`. A method would interfere with methods of the same
/// name on the contents of a `GcCell` used through `Deref`.
pub fn map<V, F>(orig: Self, f: F) -> GcCellRefMut<'a, T, V>
where
V: ?Sized,
F: FnOnce(&mut U) -> &mut V,
{
// SAFETY: This is safe as `GcCellRefMut` is already borrowed, so the value is rooted.
#[allow(trivial_casts)]
let value = unsafe { &mut *(orig.value as *mut U) };
let ret = GcCellRefMut {
gc_cell: orig.gc_cell,
value: f(value),
};
// We have to tell the compiler not to call the destructor of GcCellRefMut,
// because it will update the borrow flags.
std::mem::forget(orig);
ret
}
}
impl<T: Trace + ?Sized, U: ?Sized> Deref for GcCellRefMut<'_, T, U> {
type Target = U;
fn deref(&self) -> &U {
self.value
}
}
impl<T: Trace + ?Sized, U: ?Sized> DerefMut for GcCellRefMut<'_, T, U> {
fn deref_mut(&mut self) -> &mut U {
self.value
}
}
impl<T: Trace + ?Sized, U: ?Sized> Drop for GcCellRefMut<'_, T, U> {
fn drop(&mut self) {
debug_assert!(self.gc_cell.flags.get().borrowed() == BorrowState::Writing);
// Restore the rooted state of the GcCell's contents to the state of the GcCell.
// During the lifetime of the GcCellRefMut, the GcCell's contents are rooted.
if !self.gc_cell.flags.get().rooted() {
// SAFETY: If `GcCell` is no longer rooted, then unroot it. This should be safe
// as the internal `GcBox` should be guaranteed to have at least 1 root.
unsafe {
(*self.gc_cell.cell.get()).unroot();
}
}
self.gc_cell
.flags
.set(self.gc_cell.flags.get().set_unused());
}
}
impl<T: Trace + ?Sized, U: Debug + ?Sized> Debug for GcCellRefMut<'_, T, U> {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
Debug::fmt(&**self, f)
}
}
impl<T: Trace + ?Sized, U: Display + ?Sized> Display for GcCellRefMut<'_, T, U> {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
Display::fmt(&**self, f)
}
}
// SAFETY: GcCell<T> tracks it's `BorrowState` is `Writing`
unsafe impl<T: ?Sized + Send> Send for GcCell<T> {}
impl<T: Trace + Clone> Clone for GcCell<T> {
fn clone(&self) -> Self {
Self::new(self.borrow().clone())
}
}
impl<T: Trace + Default> Default for GcCell<T> {
fn default() -> Self {
Self::new(Default::default())
}
}
#[allow(clippy::inline_always)]
impl<T: Trace + ?Sized + PartialEq> PartialEq for GcCell<T> {
#[inline(always)]
fn eq(&self, other: &Self) -> bool {
*self.borrow() == *other.borrow()
}
}
impl<T: Trace + ?Sized + Eq> Eq for GcCell<T> {}
#[allow(clippy::inline_always)]
impl<T: Trace + ?Sized + PartialOrd> PartialOrd for GcCell<T> {
#[inline(always)]
fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
(*self.borrow()).partial_cmp(&*other.borrow())
}
#[inline(always)]
fn lt(&self, other: &Self) -> bool {
*self.borrow() < *other.borrow()
}
#[inline(always)]
fn le(&self, other: &Self) -> bool {
*self.borrow() <= *other.borrow()
}
#[inline(always)]
fn gt(&self, other: &Self) -> bool {
*self.borrow() > *other.borrow()
}
#[inline(always)]
fn ge(&self, other: &Self) -> bool {
*self.borrow() >= *other.borrow()
}
}
impl<T: Trace + ?Sized + Ord> Ord for GcCell<T> {
fn cmp(&self, other: &Self) -> Ordering {
(*self.borrow()).cmp(&*other.borrow())
}
}
impl<T: Trace + ?Sized + Debug> Debug for GcCell<T> {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
match self.flags.get().borrowed() {
BorrowState::Unused | BorrowState::Reading => f
.debug_struct("GcCell")
.field("flags", &self.flags.get())
.field("value", &self.borrow())
.finish(),
BorrowState::Writing => f
.debug_struct("GcCell")
.field("flags", &self.flags.get())
.field("value", &"<borrowed>")
.finish(),
}
}
}

View File

@@ -0,0 +1,115 @@
use crate::{finalizer_safe, trace::Trace, Finalize, Gc, GcBox};
use std::{cell::Cell, ptr::NonNull};
/// The inner allocation of an [`Ephemeron`][crate::Ephemeron] pointer.
pub(crate) struct EphemeronBox<K: Trace + ?Sized + 'static, V: Trace + ?Sized + 'static> {
key: Cell<Option<NonNull<GcBox<K>>>>,
value: V,
}
impl<K: Trace + ?Sized, V: Trace> EphemeronBox<K, V> {
pub(crate) fn new(key: &Gc<K>, value: V) -> Self {
Self {
key: Cell::new(Some(key.inner_ptr())),
value,
}
}
}
impl<K: Trace + ?Sized, V: Trace + ?Sized> EphemeronBox<K, V> {
/// Checks if the `key`'s `GcBox` has been marked by the tracer.
pub(crate) fn is_marked(&self) -> bool {
self.inner_key().map_or(false, GcBox::is_marked)
}
/// Returns a pointer to the `key`'s `GcBox`, or `None` if the key was collected.
///
/// # Panics
///
/// This method will panic if called while the garbage collector is dropping.
pub(crate) fn inner_key_ptr(&self) -> Option<*mut GcBox<K>> {
assert!(finalizer_safe());
self.key.get().map(NonNull::as_ptr)
}
/// Returns a reference to the `key`'s `GcBox`, or `None` if the key was collected.
pub(crate) fn inner_key(&self) -> Option<&GcBox<K>> {
// SAFETY: This is safe as `EphemeronBox::inner_key_ptr()` will
// fetch either a live `GcBox` or None. The value of `key` is set
// to None in the case where `EphemeronBox` and `key`'s `GcBox`
// entered into `Collector::sweep()` as unmarked.
unsafe { self.inner_key_ptr().map(|inner_key| &*inner_key) }
}
/// Returns a reference to the value of `key`'s `GcBox`
pub(crate) fn key(&self) -> Option<&K> {
self.inner_key().map(GcBox::value)
}
/// Returns a reference to `value`
pub(crate) const fn value(&self) -> &V {
&self.value
}
/// Calls [`Trace::weak_trace()`][crate::Trace] on key
fn weak_trace_key(&self) {
if let Some(key) = self.inner_key() {
key.weak_trace_inner();
}
}
/// Calls [`Trace::weak_trace()`][crate::Trace] on value
fn weak_trace_value(&self) {
// SAFETY: `value` is owned by this `EphemeronBox`, and the `V: Trace` bound
// guarantees that `Trace::weak_trace` is implemented for it.
unsafe {
self.value().weak_trace();
}
}
}
// `EphemeronBox`'s `Finalize` is special: if the `EphemeronBox` is determined to be
// unreachable, and therefore so is the `GcBox` that `key` stores the pointer to, then
// we set `key` to `None` to guarantee that we do not access freed memory.
impl<K: Trace + ?Sized, V: Trace + ?Sized> Finalize for EphemeronBox<K, V> {
fn finalize(&self) {
self.key.set(None);
}
}
// SAFETY: `EphemeronBox` primarily implements two methods of `Trace`: `Trace::is_marked_ephemeron`,
// to determine whether the key field is marked, and `Trace::weak_trace`, which continues the
// weak trace into `key` and `value`.
unsafe impl<K: Trace + ?Sized, V: Trace + ?Sized> Trace for EphemeronBox<K, V> {
unsafe fn trace(&self) {
/* An ephemeron is never traced with Phase One Trace */
}
/// Checks if the `key`'s `GcBox` has been marked by `Trace::trace()` or `Trace::weak_trace`.
fn is_marked_ephemeron(&self) -> bool {
self.is_marked()
}
/// Checks if this `EphemeronBox` has already been determined reachable. If so, continues
/// the trace into `key` and `value`.
unsafe fn weak_trace(&self) {
if self.is_marked() {
self.weak_trace_key();
self.weak_trace_value();
}
}
// EphemeronBox does not implement root.
unsafe fn root(&self) {}
// EphemeronBox does not implement unroot
unsafe fn unroot(&self) {}
// An `EphemeronBox`'s key is set to `None` once it has been finalized.
//
// NOTE: while it is possible for the `key`'s pointer value to be
// resurrected, we should still consider the ephemeron box finalized
// and set the `key` to `None`.
fn run_finalizer(&self) {
Finalize::finalize(self);
}
}

View File

@@ -0,0 +1,185 @@
use crate::Trace;
use std::{
cell::Cell,
fmt,
ptr::{self, NonNull},
};
// Mark and Weak flags, stored in the high bits of the root count.
const MARK_MASK: usize = 1 << (usize::BITS - 2);
const WEAK_MASK: usize = 1 << (usize::BITS - 1);
const ROOTS_MASK: usize = !(MARK_MASK | WEAK_MASK);
const ROOTS_MAX: usize = ROOTS_MASK;
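//
// Bit layout of `roots` (illustrative, for a 64-bit `usize`):
//
//   bit 63    | bit 62    | bits 0..=61
//   weak flag | mark flag | root count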
/// The `GcBoxHeader` contains the `GcBox`'s current state for the `Collector`'s
/// Mark/Sweep as well as a pointer to the next node in the heap.
///
/// These flags include:
/// - Root Count
/// - Mark Flag Bit
/// - Weak Flag Bit
///
/// The next node is set by the `Allocator` during initialization and by the
/// `Collector` during the sweep phase.
pub(crate) struct GcBoxHeader {
roots: Cell<usize>,
pub(crate) next: Cell<Option<NonNull<GcBox<dyn Trace>>>>,
}
impl GcBoxHeader {
/// Creates a new `GcBoxHeader` with a root count of 1 and `next` set to `None`.
pub(crate) fn new() -> Self {
Self {
roots: Cell::new(1),
next: Cell::new(None),
}
}
/// Creates a new `GcBoxHeader` with the weak bit set and a root count of 1.
pub(crate) fn new_weak() -> Self {
// Set weak_flag
Self {
roots: Cell::new(WEAK_MASK | 1),
next: Cell::new(None),
}
}
/// Returns the `GcBoxHeader`'s current root count
pub(crate) fn roots(&self) -> usize {
self.roots.get() & ROOTS_MASK
}
/// Increments `GcBoxHeader`'s root count.
pub(crate) fn inc_roots(&self) {
let roots = self.roots.get();
if (roots & ROOTS_MASK) < ROOTS_MAX {
self.roots.set(roots + 1);
} else {
// TODO: implement a better way to handle root overload.
panic!("roots counter overflow");
}
}
/// Decreases `GcBoxHeader`'s current root count.
pub(crate) fn dec_roots(&self) {
// Underflow check as a stop gap for current issue when dropping.
if self.roots.get() > 0 {
self.roots.set(self.roots.get() - 1);
}
}
/// Returns a bool for whether `GcBoxHeader`'s mark bit is 1.
pub(crate) fn is_marked(&self) -> bool {
self.roots.get() & MARK_MASK != 0
}
/// Sets `GcBoxHeader`'s mark bit to 1.
pub(crate) fn mark(&self) {
self.roots.set(self.roots.get() | MARK_MASK);
}
/// Sets `GcBoxHeader`'s mark bit to 0.
pub(crate) fn unmark(&self) {
self.roots.set(self.roots.get() & !MARK_MASK);
}
/// Returns a bool for whether the `GcBoxHeader`'s weak bit is 1.
pub(crate) fn is_ephemeron(&self) -> bool {
self.roots.get() & WEAK_MASK != 0
}
}
impl fmt::Debug for GcBoxHeader {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
f.debug_struct("GcBoxHeader")
.field("Roots", &self.roots())
.field("Weak", &self.is_ephemeron())
.field("Marked", &self.is_marked())
.finish()
}
}
/// A garbage collected allocation.
#[derive(Debug)]
pub struct GcBox<T: Trace + ?Sized + 'static> {
pub(crate) header: GcBoxHeader,
pub(crate) value: T,
}
impl<T: Trace> GcBox<T> {
/// Returns a new `GcBox` with a rooted `GcBoxHeader`.
pub(crate) fn new(value: T) -> Self {
Self {
header: GcBoxHeader::new(),
value,
}
}
/// Returns a new `GcBox` with a rooted and weak `GcBoxHeader`.
pub(crate) fn new_weak(value: T) -> Self {
Self {
header: GcBoxHeader::new_weak(),
value,
}
}
}
impl<T: Trace + ?Sized> GcBox<T> {
/// Returns `true` if the two references refer to the same `GcBox`.
pub(crate) fn ptr_eq(this: &Self, other: &Self) -> bool {
// Use .header to ignore fat pointer vtables, to work around
// https://github.com/rust-lang/rust/issues/46139
ptr::eq(&this.header, &other.header)
}
/// Marks this `GcBox` and marks through its data.
pub(crate) unsafe fn trace_inner(&self) {
if !self.header.is_marked() && !self.header.is_ephemeron() {
self.header.mark();
// SAFETY: if `GcBox::trace_inner()` has been called, then,
// this box must have been deemed as reachable via tracing
// from a root, which by extension means that value has not
// been dropped either.
unsafe {
self.value.trace();
}
}
}
/// Trace inner data and search for ephemerons to add to the ephemeron queue.
pub(crate) fn weak_trace_inner(&self) {
if !self.header.is_marked() && !self.header.is_ephemeron() {
self.header.mark();
// SAFETY: if a `GcBox` has `weak_trace_inner` called, then the inner
// value must have been deemed as reachable.
unsafe {
self.value.weak_trace();
}
}
}
/// Increases the root count on this `GcBox`.
///
/// Roots prevent the `GcBox` from being destroyed by the garbage collector.
pub(crate) fn root_inner(&self) {
self.header.inc_roots();
}
/// Decreases the root count on this `GcBox`.
///
/// Roots prevent the `GcBox` from being destroyed by the garbage collector.
pub(crate) fn unroot_inner(&self) {
self.header.dec_roots();
}
/// Returns a reference to the `GcBox`'s value.
pub(crate) const fn value(&self) -> &T {
&self.value
}
/// Returns a bool for whether the header is marked.
pub(crate) fn is_marked(&self) -> bool {
self.header.is_marked()
}
}

View File

@@ -0,0 +1,5 @@
mod ephemeron_box;
mod gc_box;
pub(crate) use self::ephemeron_box::EphemeronBox;
pub use self::gc_box::GcBox;

View File

@@ -0,0 +1,426 @@
//! Boa's **`boa_gc`** crate implements a garbage collector.
//!
//! # Crate Overview
//! **`boa_gc`** is a mark-sweep garbage collector that provides the `Trace` and `Finalize`
//! traits for garbage collected values.
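//!
//! A minimal allocation sketch:
//!
//! ```
//! use boa_gc::{force_collect, Gc};
//!
//! let number = Gc::new(3);
//! assert_eq!(*number, 3);
//! // Dropping the only rooted pointer makes the value collectable.
//! drop(number);
//! force_collect();
//! ```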
//!
//! # About Boa
//! Boa is an open-source, experimental ECMAScript Engine written in Rust for lexing, parsing and executing ECMAScript/JavaScript. Currently, Boa
//! supports some of the [language][boa-conformance]. More information can be viewed at [Boa's website][boa-web].
//!
//! Try out the most recent release with Boa's live demo [playground][boa-playground].
//!
//! # Boa Crates
//! - **`boa_ast`** - Boa's ECMAScript Abstract Syntax Tree.
//! - **`boa_engine`** - Boa's implementation of ECMAScript builtin objects and execution.
//! - **`boa_gc`** - Boa's garbage collector.
//! - **`boa_interner`** - Boa's string interner.
//! - **`boa_parser`** - Boa's lexer and parser.
//! - **`boa_profiler`** - Boa's code profiler.
//! - **`boa_unicode`** - Boa's Unicode identifier.
//! - **`boa_icu_provider`** - Boa's ICU4X data provider.
//!
//! [boa-conformance]: https://boa-dev.github.io/boa/test262/
//! [boa-web]: https://boa-dev.github.io/
//! [boa-playground]: https://boa-dev.github.io/boa/playground/
#![doc(
html_logo_url = "https://raw.githubusercontent.com/boa-dev/boa/main/assets/logo.svg",
html_favicon_url = "https://raw.githubusercontent.com/boa-dev/boa/main/assets/logo.svg"
)]
#![cfg_attr(not(test), forbid(clippy::unwrap_used))]
#![warn(missing_docs, clippy::dbg_macro)]
#![deny(
// rustc lint groups https://doc.rust-lang.org/rustc/lints/groups.html
warnings,
future_incompatible,
let_underscore,
nonstandard_style,
rust_2018_compatibility,
rust_2018_idioms,
rust_2021_compatibility,
unused,
// rustc allowed-by-default lints https://doc.rust-lang.org/rustc/lints/listing/allowed-by-default.html
macro_use_extern_crate,
meta_variable_misuse,
missing_abi,
missing_copy_implementations,
missing_debug_implementations,
non_ascii_idents,
noop_method_call,
single_use_lifetimes,
trivial_casts,
trivial_numeric_casts,
unreachable_pub,
unsafe_op_in_unsafe_fn,
unused_crate_dependencies,
unused_import_braces,
unused_lifetimes,
unused_qualifications,
unused_tuple_struct_fields,
variant_size_differences,
// rustdoc lints https://doc.rust-lang.org/rustdoc/lints.html
rustdoc::broken_intra_doc_links,
rustdoc::private_intra_doc_links,
rustdoc::missing_crate_level_docs,
rustdoc::private_doc_tests,
rustdoc::invalid_codeblock_attributes,
rustdoc::invalid_rust_codeblocks,
rustdoc::bare_urls,
// clippy categories https://doc.rust-lang.org/clippy/
clippy::all,
clippy::correctness,
clippy::suspicious,
clippy::style,
clippy::complexity,
clippy::perf,
clippy::pedantic,
clippy::nursery,
)]
#![allow(
clippy::module_name_repetitions,
clippy::redundant_pub_crate,
clippy::let_unit_value
)]
extern crate self as boa_gc;
mod cell;
mod pointers;
mod trace;
pub(crate) mod internals;
use boa_profiler::Profiler;
use std::{
cell::{Cell, RefCell},
mem,
ptr::NonNull,
};
pub use crate::trace::{Finalize, Trace};
pub use boa_macros::{Finalize, Trace};
pub use cell::{GcCell, GcCellRef, GcCellRefMut};
pub use internals::GcBox;
pub use pointers::{Ephemeron, Gc, WeakGc};
type GcPointer = NonNull<GcBox<dyn Trace>>;
thread_local!(static EPHEMERON_QUEUE: Cell<Option<Vec<GcPointer>>> = Cell::new(None));
thread_local!(static GC_DROPPING: Cell<bool> = Cell::new(false));
thread_local!(static BOA_GC: RefCell<BoaGc> = RefCell::new( BoaGc {
config: GcConfig::default(),
runtime: GcRuntimeData::default(),
adult_start: Cell::new(None),
}));
#[derive(Debug, Clone, Copy)]
struct GcConfig {
threshold: usize,
used_space_percentage: usize,
}
// The defaults are currently set to arbitrary values.
//
// TODO: Add a way to configure these values later.
impl Default for GcConfig {
fn default() -> Self {
Self {
threshold: 1024,
used_space_percentage: 80,
}
}
}
#[derive(Default, Debug, Clone, Copy)]
struct GcRuntimeData {
collections: usize,
bytes_allocated: usize,
}
#[derive(Debug)]
struct BoaGc {
config: GcConfig,
runtime: GcRuntimeData,
adult_start: Cell<Option<GcPointer>>,
}
impl Drop for BoaGc {
fn drop(&mut self) {
Collector::dump(self);
}
}
// GC_DROPPING flags whether the thread is currently in the sweep phase of garbage
// collection. During this phase, attempts to dereference a `Gc<T>` pointer will panic.
/// `DropGuard` flags whether the `Collector` is currently running `Collector::sweep()` or `Collector::dump()`.
///
/// While the `DropGuard` is active, no `GcBox` may be dereferenced or accessed, as doing so could cause Undefined Behavior.
#[derive(Debug, Clone)]
struct DropGuard;
impl DropGuard {
fn new() -> Self {
GC_DROPPING.with(|dropping| dropping.set(true));
Self
}
}
impl Drop for DropGuard {
fn drop(&mut self) {
GC_DROPPING.with(|dropping| dropping.set(false));
}
}
/// Returns `true` if it is safe for a type to run [`Finalize::finalize`].
#[must_use]
#[inline]
pub fn finalizer_safe() -> bool {
GC_DROPPING.with(|dropping| !dropping.get())
}
/// The Allocator handles allocation of garbage collected values.
///
/// The allocator can trigger a garbage collection.
#[derive(Debug, Clone, Copy)]
struct Allocator;
impl Allocator {
/// Allocates a new garbage collected value on the garbage collector's heap.
fn allocate<T: Trace>(value: GcBox<T>) -> NonNull<GcBox<T>> {
let _timer = Profiler::global().start_event("New Pointer", "BoaAlloc");
let element_size = mem::size_of_val::<GcBox<T>>(&value);
BOA_GC.with(|st| {
let mut gc = st.borrow_mut();
Self::manage_state(&mut gc);
value.header.next.set(gc.adult_start.take());
// SAFETY: `Box::into_raw` of a live `Box` always returns a non-null pointer.
let ptr = unsafe { NonNull::new_unchecked(Box::into_raw(Box::from(value))) };
gc.adult_start.set(Some(ptr));
gc.runtime.bytes_allocated += element_size;
ptr
})
}
fn manage_state(gc: &mut BoaGc) {
if gc.runtime.bytes_allocated > gc.config.threshold {
Collector::run_full_collection(gc);
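// If, even after a full collection, the heap is still more than
// `used_space_percentage` full, grow the threshold so that the surviving
// bytes make up at most `used_space_percentage` of the new value. For
// example, with the default config, 900 surviving bytes exceed 80% of the
// 1024-byte threshold, so the new threshold becomes 900 / 80 * 100 = 1100
// (in integer arithmetic).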
if gc.runtime.bytes_allocated
> gc.config.threshold / 100 * gc.config.used_space_percentage
{
gc.config.threshold =
gc.runtime.bytes_allocated / gc.config.used_space_percentage * 100;
}
}
}
}
// This collector currently functions in four main phases
//
// Mark -> Finalize -> Mark -> Sweep
//
// Mark nodes as reachable then finalize the unreachable nodes. A remark phase
// then needs to be retriggered as finalization can potentially resurrect dead
// nodes.
//
// A better approach in a more concurrent structure may be to reorder.
//
// Mark -> Sweep -> Finalize
struct Collector;
impl Collector {
/// Run a collection on the full heap.
fn run_full_collection(gc: &mut BoaGc) {
let _timer = Profiler::global().start_event("Gc Full Collection", "gc");
gc.runtime.collections += 1;
let unreachable_adults = Self::mark_heap(&gc.adult_start);
// Check if any unreachable nodes were found and finalize
if !unreachable_adults.is_empty() {
// SAFETY: Please see `Collector::finalize()`
unsafe { Self::finalize(unreachable_adults) };
}
let _final_unreachable_adults = Self::mark_heap(&gc.adult_start);
// SAFETY: Please see `Collector::sweep()`
unsafe {
Self::sweep(&gc.adult_start, &mut gc.runtime.bytes_allocated);
}
}
/// Walk the heap and mark any nodes deemed reachable
fn mark_heap(head: &Cell<Option<NonNull<GcBox<dyn Trace>>>>) -> Vec<NonNull<GcBox<dyn Trace>>> {
let _timer = Profiler::global().start_event("Gc Marking", "gc");
// Walk the list, tracing and marking the nodes
let mut finalize = Vec::new();
let mut ephemeron_queue = Vec::new();
let mut mark_head = head;
while let Some(node) = mark_head.get() {
// SAFETY: node must be valid as it is coming directly from the heap.
let node_ref = unsafe { node.as_ref() };
if node_ref.header.is_ephemeron() {
ephemeron_queue.push(node);
} else if node_ref.header.roots() > 0 {
// SAFETY: the reference to node must be valid as it is rooted. Passing
// invalid references can result in Undefined Behavior
unsafe {
node_ref.trace_inner();
}
} else {
finalize.push(node);
}
mark_head = &node_ref.header.next;
}
// Ephemeron Evaluation
if !ephemeron_queue.is_empty() {
ephemeron_queue = Self::mark_ephemerons(ephemeron_queue);
}
// Any leftover nodes in the ephemeron queue at this point are
// unreachable and need to be notified/finalized.
finalize.extend(ephemeron_queue);
finalize
}
// Tracing ephemerons/weak references always requires tracing their inner nodes, in case
// the trace ends up marking a previously unmarked node.
//
// Time complexity should be something like O(nd), where d is the longest chain of ephemerons.
/// Mark any ephemerons that are deemed live and trace their fields.
fn mark_ephemerons(
initial_queue: Vec<NonNull<GcBox<dyn Trace>>>,
) -> Vec<NonNull<GcBox<dyn Trace>>> {
let mut ephemeron_queue = initial_queue;
loop {
// Iterate through the ephemeron queue, partitioning nodes by whether
// they are reachable or unreachable.
let (reachable, other): (Vec<_>, Vec<_>) =
ephemeron_queue.into_iter().partition(|node| {
// SAFETY: Any node on the eph_queue or the heap must be non null
let node = unsafe { node.as_ref() };
if node.value.is_marked_ephemeron() {
node.header.mark();
true
} else {
node.header.roots() > 0
}
});
// Replace the old queue with the still-unreachable nodes.
ephemeron_queue = other;
// If the set of reachable nodes is not empty, trace their values.
// If it is empty, break from the loop.
if reachable.is_empty() {
break;
}
EPHEMERON_QUEUE.with(|state| state.set(Some(Vec::new())));
// iterate through reachable nodes and trace their values,
// enqueuing any ephemeron that is found during the trace
for node in reachable {
// TODO: deal with fetch ephemeron_queue
// SAFETY: Node must be a valid pointer or else it would not be deemed reachable.
unsafe {
node.as_ref().weak_trace_inner();
}
}
EPHEMERON_QUEUE.with(|st| {
if let Some(found_nodes) = st.take() {
ephemeron_queue.extend(found_nodes);
}
});
}
ephemeron_queue
}
/// # Safety
///
/// Passing a vec with invalid pointers will result in Undefined Behaviour.
unsafe fn finalize(finalize_vec: Vec<NonNull<GcBox<dyn Trace>>>) {
let _timer = Profiler::global().start_event("Gc Finalization", "gc");
for node in finalize_vec {
// We double check that the unreachable nodes are actually unreachable
// prior to finalization as they could have been marked by a different
// trace after initially being added to the queue
//
// SAFETY: The caller must ensure all pointers inside `finalize_vec` are valid.
let node = unsafe { node.as_ref() };
if !node.header.is_marked() {
Trace::run_finalizer(&node.value);
}
}
}
/// # Safety
///
/// - Providing an invalid pointer in the `heap_start` or in any of the headers of each
/// node will result in Undefined Behaviour.
/// - Providing a list of pointers that weren't allocated by `Box::into_raw(Box::new(..))`
/// will result in Undefined Behaviour.
unsafe fn sweep(
heap_start: &Cell<Option<NonNull<GcBox<dyn Trace>>>>,
total_allocated: &mut usize,
) {
let _timer = Profiler::global().start_event("Gc Sweeping", "gc");
let _guard = DropGuard::new();
let mut sweep_head = heap_start;
while let Some(node) = sweep_head.get() {
// SAFETY: The caller must ensure the validity of every node of `heap_start`.
let node_ref = unsafe { node.as_ref() };
if node_ref.is_marked() {
node_ref.header.unmark();
sweep_head = &node_ref.header.next;
} else if node_ref.header.is_ephemeron() && node_ref.header.roots() > 0 {
// Keep the ephemeron box alive if it is rooted, but note that its pointer is no longer safe.
Trace::run_finalizer(&node_ref.value);
sweep_head = &node_ref.header.next;
} else {
// SAFETY: The algorithm ensures only unmarked/unreachable pointers are dropped.
// The caller must ensure all pointers were allocated by `Box::into_raw(Box::new(..))`.
let unmarked_node = unsafe { Box::from_raw(node.as_ptr()) };
let unallocated_bytes = mem::size_of_val::<GcBox<_>>(&*unmarked_node);
*total_allocated -= unallocated_bytes;
sweep_head.set(unmarked_node.header.next.take());
}
}
}
// Clean up the heap when BoaGc is dropped
fn dump(gc: &mut BoaGc) {
// A `DropGuard` is initialized here as well, since this is only invoked while
// `BOA_GC` is being dropped and no `GcBox` may be dereferenced during teardown.
let _guard = DropGuard::new();
let sweep_head = &gc.adult_start;
while let Some(node) = sweep_head.get() {
// SAFETY:
// The `Allocator` must always ensure its start node is a valid, non-null pointer that
// was allocated by `Box::into_raw(Box::new(..))`.
let unmarked_node = unsafe { Box::from_raw(node.as_ptr()) };
sweep_head.set(unmarked_node.header.next.take());
}
}
}
/// Forcefully runs a garbage collection, collecting all unreachable nodes.
pub fn force_collect() {
BOA_GC.with(|current| {
let mut gc = current.borrow_mut();
if gc.runtime.bytes_allocated > 0 {
Collector::run_full_collection(&mut gc);
}
});
}
#[cfg(test)]
mod test;

View File

@@ -0,0 +1,112 @@
use crate::{
finalizer_safe,
internals::EphemeronBox,
trace::{Finalize, Trace},
Allocator, Gc, GcBox, EPHEMERON_QUEUE,
};
use std::{cell::Cell, ptr::NonNull};
/// A key-value pair where the value becomes inaccessible when the key is garbage collected.
///
/// See Racket's explanation on [**ephemerons**][eph] for a brief overview or read Barry Hayes'
/// [_Ephemerons_: a new finalization mechanism][acm].
///
///
/// [eph]: https://docs.racket-lang.org/reference/ephemerons.html
/// [acm]: https://dl.acm.org/doi/10.1145/263700.263733
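///
/// A short sketch of the key/value relationship:
///
/// ```
/// use boa_gc::{force_collect, Ephemeron, Gc};
///
/// let key = Gc::new(42);
/// let eph = Ephemeron::new(&key, String::from("value"));
/// assert_eq!(eph.key().copied(), Some(42));
/// // Once the key is collected, it can no longer be retrieved.
/// drop(key);
/// force_collect();
/// assert!(eph.key().is_none());
/// ```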
#[derive(Debug)]
pub struct Ephemeron<K: Trace + ?Sized + 'static, V: Trace + 'static> {
inner_ptr: Cell<NonNull<GcBox<EphemeronBox<K, V>>>>,
}
impl<K: Trace + ?Sized, V: Trace> Ephemeron<K, V> {
/// Creates a new `Ephemeron`.
pub fn new(key: &Gc<K>, value: V) -> Self {
Self {
inner_ptr: Cell::new(Allocator::allocate(GcBox::new_weak(EphemeronBox::new(
key, value,
)))),
}
}
}
impl<K: Trace + ?Sized, V: Trace> Ephemeron<K, V> {
fn inner_ptr(&self) -> NonNull<GcBox<EphemeronBox<K, V>>> {
self.inner_ptr.get()
}
fn inner(&self) -> &GcBox<EphemeronBox<K, V>> {
// SAFETY: GcBox<EphemeronBox<K,V>> must live until it is unrooted by Drop
unsafe { &*self.inner_ptr().as_ptr() }
}
/// Gets the weak key of this `Ephemeron`, or `None` if the key was already garbage
/// collected.
pub fn key(&self) -> Option<&K> {
self.inner().value().key()
}
/// Gets the stored value of this `Ephemeron`.
pub fn value(&self) -> &V {
self.inner().value().value()
}
/// Gets a `Gc` for the stored key of this `Ephemeron`.
pub fn upgrade_key(&self) -> Option<Gc<K>> {
// SAFETY: ptr must be a valid pointer or None would have been returned.
self.inner().value().inner_key_ptr().map(|ptr| unsafe {
let inner_ptr = NonNull::new_unchecked(ptr);
Gc::from_ptr(inner_ptr)
})
}
}
impl<K: Trace, V: Trace> Finalize for Ephemeron<K, V> {}
// SAFETY: `Ephemeron`'s `Trace` implementation is standard for everything except `Trace::weak_trace()`,
// which pushes the `GcBox<EphemeronBox<_>>` onto the `EPHEMERON_QUEUE`.
unsafe impl<K: Trace, V: Trace> Trace for Ephemeron<K, V> {
unsafe fn trace(&self) {}
// Push this Ephemeron's pointer onto the EphemeronQueue
unsafe fn weak_trace(&self) {
EPHEMERON_QUEUE.with(|q| {
let mut queue = q.take().expect("queue is initialized by weak_trace");
queue.push(self.inner_ptr());
});
}
unsafe fn root(&self) {}
unsafe fn unroot(&self) {}
fn run_finalizer(&self) {
Finalize::finalize(self);
}
}
impl<K: Trace + ?Sized, V: Trace> Clone for Ephemeron<K, V> {
fn clone(&self) -> Self {
// SAFETY: This is safe because `inner_ptr` must live as long as its roots.
// Mismanaging the root count can cause `inner_ptr` to be used after free, or
// other Undefined Behavior.
unsafe {
let eph = Self {
inner_ptr: Cell::new(NonNull::new_unchecked(self.inner_ptr().as_ptr())),
};
// Increment the Ephemeron's GcBox roots by 1
self.inner().root_inner();
eph
}
}
}
impl<K: Trace + ?Sized, V: Trace> Drop for Ephemeron<K, V> {
fn drop(&mut self) {
// NOTE: We only unroot if this drop call is not coming from
// `Collector::dump` or `Collector::sweep`.
if finalizer_safe() {
self.inner().unroot_inner();
}
}
}

View File

@@ -0,0 +1,295 @@
use crate::{
finalizer_safe,
internals::GcBox,
trace::{Finalize, Trace},
Allocator,
};
use std::{
cell::Cell,
cmp::Ordering,
fmt::{self, Debug, Display},
hash::{Hash, Hasher},
marker::PhantomData,
ops::Deref,
ptr::{self, addr_of_mut, NonNull},
rc::Rc,
};
// Technically, this function is safe, since we're just modifying the address of a pointer without
// dereferencing it.
pub(crate) fn set_data_ptr<T: ?Sized, U>(mut ptr: *mut T, data: *mut U) -> *mut T {
// SAFETY: we only write to the address (data) component of the local `ptr`
// binding, which is always valid to write to; the pointer is never dereferenced here.
unsafe {
ptr::write(addr_of_mut!(ptr).cast::<*mut u8>(), data.cast::<u8>());
}
ptr
}
/// A garbage-collected pointer type over an immutable value.
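///
/// A short sketch; clones share a single allocation:
///
/// ```
/// use boa_gc::Gc;
///
/// let a = Gc::new(String::from("hello"));
/// let b = a.clone();
/// assert_eq!(*a, *b);
/// assert!(Gc::ptr_eq(&a, &b));
/// ```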
pub struct Gc<T: Trace + ?Sized + 'static> {
pub(crate) inner_ptr: Cell<NonNull<GcBox<T>>>,
pub(crate) marker: PhantomData<Rc<T>>,
}
impl<T: Trace> Gc<T> {
/// Constructs a new `Gc<T>` with the given value.
pub fn new(value: T) -> Self {
// Create GcBox and allocate it to heap.
//
// Note: Allocator can cause Collector to run
let inner_ptr = Allocator::allocate(GcBox::new(value));
// SAFETY: inner_ptr was just allocated, so it must be a valid value that implements [`Trace`]
unsafe { (*inner_ptr.as_ptr()).value().unroot() }
let gc = Self {
inner_ptr: Cell::new(inner_ptr),
marker: PhantomData,
};
gc.set_root();
gc
}
/// Consumes the `Gc`, returning a wrapped raw pointer.
///
/// To avoid a memory leak, the pointer must be converted back to a `Gc` using [`Gc::from_raw`].
#[allow(clippy::use_self)]
pub fn into_raw(this: Gc<T>) -> NonNull<GcBox<T>> {
let ptr = this.inner_ptr.get();
std::mem::forget(this);
ptr
}
}
impl<T: Trace + ?Sized> Gc<T> {
/// Returns `true` if the two `Gc`s point to the same allocation.
pub fn ptr_eq(this: &Self, other: &Self) -> bool {
GcBox::ptr_eq(this.inner(), other.inner())
}
/// Constructs a `Gc<T>` from a raw pointer.
///
/// The raw pointer must have been returned by a previous call to [`Gc<U>::into_raw`][Gc::into_raw]
/// where `U` must have the same size and alignment as `T`.
///
/// # Safety
///
/// This function is unsafe because improper use may lead to memory corruption, double-free,
/// or misbehaviour of the garbage collector.
#[must_use]
pub const unsafe fn from_raw(ptr: NonNull<GcBox<T>>) -> Self {
Self {
inner_ptr: Cell::new(ptr),
marker: PhantomData,
}
}
/// Returns a rooted `Gc` from a `GcBox` pointer.
pub(crate) unsafe fn from_ptr(ptr: NonNull<GcBox<T>>) -> Self {
// SAFETY: the caller must ensure that the pointer is valid.
unsafe {
ptr.as_ref().root_inner();
let gc = Self {
inner_ptr: Cell::new(ptr),
marker: PhantomData,
};
gc.set_root();
gc
}
}
}
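//
// `Gc` stores its own rootedness in the least significant bit of `inner_ptr`:
// a `GcBox` allocation is aligned to more than one byte, so bit 0 of a real
// pointer is always 0 and can double as a flag. `Gc::set_root` sets the bit,
// while `clear_root_bit` masks it off before the pointer is dereferenced.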
/// Returns the given pointer with its root bit cleared.
pub(crate) unsafe fn clear_root_bit<T: ?Sized + Trace>(
ptr: NonNull<GcBox<T>>,
) -> NonNull<GcBox<T>> {
let ptr = ptr.as_ptr();
let data = ptr.cast::<u8>();
let addr = data as isize;
let ptr = set_data_ptr(ptr, data.wrapping_offset((addr & !1) - addr));
// SAFETY: ptr must be a non null value
unsafe { NonNull::new_unchecked(ptr) }
}
impl<T: Trace + ?Sized> Gc<T> {
fn rooted(&self) -> bool {
self.inner_ptr.get().as_ptr().cast::<u8>() as usize & 1 != 0
}
pub(crate) fn set_root(&self) {
let ptr = self.inner_ptr.get().as_ptr();
let data = ptr.cast::<u8>();
let addr = data as isize;
let ptr = set_data_ptr(ptr, data.wrapping_offset((addr | 1) - addr));
// SAFETY: ptr must be a non null value.
unsafe {
self.inner_ptr.set(NonNull::new_unchecked(ptr));
}
}
fn clear_root(&self) {
// SAFETY: inner_ptr must be a valid non-null pointer to a live GcBox.
unsafe {
self.inner_ptr.set(clear_root_bit(self.inner_ptr.get()));
}
}
pub(crate) fn inner_ptr(&self) -> NonNull<GcBox<T>> {
assert!(finalizer_safe());
// SAFETY: inner_ptr must be a live GcBox. Calling this on a dropped GcBox
// can result in Undefined Behavior.
unsafe { clear_root_bit(self.inner_ptr.get()) }
}
fn inner(&self) -> &GcBox<T> {
// SAFETY: Please see Gc::inner_ptr()
unsafe { self.inner_ptr().as_ref() }
}
}
impl<T: Trace + ?Sized> Finalize for Gc<T> {}
// SAFETY: `Gc` maintains its own rootedness and implements all methods of
// `Trace`. It is not possible to root an already rooted `Gc` and vice versa.
unsafe impl<T: Trace + ?Sized> Trace for Gc<T> {
unsafe fn trace(&self) {
// SAFETY: Inner must be live and allocated GcBox.
unsafe {
self.inner().trace_inner();
}
}
unsafe fn weak_trace(&self) {
self.inner().weak_trace_inner();
}
unsafe fn root(&self) {
assert!(!self.rooted(), "Can't double-root a Gc<T>");
// Try to get inner before modifying our state. Inner may be
// inaccessible due to this method being invoked during the sweeping
// phase, and we don't want to modify our state before panicking.
self.inner().root_inner();
self.set_root();
}
unsafe fn unroot(&self) {
assert!(self.rooted(), "Can't double-unroot a Gc<T>");
// Try to get inner before modifying our state. Inner may be
// inaccessible due to this method being invoked during the sweeping
// phase, and we don't want to modify our state before panicking.
self.inner().unroot_inner();
self.clear_root();
}
fn run_finalizer(&self) {
Finalize::finalize(self);
}
}
impl<T: Trace + ?Sized> Clone for Gc<T> {
fn clone(&self) -> Self {
// SAFETY: `&self` is a valid Gc pointer.
unsafe { Self::from_ptr(self.inner_ptr()) }
}
}
impl<T: Trace + ?Sized> Deref for Gc<T> {
type Target = T;
fn deref(&self) -> &T {
self.inner().value()
}
}
impl<T: Trace + ?Sized> Drop for Gc<T> {
fn drop(&mut self) {
// If this pointer was a root, we should unroot it.
if self.rooted() {
self.inner().unroot_inner();
}
}
}
impl<T: Trace + Default> Default for Gc<T> {
fn default() -> Self {
Self::new(Default::default())
}
}
#[allow(clippy::inline_always)]
impl<T: Trace + ?Sized + PartialEq> PartialEq for Gc<T> {
#[inline(always)]
fn eq(&self, other: &Self) -> bool {
**self == **other
}
}
impl<T: Trace + ?Sized + Eq> Eq for Gc<T> {}
#[allow(clippy::inline_always)]
impl<T: Trace + ?Sized + PartialOrd> PartialOrd for Gc<T> {
#[inline(always)]
fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
(**self).partial_cmp(&**other)
}
#[inline(always)]
fn lt(&self, other: &Self) -> bool {
**self < **other
}
#[inline(always)]
fn le(&self, other: &Self) -> bool {
**self <= **other
}
#[inline(always)]
fn gt(&self, other: &Self) -> bool {
**self > **other
}
#[inline(always)]
fn ge(&self, other: &Self) -> bool {
**self >= **other
}
}
impl<T: Trace + ?Sized + Ord> Ord for Gc<T> {
fn cmp(&self, other: &Self) -> Ordering {
(**self).cmp(&**other)
}
}
impl<T: Trace + ?Sized + Hash> Hash for Gc<T> {
fn hash<H: Hasher>(&self, state: &mut H) {
(**self).hash(state);
}
}
impl<T: Trace + ?Sized + Display> Display for Gc<T> {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
Display::fmt(&**self, f)
}
}
impl<T: Trace + ?Sized + Debug> Debug for Gc<T> {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
Debug::fmt(&**self, f)
}
}
impl<T: Trace + ?Sized> fmt::Pointer for Gc<T> {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
fmt::Pointer::fmt(&self.inner(), f)
}
}
impl<T: Trace + ?Sized> std::borrow::Borrow<T> for Gc<T> {
fn borrow(&self) -> &T {
self
}
}
impl<T: Trace + ?Sized> AsRef<T> for Gc<T> {
fn as_ref(&self) -> &T {
self
}
}

View File

@@ -0,0 +1,9 @@
//! This module contains the external pointer types returned by the Boa garbage collector.
mod ephemeron;
mod gc;
mod weak;
pub use ephemeron::Ephemeron;
pub use gc::Gc;
pub use weak::WeakGc;

View File

@@ -0,0 +1,46 @@
use crate::{Ephemeron, Finalize, Gc, Trace};
/// A weak reference to a [`Gc`].
///
/// This type allows keeping references to [`Gc`]-managed values without keeping them alive across
/// garbage collections. However, this also means [`WeakGc::value`] can return `None` at any moment.
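///
/// A short sketch; the weak pointer stops resolving once its target is collected:
///
/// ```
/// use boa_gc::{force_collect, Gc, WeakGc};
///
/// let strong = Gc::new(String::from("hello"));
/// let weak = WeakGc::new(&strong);
/// assert!(weak.upgrade().is_some());
/// drop(strong);
/// force_collect();
/// assert!(weak.value().is_none());
/// ```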
#[derive(Debug, Trace, Finalize)]
#[repr(transparent)]
pub struct WeakGc<T: Trace + ?Sized + 'static> {
inner: Ephemeron<T, ()>,
}
impl<T: Trace + ?Sized> WeakGc<T> {
/// Creates a new weak pointer for a garbage collected value.
pub fn new(value: &Gc<T>) -> Self {
Self {
inner: Ephemeron::new(value, ()),
}
}
}
impl<T: Trace + ?Sized> WeakGc<T> {
/// Gets the value of this weak pointer, or `None` if the value was already garbage collected.
pub fn value(&self) -> Option<&T> {
self.inner.key()
}
/// Upgrades to a `Gc` pointer for the stored value, or returns `None` if the value was already garbage collected.
pub fn upgrade(&self) -> Option<Gc<T>> {
self.inner.upgrade_key()
}
}
impl<T: Trace + ?Sized> Clone for WeakGc<T> {
fn clone(&self) -> Self {
Self {
inner: self.inner.clone(),
}
}
}
impl<T: Trace + ?Sized> From<Ephemeron<T, ()>> for WeakGc<T> {
fn from(inner: Ephemeron<T, ()>) -> Self {
Self { inner }
}
}

View File

@@ -0,0 +1,31 @@
use super::{run_test, Harness};
use crate::{force_collect, Gc, GcCell};
#[test]
fn gc_basic_cell_allocation() {
run_test(|| {
let gc_cell = Gc::new(GcCell::new(16_u16));
force_collect();
Harness::assert_collections(1);
Harness::assert_bytes_allocated();
assert_eq!(*gc_cell.borrow_mut(), 16);
});
}
#[test]
fn gc_basic_pointer_alloc() {
run_test(|| {
let gc = Gc::new(16_u8);
force_collect();
Harness::assert_collections(1);
Harness::assert_bytes_allocated();
assert_eq!(*gc, 16);
drop(gc);
force_collect();
Harness::assert_collections(2);
Harness::assert_empty_gc();
});
}

View File

@@ -0,0 +1,14 @@
use super::run_test;
use crate::{Gc, GcCell};
#[test]
fn boa_borrow_mut_test() {
run_test(|| {
let v = Gc::new(GcCell::new(Vec::new()));
for _ in 1..=259 {
let cell = Gc::new(GcCell::new([0u8; 10]));
v.borrow_mut().push(cell);
}
});
}

View File

@@ -0,0 +1,37 @@
use crate::BOA_GC;
mod allocation;
mod cell;
mod weak;
struct Harness;
impl Harness {
fn assert_collections(o: usize) {
BOA_GC.with(|current| {
let gc = current.borrow();
assert_eq!(gc.runtime.collections, o);
});
}
fn assert_empty_gc() {
BOA_GC.with(|current| {
let gc = current.borrow();
assert!(gc.adult_start.get().is_none());
assert!(gc.runtime.bytes_allocated == 0);
});
}
fn assert_bytes_allocated() {
BOA_GC.with(|current| {
let gc = current.borrow();
assert!(gc.runtime.bytes_allocated > 0);
});
}
}
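// Each test is run on a fresh thread so that it gets its own thread-local
// `BOA_GC` state and cannot observe allocations from other tests.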
fn run_test(test: impl FnOnce() + Send + 'static) {
let handle = std::thread::spawn(test);
handle.join().unwrap();
}

View File

@@ -0,0 +1,132 @@
use super::run_test;
use crate::{force_collect, Ephemeron, Gc, WeakGc};
#[test]
fn eph_weak_gc_test() {
run_test(|| {
let gc_value = Gc::new(3);
{
let cloned_gc = gc_value.clone();
let weak = WeakGc::new(&cloned_gc);
assert_eq!(*weak.value().expect("Is live currently"), 3);
drop(cloned_gc);
force_collect();
assert_eq!(*weak.value().expect("WeakGc is still live here"), 3);
drop(gc_value);
force_collect();
assert!(weak.value().is_none());
}
});
}
#[test]
fn eph_ephemeron_test() {
run_test(|| {
let gc_value = Gc::new(3);
{
let cloned_gc = gc_value.clone();
let ephemeron = Ephemeron::new(&cloned_gc, String::from("Hello World!"));
assert_eq!(*ephemeron.key().expect("Ephemeron is live"), 3);
assert_eq!(*ephemeron.value(), String::from("Hello World!"));
drop(cloned_gc);
force_collect();
assert_eq!(*ephemeron.key().expect("Ephemeron is still live here"), 3);
drop(gc_value);
force_collect();
assert!(ephemeron.key().is_none());
}
});
}
#[test]
fn eph_allocation_chains() {
run_test(|| {
let gc_value = Gc::new(String::from("foo"));
{
let cloned_gc = gc_value.clone();
let weak = WeakGc::new(&cloned_gc);
let wrap = Gc::new(weak);
assert_eq!(wrap.value().expect("weak is live"), &String::from("foo"));
let eph = Ephemeron::new(&wrap, 3);
drop(cloned_gc);
force_collect();
assert_eq!(
eph.key()
.expect("eph is still live")
.value()
.expect("weak is still live"),
&String::from("foo")
);
drop(gc_value);
force_collect();
assert!(eph.key().expect("eph is still live").value().is_none());
}
});
}
#[test]
fn eph_basic_alloc_dump_test() {
run_test(|| {
let gc_value = Gc::new(String::from("gc here"));
let _gc_two = Gc::new("hmmm");
let eph = Ephemeron::new(&gc_value, 4);
let _fourth = Gc::new("tail");
assert_eq!(*eph.key().expect("must be live"), String::from("gc here"));
});
}
#[test]
fn eph_basic_upgrade_test() {
run_test(|| {
let init_gc = Gc::new(String::from("foo"));
let weak = WeakGc::new(&init_gc);
let new_gc = weak.upgrade().expect("Weak is still live");
drop(weak);
force_collect();
assert_eq!(*init_gc, *new_gc);
});
}
#[test]
fn eph_basic_clone_test() {
run_test(|| {
let init_gc = Gc::new(String::from("bar"));
let weak = WeakGc::new(&init_gc);
let new_gc = weak.upgrade().expect("Weak is live");
let new_weak = weak.clone();
drop(weak);
force_collect();
assert_eq!(*new_gc, *new_weak.value().expect("weak should be live"));
assert_eq!(
*init_gc,
*new_weak.value().expect("weak_should be live still")
);
});
}

View File

@@ -0,0 +1,466 @@
use std::{
borrow::{Cow, ToOwned},
cell::Cell,
collections::{BTreeMap, BTreeSet, BinaryHeap, HashMap, HashSet, LinkedList, VecDeque},
hash::{BuildHasher, Hash},
marker::PhantomData,
num::{
NonZeroI128, NonZeroI16, NonZeroI32, NonZeroI64, NonZeroI8, NonZeroIsize, NonZeroU128,
NonZeroU16, NonZeroU32, NonZeroU64, NonZeroU8, NonZeroUsize,
},
path::{Path, PathBuf},
rc::Rc,
sync::atomic::{
AtomicBool, AtomicI16, AtomicI32, AtomicI64, AtomicI8, AtomicIsize, AtomicU16, AtomicU32,
AtomicU64, AtomicU8, AtomicUsize,
},
};
/// Substitute for the [`Drop`] trait for garbage collected types.
pub trait Finalize {
/// Cleanup logic for a type.
fn finalize(&self) {}
}
/// The Trace trait, which needs to be implemented on garbage-collected objects.
///
/// # Safety
///
/// - An incorrect implementation of the trait can result in heap overflows, data corruption,
/// use-after-free, or Undefined Behaviour in general.
///
/// - Calling any of the functions marked as `unsafe` outside of the context of the garbage collector
/// can result in Undefined Behaviour.
pub unsafe trait Trace: Finalize {
/// Marks all contained `Gc`s.
///
/// # Safety
///
/// See [`Trace`].
unsafe fn trace(&self);
/// Marks all contained weak references of a `Gc`.
///
/// # Safety
///
/// See [`Trace`].
unsafe fn weak_trace(&self);
/// Increments the root-count of all contained `Gc`s.
///
/// # Safety
///
/// See [`Trace`].
unsafe fn root(&self);
/// Decrements the root-count of all contained `Gc`s.
///
/// # Safety
///
/// See [`Trace`].
unsafe fn unroot(&self);
/// Checks if an ephemeron's key is marked.
#[doc(hidden)]
fn is_marked_ephemeron(&self) -> bool {
false
}
/// Runs [`Finalize::finalize`] on this object and all
/// contained subobjects.
fn run_finalizer(&self);
}
/// Utility macro to define an empty implementation of [`Trace`].
///
/// Use this for marking types as not containing any `Trace` types.
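///
/// A short sketch for a hypothetical plain-data type:
///
/// ```
/// use boa_gc::{empty_trace, Finalize, Trace};
///
/// struct Flag(bool);
///
/// impl Finalize for Flag {}
///
/// // SAFETY: `Flag` contains no `Gc` pointers, so there is nothing to trace.
/// unsafe impl Trace for Flag {
///     empty_trace!();
/// }
/// ```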
#[macro_export]
macro_rules! empty_trace {
() => {
#[inline]
unsafe fn trace(&self) {}
#[inline]
unsafe fn weak_trace(&self) {}
#[inline]
unsafe fn root(&self) {}
#[inline]
unsafe fn unroot(&self) {}
#[inline]
fn run_finalizer(&self) {
$crate::Finalize::finalize(self)
}
};
}
/// Utility macro to manually implement [`Trace`] on a type.
///
/// You define a `this` parameter name and pass in a body, which should call `mark` on every
/// traceable element inside the body. The mark implementation will automatically delegate to the
/// correct method on the argument.
///
/// # Safety
///
/// Misusing the `mark` function may result in Undefined Behaviour.
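///
/// A short sketch for a hypothetical container type:
///
/// ```
/// use boa_gc::{custom_trace, Finalize, Trace};
///
/// struct Pair<T: Trace> {
///     a: T,
///     b: T,
/// }
///
/// impl<T: Trace> Finalize for Pair<T> {}
///
/// // SAFETY: `mark` is called on every traceable field.
/// unsafe impl<T: Trace> Trace for Pair<T> {
///     custom_trace!(this, {
///         mark(&this.a);
///         mark(&this.b);
///     });
/// }
/// ```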
#[macro_export]
macro_rules! custom_trace {
($this:ident, $body:expr) => {
#[inline]
unsafe fn trace(&self) {
fn mark<T: $crate::Trace + ?Sized>(it: &T) {
// SAFETY: The implementor must ensure that `trace` is correctly implemented.
unsafe {
$crate::Trace::trace(it);
}
}
let $this = self;
$body
}
#[inline]
unsafe fn weak_trace(&self) {
fn mark<T: $crate::Trace + ?Sized>(it: &T) {
// SAFETY: The implementor must ensure that `weak_trace` is correctly implemented.
unsafe {
$crate::Trace::weak_trace(it);
}
}
let $this = self;
$body
}
#[inline]
unsafe fn root(&self) {
fn mark<T: $crate::Trace + ?Sized>(it: &T) {
// SAFETY: The implementor must ensure that `root` is correctly implemented.
unsafe {
$crate::Trace::root(it);
}
}
let $this = self;
$body
}
#[inline]
unsafe fn unroot(&self) {
fn mark<T: $crate::Trace + ?Sized>(it: &T) {
// SAFETY: The implementor must ensure that `unroot` is correctly implemented.
unsafe {
$crate::Trace::unroot(it);
}
}
let $this = self;
$body
}
#[inline]
fn run_finalizer(&self) {
fn mark<T: $crate::Trace + ?Sized>(it: &T) {
$crate::Trace::run_finalizer(it);
}
$crate::Finalize::finalize(self);
let $this = self;
$body
}
};
}
impl<T: ?Sized> Finalize for &'static T {}
// SAFETY: 'static references don't need to be traced, since they live indefinitely.
unsafe impl<T: ?Sized> Trace for &'static T {
empty_trace!();
}
macro_rules! simple_empty_finalize_trace {
($($T:ty),*) => {
$(
impl Finalize for $T {}
// SAFETY:
// Primitive types and string types don't have inner nodes that need to be marked.
unsafe impl Trace for $T { empty_trace!(); }
)*
}
}
simple_empty_finalize_trace![
(),
bool,
isize,
usize,
i8,
u8,
i16,
u16,
i32,
u32,
i64,
u64,
i128,
u128,
f32,
f64,
char,
String,
Box<str>,
Rc<str>,
Path,
PathBuf,
NonZeroIsize,
NonZeroUsize,
NonZeroI8,
NonZeroU8,
NonZeroI16,
NonZeroU16,
NonZeroI32,
NonZeroU32,
NonZeroI64,
NonZeroU64,
NonZeroI128,
NonZeroU128,
AtomicBool,
AtomicIsize,
AtomicUsize,
AtomicI8,
AtomicU8,
AtomicI16,
AtomicU16,
AtomicI32,
AtomicU32,
AtomicI64,
AtomicU64
];
impl<T: Trace, const N: usize> Finalize for [T; N] {}
// SAFETY:
// All elements inside the array are correctly marked.
unsafe impl<T: Trace, const N: usize> Trace for [T; N] {
custom_trace!(this, {
for v in this {
mark(v);
}
});
}
macro_rules! fn_finalize_trace_one {
($ty:ty $(,$args:ident)*) => {
impl<Ret $(,$args)*> Finalize for $ty {}
// SAFETY:
// Function pointers don't have inner nodes that need to be marked.
unsafe impl<Ret $(,$args)*> Trace for $ty { empty_trace!(); }
}
}
macro_rules! fn_finalize_trace_group {
() => {
fn_finalize_trace_one!(extern "Rust" fn () -> Ret);
fn_finalize_trace_one!(extern "C" fn () -> Ret);
fn_finalize_trace_one!(unsafe extern "Rust" fn () -> Ret);
fn_finalize_trace_one!(unsafe extern "C" fn () -> Ret);
};
($($args:ident),*) => {
fn_finalize_trace_one!(extern "Rust" fn ($($args),*) -> Ret, $($args),*);
fn_finalize_trace_one!(extern "C" fn ($($args),*) -> Ret, $($args),*);
fn_finalize_trace_one!(extern "C" fn ($($args),*, ...) -> Ret, $($args),*);
fn_finalize_trace_one!(unsafe extern "Rust" fn ($($args),*) -> Ret, $($args),*);
fn_finalize_trace_one!(unsafe extern "C" fn ($($args),*) -> Ret, $($args),*);
fn_finalize_trace_one!(unsafe extern "C" fn ($($args),*, ...) -> Ret, $($args),*);
}
}
macro_rules! tuple_finalize_trace {
() => {}; // This case is handled above, by simple_finalize_empty_trace!().
($($args:ident),*) => {
impl<$($args),*> Finalize for ($($args,)*) {}
// SAFETY:
// All elements inside the tuple are correctly marked.
unsafe impl<$($args: $crate::Trace),*> Trace for ($($args,)*) {
custom_trace!(this, {
#[allow(non_snake_case, unused_unsafe)]
fn avoid_lints<$($args: $crate::Trace),*>(&($(ref $args,)*): &($($args,)*)) {
// SAFETY: The implementor must ensure a correct implementation.
unsafe { $(mark($args);)* }
}
avoid_lints(this)
});
}
}
}
macro_rules! type_arg_tuple_based_finalize_trace_impls {
($(($($args:ident),*);)*) => {
$(
fn_finalize_trace_group!($($args),*);
tuple_finalize_trace!($($args),*);
)*
}
}
type_arg_tuple_based_finalize_trace_impls![
();
(A);
(A, B);
(A, B, C);
(A, B, C, D);
(A, B, C, D, E);
(A, B, C, D, E, F);
(A, B, C, D, E, F, G);
(A, B, C, D, E, F, G, H);
(A, B, C, D, E, F, G, H, I);
(A, B, C, D, E, F, G, H, I, J);
(A, B, C, D, E, F, G, H, I, J, K);
(A, B, C, D, E, F, G, H, I, J, K, L);
];
impl<T: Trace + ?Sized> Finalize for Box<T> {}
// SAFETY: The inner value of the `Box` is correctly marked.
unsafe impl<T: Trace + ?Sized> Trace for Box<T> {
custom_trace!(this, {
mark(&**this);
});
}
impl<T: Trace> Finalize for Box<[T]> {}
// SAFETY: All the inner elements of the `Box` array are correctly marked.
unsafe impl<T: Trace> Trace for Box<[T]> {
custom_trace!(this, {
for e in this.iter() {
mark(e);
}
});
}
impl<T: Trace> Finalize for Vec<T> {}
// SAFETY: All the inner elements of the `Vec` are correctly marked.
unsafe impl<T: Trace> Trace for Vec<T> {
custom_trace!(this, {
for e in this {
mark(e);
}
});
}
impl<T: Trace> Finalize for Option<T> {}
// SAFETY: The inner value of the `Option` is correctly marked.
unsafe impl<T: Trace> Trace for Option<T> {
custom_trace!(this, {
if let Some(ref v) = *this {
mark(v);
}
});
}
impl<T: Trace, E: Trace> Finalize for Result<T, E> {}
// SAFETY: Both inner values of the `Result` are correctly marked.
unsafe impl<T: Trace, E: Trace> Trace for Result<T, E> {
custom_trace!(this, {
match *this {
Ok(ref v) => mark(v),
Err(ref v) => mark(v),
}
});
}
impl<T: Ord + Trace> Finalize for BinaryHeap<T> {}
// SAFETY: All the elements of the `BinaryHeap` are correctly marked.
unsafe impl<T: Ord + Trace> Trace for BinaryHeap<T> {
custom_trace!(this, {
for v in this.iter() {
mark(v);
}
});
}
impl<K: Trace, V: Trace> Finalize for BTreeMap<K, V> {}
// SAFETY: All the elements of the `BTreeMap` are correctly marked.
unsafe impl<K: Trace, V: Trace> Trace for BTreeMap<K, V> {
custom_trace!(this, {
for (k, v) in this {
mark(k);
mark(v);
}
});
}
impl<T: Trace> Finalize for BTreeSet<T> {}
// SAFETY: All the elements of the `BTreeSet` are correctly marked.
unsafe impl<T: Trace> Trace for BTreeSet<T> {
custom_trace!(this, {
for v in this {
mark(v);
}
});
}
impl<K: Eq + Hash + Trace, V: Trace, S: BuildHasher> Finalize for HashMap<K, V, S> {}
// SAFETY: All the elements of the `HashMap` are correctly marked.
unsafe impl<K: Eq + Hash + Trace, V: Trace, S: BuildHasher> Trace for HashMap<K, V, S> {
custom_trace!(this, {
for (k, v) in this.iter() {
mark(k);
mark(v);
}
});
}
impl<T: Eq + Hash + Trace, S: BuildHasher> Finalize for HashSet<T, S> {}
// SAFETY: All the elements of the `HashSet` are correctly marked.
unsafe impl<T: Eq + Hash + Trace, S: BuildHasher> Trace for HashSet<T, S> {
custom_trace!(this, {
for v in this.iter() {
mark(v);
}
});
}
impl<T: Eq + Hash + Trace> Finalize for LinkedList<T> {}
// SAFETY: All the elements of the `LinkedList` are correctly marked.
unsafe impl<T: Eq + Hash + Trace> Trace for LinkedList<T> {
custom_trace!(this, {
for v in this.iter() {
mark(v);
}
});
}
impl<T> Finalize for PhantomData<T> {}
// SAFETY: A `PhantomData` doesn't have inner data that needs to be marked.
unsafe impl<T> Trace for PhantomData<T> {
empty_trace!();
}
impl<T: Trace> Finalize for VecDeque<T> {}
// SAFETY: All the elements of the `VecDeque` are correctly marked.
unsafe impl<T: Trace> Trace for VecDeque<T> {
custom_trace!(this, {
for v in this.iter() {
mark(v);
}
});
}
impl<T: ToOwned + Trace + ?Sized> Finalize for Cow<'static, T> {}
// SAFETY: 'static references don't need to be traced, since they live indefinitely, and the owned
// variant is correctly marked.
unsafe impl<T: ToOwned + Trace + ?Sized> Trace for Cow<'static, T>
where
T::Owned: Trace,
{
custom_trace!(this, {
if let Cow::Owned(ref v) = this {
mark(v);
}
});
}
impl<T: Trace> Finalize for Cell<Option<T>> {
fn finalize(&self) {
if let Some(t) = self.take() {
t.finalize();
self.set(Some(t));
}
}
}
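// SAFETY: The inner value of the `Cell` is correctly marked: it is temporarily
// taken out of the cell, marked, and put back.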
unsafe impl<T: Trace> Trace for Cell<Option<T>> {
custom_trace!(this, {
if let Some(t) = this.take() {
mark(&t);
this.set(Some(t));
}
});
}