extern crate app_units;
extern crate cssparser;
extern crate euclid;
extern crate selectors;
extern crate servo_arc;
extern crate smallbitvec;
extern crate smallvec;
#[cfg(feature = "servo")]
extern crate string_cache;
extern crate thin_vec;
extern crate void;
use std::hash::{BuildHasher, Hash};
use std::mem::size_of;
use std::ops::Range;
use std::ops::{Deref, DerefMut};
use std::os::raw::c_void;
use void::Void;
/// A C function that takes a pointer to a heap allocation and returns its size.
type VoidPtrToSizeFn = unsafe extern "C" fn(ptr: *const c_void) -> usize;
/// A closure implementing a stateful predicate on pointers.
type VoidPtrToBoolFnMut = dyn FnMut(*const c_void) -> bool;
/// Operations used when measuring heap usage of data structures.
pub struct MallocSizeOfOps {
    /// A function that returns the size of a heap allocation.
    size_of_op: VoidPtrToSizeFn,
    /// Like `size_of_op`, but can take an interior pointer. Optional because
    /// not all allocators support this operation.
    enclosing_size_of_op: Option<VoidPtrToSizeFn>,
    /// Check if a pointer has been seen before, used to deduplicate shared
    /// allocations such as `Arc`s. Optional because many measurements don't
    /// need it.
    have_seen_ptr_op: Option<Box<VoidPtrToBoolFnMut>>,
}
impl MallocSizeOfOps {
pub fn new(
size_of: VoidPtrToSizeFn,
malloc_enclosing_size_of: Option<VoidPtrToSizeFn>,
have_seen_ptr: Option<Box<VoidPtrToBoolFnMut>>,
) -> Self {
MallocSizeOfOps {
size_of_op: size_of,
enclosing_size_of_op: malloc_enclosing_size_of,
have_seen_ptr_op: have_seen_ptr,
}
}
    /// The exact condition would be `ptr as usize <= align_of::<T>()`, but
    /// `align_of` cannot be called on an unsized `T`, so we approximate: 256
    /// exceeds any realistic alignment, yet is small enough that a value at
    /// or below it can never be a legitimate heap address. This treats the
    /// dangling, well-aligned sentinel pointers used by empty collections as
    /// non-allocations.
    fn is_empty<T: ?Sized>(ptr: *const T) -> bool {
        ptr as *const usize as usize <= 256
    }
    /// Call `size_of_op` on `ptr`, first checking that the allocation isn't
    /// empty, because empty collections use small sentinel pointers rather
    /// than real allocations.
    pub unsafe fn malloc_size_of<T: ?Sized>(&self, ptr: *const T) -> usize {
        if MallocSizeOfOps::is_empty(ptr) {
            0
        } else {
            (self.size_of_op)(ptr as *const c_void)
        }
    }
    /// Is an `enclosing_size_of_op` available?
    pub fn has_malloc_enclosing_size_of(&self) -> bool {
        self.enclosing_size_of_op.is_some()
    }
    /// Call `enclosing_size_of_op` on `ptr`, which must not be empty.
    /// Panics if the op is unavailable.
    pub unsafe fn malloc_enclosing_size_of<T>(&self, ptr: *const T) -> usize {
        assert!(!MallocSizeOfOps::is_empty(ptr));
        (self.enclosing_size_of_op.unwrap())(ptr as *const c_void)
    }
    /// Call `have_seen_ptr_op` on `ptr`. Panics if the op is unavailable.
    pub fn have_seen_ptr<T>(&mut self, ptr: *const T) -> bool {
        let have_seen_ptr_op = self
            .have_seen_ptr_op
            .as_mut()
            .expect("missing have_seen_ptr_op");
        have_seen_ptr_op(ptr as *const c_void)
    }
}
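// A minimal sketch (not part of this crate's API) of wiring `MallocSizeOfOps`
// to a real allocator. On Linux, the `libc` crate exposes
// `malloc_usable_size`, which reports the usable size of a heap block; other
// platforms need their own hook.
//
//     unsafe extern "C" fn usable_size(ptr: *const c_void) -> usize {
//         libc::malloc_usable_size(ptr as *mut c_void)
//     }
//
//     let mut ops = MallocSizeOfOps::new(usable_size, None, None);
//     let s = String::from("measure me");
//     let heap_bytes = s.size_of(&mut ops);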
/// Trait for measuring the "deep" size of an object: the heap memory it
/// owns, both directly and transitively through its fields.
pub trait MallocSizeOf {
    /// Measure the heap usage of all descendant heap-allocated structures,
    /// but not the space taken up by the value itself.
    fn size_of(&self, ops: &mut MallocSizeOfOps) -> usize;
}
/// Trait for measuring the "shallow" size of an object: only the heap
/// blocks it points to directly, not the contents of those blocks.
pub trait MallocShallowSizeOf {
    fn shallow_size_of(&self, ops: &mut MallocSizeOfOps) -> usize;
}
/// Like `MallocSizeOf`, but measures unconditionally. Used for refcounted
/// values (e.g. `Arc`) when the caller has decided the allocation should be
/// counted here even though it may be shared.
pub trait MallocUnconditionalSizeOf {
    fn unconditional_size_of(&self, ops: &mut MallocSizeOfOps) -> usize;
}
/// `MallocUnconditionalSizeOf` combined with `MallocShallowSizeOf`.
pub trait MallocUnconditionalShallowSizeOf {
    fn unconditional_shallow_size_of(&self, ops: &mut MallocSizeOfOps) -> usize;
}
/// Like `MallocSizeOf`, but only measures an allocation the first time it is
/// seen (tracked via `have_seen_ptr`), so shared allocations are counted
/// once.
pub trait MallocConditionalSizeOf {
    fn conditional_size_of(&self, ops: &mut MallocSizeOfOps) -> usize;
}
/// `MallocConditionalSizeOf` combined with `MallocShallowSizeOf`.
pub trait MallocConditionalShallowSizeOf {
    fn conditional_shallow_size_of(&self, ops: &mut MallocSizeOfOps) -> usize;
}
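// For a user-defined type, a typical `MallocSizeOf` implementation just sums
// the sizes of its fields; a sketch with a hypothetical `MyNode`:
//
//     struct MyNode {
//         name: String,
//         children: Vec<MyNode>,
//     }
//
//     impl MallocSizeOf for MyNode {
//         fn size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
//             self.name.size_of(ops) + self.children.size_of(ops)
//         }
//     }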
impl MallocSizeOf for String {
fn size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
unsafe { ops.malloc_size_of(self.as_ptr()) }
}
}
impl<'a, T: ?Sized> MallocSizeOf for &'a T {
    fn size_of(&self, _ops: &mut MallocSizeOfOps) -> usize {
        // A reference does not own its pointee; measuring it here would
        // double-count memory owned elsewhere.
        0
    }
}
impl<T: ?Sized> MallocShallowSizeOf for Box<T> {
fn shallow_size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
unsafe { ops.malloc_size_of(&**self) }
}
}
impl<T: MallocSizeOf + ?Sized> MallocSizeOf for Box<T> {
fn size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
self.shallow_size_of(ops) + (**self).size_of(ops)
}
}
impl MallocSizeOf for () {
fn size_of(&self, _ops: &mut MallocSizeOfOps) -> usize {
0
}
}
impl<T1, T2> MallocSizeOf for (T1, T2)
where
T1: MallocSizeOf,
T2: MallocSizeOf,
{
fn size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
self.0.size_of(ops) + self.1.size_of(ops)
}
}
impl<T1, T2, T3> MallocSizeOf for (T1, T2, T3)
where
T1: MallocSizeOf,
T2: MallocSizeOf,
T3: MallocSizeOf,
{
fn size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
self.0.size_of(ops) + self.1.size_of(ops) + self.2.size_of(ops)
}
}
impl<T1, T2, T3, T4> MallocSizeOf for (T1, T2, T3, T4)
where
T1: MallocSizeOf,
T2: MallocSizeOf,
T3: MallocSizeOf,
T4: MallocSizeOf,
{
fn size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
self.0.size_of(ops) + self.1.size_of(ops) + self.2.size_of(ops) + self.3.size_of(ops)
}
}
impl<T: MallocSizeOf> MallocSizeOf for Option<T> {
fn size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
if let Some(val) = self.as_ref() {
val.size_of(ops)
} else {
0
}
}
}
impl<T: MallocSizeOf, E: MallocSizeOf> MallocSizeOf for Result<T, E> {
fn size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
match *self {
Ok(ref x) => x.size_of(ops),
Err(ref e) => e.size_of(ops),
}
}
}
impl<T: MallocSizeOf + Copy> MallocSizeOf for std::cell::Cell<T> {
fn size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
self.get().size_of(ops)
}
}
impl<T: MallocSizeOf> MallocSizeOf for std::cell::RefCell<T> {
fn size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
self.borrow().size_of(ops)
}
}
impl<'a, B: ?Sized + ToOwned> MallocSizeOf for std::borrow::Cow<'a, B>
where
B::Owned: MallocSizeOf,
{
fn size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
match *self {
std::borrow::Cow::Borrowed(_) => 0,
std::borrow::Cow::Owned(ref b) => b.size_of(ops),
}
}
}
impl<T: MallocSizeOf> MallocSizeOf for [T] {
fn size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
let mut n = 0;
for elem in self.iter() {
n += elem.size_of(ops);
}
n
}
}
impl<T> MallocShallowSizeOf for Vec<T> {
fn shallow_size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
unsafe { ops.malloc_size_of(self.as_ptr()) }
}
}
impl<T: MallocSizeOf> MallocSizeOf for Vec<T> {
fn size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
let mut n = self.shallow_size_of(ops);
for elem in self.iter() {
n += elem.size_of(ops);
}
n
}
}
impl<T> MallocShallowSizeOf for std::collections::VecDeque<T> {
    fn shallow_size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
        if ops.has_malloc_enclosing_size_of() {
            if let Some(front) = self.front() {
                // The front element gives an interior pointer that the
                // allocator can resolve to the enclosing heap block.
                unsafe { ops.malloc_enclosing_size_of(&*front) }
            } else {
                // With no elements there is no interior pointer to resolve,
                // so an empty deque is reported as 0.
                0
            }
        } else {
            // An estimate, based on the current capacity.
            self.capacity() * size_of::<T>()
        }
    }
}
impl<T: MallocSizeOf> MallocSizeOf for std::collections::VecDeque<T> {
fn size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
let mut n = self.shallow_size_of(ops);
for elem in self.iter() {
n += elem.size_of(ops);
}
n
}
}
impl<A: smallvec::Array> MallocShallowSizeOf for smallvec::SmallVec<A> {
    fn shallow_size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
        if self.spilled() {
            // The elements have moved from the inline buffer to the heap.
            unsafe { ops.malloc_size_of(self.as_ptr()) }
        } else {
            // Still using the inline buffer; no heap allocation to measure.
            0
        }
    }
}
impl<A> MallocSizeOf for smallvec::SmallVec<A>
where
A: smallvec::Array,
A::Item: MallocSizeOf,
{
fn size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
let mut n = self.shallow_size_of(ops);
for elem in self.iter() {
n += elem.size_of(ops);
}
n
}
}
impl<T> MallocShallowSizeOf for thin_vec::ThinVec<T> {
    fn shallow_size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
        if self.capacity() == 0 {
            // An empty ThinVec points at a shared static header, not a heap
            // allocation.
            return 0;
        }
        // A ThinVec is a single pointer to a heap block holding both the
        // header and the elements, so the struct itself can be reinterpreted
        // as that pointer.
        assert_eq!(
            std::mem::size_of::<Self>(),
            std::mem::size_of::<*const ()>()
        );
        unsafe { ops.malloc_size_of(*(self as *const Self as *const *const ())) }
    }
}
impl<T: MallocSizeOf> MallocSizeOf for thin_vec::ThinVec<T> {
fn size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
let mut n = self.shallow_size_of(ops);
for elem in self.iter() {
n += elem.size_of(ops);
}
n
}
}
macro_rules! malloc_size_of_hash_set {
($ty:ty) => {
impl<T, S> MallocShallowSizeOf for $ty
where
T: Eq + Hash,
S: BuildHasher,
{
            fn shallow_size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
                if ops.has_malloc_enclosing_size_of() {
                    // Resolve the table's heap block from an interior pointer
                    // to the first element; an empty set measures as zero.
                    self.iter()
                        .next()
                        .map_or(0, |t| unsafe { ops.malloc_enclosing_size_of(t) })
                } else {
                    // An estimate: the elements plus roughly one word of
                    // table overhead per slot.
                    self.capacity() * (size_of::<T>() + size_of::<usize>())
                }
            }
}
impl<T, S> MallocSizeOf for $ty
where
T: Eq + Hash + MallocSizeOf,
S: BuildHasher,
{
fn size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
let mut n = self.shallow_size_of(ops);
for t in self.iter() {
n += t.size_of(ops);
}
n
}
}
};
}
malloc_size_of_hash_set!(std::collections::HashSet<T, S>);
macro_rules! malloc_size_of_hash_map {
($ty:ty) => {
impl<K, V, S> MallocShallowSizeOf for $ty
where
K: Eq + Hash,
S: BuildHasher,
{
            fn shallow_size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
                if ops.has_malloc_enclosing_size_of() {
                    // Resolve the table's heap block from an interior pointer
                    // to the first value; an empty map measures as zero.
                    self.values()
                        .next()
                        .map_or(0, |v| unsafe { ops.malloc_enclosing_size_of(v) })
                } else {
                    // An estimate: the entries plus roughly one word of table
                    // overhead per slot.
                    self.capacity() * (size_of::<V>() + size_of::<K>() + size_of::<usize>())
                }
            }
}
impl<K, V, S> MallocSizeOf for $ty
where
K: Eq + Hash + MallocSizeOf,
V: MallocSizeOf,
S: BuildHasher,
{
fn size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
let mut n = self.shallow_size_of(ops);
for (k, v) in self.iter() {
n += k.size_of(ops);
n += v.size_of(ops);
}
n
}
}
};
}
malloc_size_of_hash_map!(std::collections::HashMap<K, V, S>);
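// The macros above can be instantiated for any set/map type exposing the
// same `iter`/`values`/`capacity` API; for example, a hashbrown-based map
// (hypothetical here, not a declared dependency of this crate):
//
//     malloc_size_of_hash_map!(hashbrown::HashMap<K, V, S>);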
impl<K, V> MallocShallowSizeOf for std::collections::BTreeMap<K, V>
where
    K: Eq + Hash,
{
    fn shallow_size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
        if ops.has_malloc_enclosing_size_of() {
            // Resolve the containing heap block from an interior pointer to
            // the first value.
            self.values()
                .next()
                .map_or(0, |v| unsafe { ops.malloc_enclosing_size_of(v) })
        } else {
            // An estimate; per-node overhead and unused node slots are not
            // counted.
            self.len() * (size_of::<V>() + size_of::<K>() + size_of::<usize>())
        }
    }
}
impl<K, V> MallocSizeOf for std::collections::BTreeMap<K, V>
where
K: Eq + Hash + MallocSizeOf,
V: MallocSizeOf,
{
fn size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
let mut n = self.shallow_size_of(ops);
for (k, v) in self.iter() {
n += k.size_of(ops);
n += v.size_of(ops);
}
n
}
}
impl<T> MallocSizeOf for std::marker::PhantomData<T> {
fn size_of(&self, _ops: &mut MallocSizeOfOps) -> usize {
0
}
}
impl<T> MallocUnconditionalShallowSizeOf for servo_arc::Arc<T> {
fn unconditional_shallow_size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
unsafe { ops.malloc_size_of(self.heap_ptr()) }
}
}
impl<T: MallocSizeOf> MallocUnconditionalSizeOf for servo_arc::Arc<T> {
fn unconditional_size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
self.unconditional_shallow_size_of(ops) + (**self).size_of(ops)
}
}
impl<T> MallocConditionalShallowSizeOf for servo_arc::Arc<T> {
fn conditional_shallow_size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
if ops.have_seen_ptr(self.heap_ptr()) {
0
} else {
self.unconditional_shallow_size_of(ops)
}
}
}
impl<T: MallocSizeOf> MallocConditionalSizeOf for servo_arc::Arc<T> {
fn conditional_size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
if ops.have_seen_ptr(self.heap_ptr()) {
0
} else {
self.unconditional_size_of(ops)
}
}
}
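// Sketch of deduplicating shared `Arc`s: a closure over a `HashSet` of seen
// addresses serves as `have_seen_ptr_op` (`usable_size` is the allocator
// hook from the earlier sketch; all names are illustrative).
//
//     use std::collections::HashSet;
//
//     let mut seen: HashSet<usize> = HashSet::new();
//     let have_seen: Box<dyn FnMut(*const c_void) -> bool> =
//         Box::new(move |ptr| !seen.insert(ptr as usize));
//     let mut ops = MallocSizeOfOps::new(usable_size, None, Some(have_seen));
//
//     let a = servo_arc::Arc::new(vec![0u8; 1024]);
//     let b = a.clone();
//     let first = a.conditional_size_of(&mut ops); // counts the allocation
//     let second = b.conditional_size_of(&mut ops); // 0: already seen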
impl<T: MallocSizeOf> MallocSizeOf for std::sync::Mutex<T> {
    fn size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
        // Locks the mutex to measure its contents; panics if poisoned.
        (*self.lock().unwrap()).size_of(ops)
    }
}
impl MallocSizeOf for smallbitvec::SmallBitVec {
fn size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
if let Some(ptr) = self.heap_ptr() {
unsafe { ops.malloc_size_of(ptr) }
} else {
0
}
}
}
impl<T: MallocSizeOf, Unit> MallocSizeOf for euclid::Length<T, Unit> {
fn size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
self.0.size_of(ops)
}
}
impl<T: MallocSizeOf, Src, Dst> MallocSizeOf for euclid::Scale<T, Src, Dst> {
fn size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
self.0.size_of(ops)
}
}
impl<T: MallocSizeOf, U> MallocSizeOf for euclid::Point2D<T, U> {
fn size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
self.x.size_of(ops) + self.y.size_of(ops)
}
}
impl<T: MallocSizeOf, U> MallocSizeOf for euclid::Rect<T, U> {
fn size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
self.origin.size_of(ops) + self.size.size_of(ops)
}
}
impl<T: MallocSizeOf, U> MallocSizeOf for euclid::SideOffsets2D<T, U> {
fn size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
self.top.size_of(ops) +
self.right.size_of(ops) +
self.bottom.size_of(ops) +
self.left.size_of(ops)
}
}
impl<T: MallocSizeOf, U> MallocSizeOf for euclid::Size2D<T, U> {
fn size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
self.width.size_of(ops) + self.height.size_of(ops)
}
}
impl<T: MallocSizeOf, Src, Dst> MallocSizeOf for euclid::Transform2D<T, Src, Dst> {
fn size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
self.m11.size_of(ops) +
self.m12.size_of(ops) +
self.m21.size_of(ops) +
self.m22.size_of(ops) +
self.m31.size_of(ops) +
self.m32.size_of(ops)
}
}
impl<T: MallocSizeOf, Src, Dst> MallocSizeOf for euclid::Transform3D<T, Src, Dst> {
fn size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
self.m11.size_of(ops) +
self.m12.size_of(ops) +
self.m13.size_of(ops) +
self.m14.size_of(ops) +
self.m21.size_of(ops) +
self.m22.size_of(ops) +
self.m23.size_of(ops) +
self.m24.size_of(ops) +
self.m31.size_of(ops) +
self.m32.size_of(ops) +
self.m33.size_of(ops) +
self.m34.size_of(ops) +
self.m41.size_of(ops) +
self.m42.size_of(ops) +
self.m43.size_of(ops) +
self.m44.size_of(ops)
}
}
impl<T: MallocSizeOf, U> MallocSizeOf for euclid::Vector2D<T, U> {
fn size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
self.x.size_of(ops) + self.y.size_of(ops)
}
}
impl MallocSizeOf for selectors::parser::AncestorHashes {
fn size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
let selectors::parser::AncestorHashes { ref packed_hashes } = *self;
packed_hashes.size_of(ops)
}
}
impl<Impl: selectors::parser::SelectorImpl> MallocUnconditionalSizeOf
for selectors::parser::Selector<Impl>
where
Impl::NonTSPseudoClass: MallocSizeOf,
Impl::PseudoElement: MallocSizeOf,
{
fn unconditional_size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
let mut n = 0;
n += unsafe { ops.malloc_size_of(self.thin_arc_heap_ptr()) };
for component in self.iter_raw_match_order() {
            // Call `unconditional_size_of` explicitly: plain `size_of` would
            // resolve to the blanket `&T` impl (which returns 0), since
            // `Component` only implements `MallocUnconditionalSizeOf`.
            n += component.unconditional_size_of(ops);
}
n
}
}
impl<Impl: selectors::parser::SelectorImpl> MallocUnconditionalSizeOf
for selectors::parser::SelectorList<Impl>
where
Impl::NonTSPseudoClass: MallocSizeOf,
Impl::PseudoElement: MallocSizeOf,
{
fn unconditional_size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
let mut n = 0;
n += unsafe { ops.malloc_size_of(self.thin_arc_heap_ptr()) };
        // A single-selector list shares its allocation with the selector
        // itself, which the `thin_arc_heap_ptr` measurement above already
        // covered; only multi-selector lists have per-selector sizes to add.
        if self.len() > 1 {
            for selector in self.slice().iter() {
                n += selector.unconditional_size_of(ops);
            }
        }
n
}
}
impl<Impl: selectors::parser::SelectorImpl> MallocUnconditionalSizeOf
for selectors::parser::Component<Impl>
where
Impl::NonTSPseudoClass: MallocSizeOf,
Impl::PseudoElement: MallocSizeOf,
{
fn unconditional_size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
use selectors::parser::Component;
match self {
Component::AttributeOther(ref attr_selector) => attr_selector.size_of(ops),
Component::Negation(ref components) => components.unconditional_size_of(ops),
Component::NonTSPseudoClass(ref pseudo) => (*pseudo).size_of(ops),
Component::Slotted(ref selector) | Component::Host(Some(ref selector)) => {
selector.unconditional_size_of(ops)
},
Component::Is(ref list) | Component::Where(ref list) => list.unconditional_size_of(ops),
Component::Has(ref relative_selectors) => relative_selectors.size_of(ops),
Component::NthOf(ref nth_of_data) => nth_of_data.size_of(ops),
Component::PseudoElement(ref pseudo) => (*pseudo).size_of(ops),
Component::Combinator(..) |
Component::ExplicitAnyNamespace |
Component::ExplicitNoNamespace |
Component::DefaultNamespace(..) |
Component::Namespace(..) |
Component::ExplicitUniversalType |
Component::LocalName(..) |
Component::ID(..) |
Component::Part(..) |
Component::Class(..) |
Component::AttributeInNoNamespaceExists { .. } |
Component::AttributeInNoNamespace { .. } |
Component::Root |
Component::Empty |
Component::Scope |
Component::ImplicitScope |
Component::ParentSelector |
Component::Nth(..) |
Component::Host(None) |
Component::RelativeSelectorAnchor |
Component::Invalid(..) => 0,
}
}
}
impl<Impl: selectors::parser::SelectorImpl> MallocSizeOf
for selectors::attr::AttrSelectorWithOptionalNamespace<Impl>
{
fn size_of(&self, _ops: &mut MallocSizeOfOps) -> usize {
0
}
}
impl MallocSizeOf for Void {
#[inline]
fn size_of(&self, _ops: &mut MallocSizeOfOps) -> usize {
void::unreachable(*self)
}
}
#[cfg(feature = "servo")]
impl<Static: string_cache::StaticAtomSet> MallocSizeOf for string_cache::Atom<Static> {
fn size_of(&self, _ops: &mut MallocSizeOfOps) -> usize {
0
}
}
#[macro_export]
macro_rules! malloc_size_of_is_0(
($($ty:ty),+) => (
$(
impl $crate::MallocSizeOf for $ty {
#[inline(always)]
fn size_of(&self, _: &mut $crate::MallocSizeOfOps) -> usize {
0
}
}
)+
);
($($ty:ident<$($gen:ident),+>),+) => (
$(
impl<$($gen: $crate::MallocSizeOf),+> $crate::MallocSizeOf for $ty<$($gen),+> {
#[inline(always)]
fn size_of(&self, _: &mut $crate::MallocSizeOfOps) -> usize {
0
}
}
)+
);
);
malloc_size_of_is_0!(bool, char, str);
malloc_size_of_is_0!(u8, u16, u32, u64, u128, usize);
malloc_size_of_is_0!(i8, i16, i32, i64, i128, isize);
malloc_size_of_is_0!(f32, f64);
malloc_size_of_is_0!(std::sync::atomic::AtomicBool);
malloc_size_of_is_0!(std::sync::atomic::AtomicIsize);
malloc_size_of_is_0!(std::sync::atomic::AtomicUsize);
malloc_size_of_is_0!(std::num::NonZeroUsize);
malloc_size_of_is_0!(std::num::NonZeroU64);
malloc_size_of_is_0!(Range<u8>, Range<u16>, Range<u32>, Range<u64>, Range<usize>);
malloc_size_of_is_0!(Range<i8>, Range<i16>, Range<i32>, Range<i64>, Range<isize>);
malloc_size_of_is_0!(Range<f32>, Range<f64>);
malloc_size_of_is_0!(app_units::Au);
malloc_size_of_is_0!(
    cssparser::TokenSerializationType,
    cssparser::SourceLocation,
    cssparser::SourcePosition
);
malloc_size_of_is_0!(selectors::OpaqueElement);
/// A wrapper for a value to be measured, exposing the inner value through
/// `Deref` and `DerefMut`.
#[derive(Clone)]
pub struct Measurable<T: MallocSizeOf>(pub T);
impl<T: MallocSizeOf> Deref for Measurable<T> {
type Target = T;
fn deref(&self) -> &T {
&self.0
}
}
impl<T: MallocSizeOf> DerefMut for Measurable<T> {
fn deref_mut(&mut self) -> &mut T {
&mut self.0
}
}
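// A self-contained sanity check of the shallow-plus-contents convention,
// using a fake allocator that reports a fixed block size (the constant is
// arbitrary; this is a sketch, not an exhaustive test).
#[cfg(test)]
mod sketch_tests {
    use super::*;

    // Pretend every heap block occupies exactly 64 bytes.
    unsafe extern "C" fn fake_size_of(_ptr: *const c_void) -> usize {
        64
    }

    #[test]
    fn vec_counts_one_block_and_its_contents() {
        let mut ops = MallocSizeOfOps::new(fake_size_of, None, None);
        // `u8` contents report 0, so the deep size is just the buffer block.
        let v: Vec<u8> = vec![1, 2, 3];
        assert_eq!(v.size_of(&mut ops), 64);
        // A boxed value adds one block for the box plus its contents.
        let b: Box<Vec<u8>> = Box::new(vec![4, 5, 6]);
        assert_eq!(b.size_of(&mut ops), 64 + 64);
    }
}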