stylo_malloc_size_of/
lib.rs

// Copyright 2016-2017 The Servo Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.

//! A crate for measuring the heap usage of data structures in a way that
//! integrates with Firefox's memory reporting, particularly the use of
//! mozjemalloc and DMD. In particular, it has the following features.
//! - It isn't bound to a particular heap allocator.
//! - It provides traits for both "shallow" and "deep" measurement, which gives
//!   flexibility in the cases where the traits can't be used.
//! - It allows for measuring blocks even when only an interior pointer can be
//!   obtained for heap allocations, e.g. `HashSet` and `HashMap`. (This relies
//!   on the heap allocator having suitable support, which mozjemalloc has.)
//! - It allows handling of types like `Rc` and `Arc` by providing traits that
//!   are different from the ones for non-graph structures.
//!
//! Suggested uses are as follows.
//! - When possible, use the `MallocSizeOf` trait. (Deriving support is
//!   provided by the `malloc_size_of_derive` crate.)
//! - If you need an additional synchronization argument, provide a function
//!   that is like the standard trait method, but with the extra argument.
//! - If you need multiple measurements for a type, provide a function named
//!   `add_size_of` that takes a mutable reference to a struct that contains
//!   the multiple measurement fields.
//! - When deep measurement (via `MallocSizeOf`) cannot be implemented for a
//!   type, shallow measurement (via `MallocShallowSizeOf`) in combination with
//!   iteration can be a useful substitute.
//! - `Rc` and `Arc` are always tricky, which is why `MallocSizeOf` is not (and
//!   should not be) implemented for them.
//! - If an `Rc` or `Arc` is known to be a "primary" reference and can always
//!   be measured, it should be measured via the `MallocUnconditionalSizeOf`
//!   trait.
//! - If an `Rc` or `Arc` should be measured only if it hasn't been seen
//!   before, it should be measured via the `MallocConditionalSizeOf` trait.
//! - Using universal function call syntax is a good idea when measuring boxed
//!   fields in structs, because it makes it clear that the Box is being
//!   measured as well as the thing it points to. E.g.
//!   `<Box<_> as MallocSizeOf>::size_of(field, ops)`.
//!
//!   Note: WebRender has a reduced fork of this crate, so that we can avoid
//!   publishing this crate on crates.io.
49use std::hash::{BuildHasher, Hash};
50use std::mem::size_of;
51use std::ops::Range;
52use std::ops::{Deref, DerefMut};
53use std::os::raw::c_void;
54use void::Void;
55
/// A C function that takes a pointer to a heap allocation and returns its size.
type VoidPtrToSizeFn = unsafe extern "C" fn(ptr: *const c_void) -> usize;

/// A closure implementing a stateful predicate on pointers: returns whether
/// the pointer has been seen on a previous call, remembering it for next time.
/// Used to deduplicate `Rc`/`Arc` measurements.
type VoidPtrToBoolFnMut = dyn FnMut(*const c_void) -> bool;
61
/// Operations used when measuring heap usage of data structures.
pub struct MallocSizeOfOps {
    /// A function that returns the size of a heap allocation, given a pointer
    /// to its start.
    size_of_op: VoidPtrToSizeFn,

    /// Like `size_of_op`, but can take an interior pointer. Optional because
    /// not all allocators support this operation. If it's not provided, some
    /// memory measurements will actually be computed estimates rather than
    /// real and accurate measurements.
    enclosing_size_of_op: Option<VoidPtrToSizeFn>,

    /// Check if a pointer has been seen before, and remember it for next time.
    /// Useful when measuring `Rc`s and `Arc`s. Optional, because many places
    /// don't need it.
    have_seen_ptr_op: Option<Box<VoidPtrToBoolFnMut>>,
}
78
79impl MallocSizeOfOps {
80    pub fn new(
81        size_of: VoidPtrToSizeFn,
82        malloc_enclosing_size_of: Option<VoidPtrToSizeFn>,
83        have_seen_ptr: Option<Box<VoidPtrToBoolFnMut>>,
84    ) -> Self {
85        MallocSizeOfOps {
86            size_of_op: size_of,
87            enclosing_size_of_op: malloc_enclosing_size_of,
88            have_seen_ptr_op: have_seen_ptr,
89        }
90    }
91
92    /// Check if an allocation is empty. This relies on knowledge of how Rust
93    /// handles empty allocations, which may change in the future.
94    fn is_empty<T: ?Sized>(ptr: *const T) -> bool {
95        // The correct condition is this:
96        //   `ptr as usize <= ::std::mem::align_of::<T>()`
97        // But we can't call align_of() on a ?Sized T. So we approximate it
98        // with the following. 256 is large enough that it should always be
99        // larger than the required alignment, but small enough that it is
100        // always in the first page of memory and therefore not a legitimate
101        // address.
102        return ptr as *const usize as usize <= 256;
103    }
104
105    /// Call `size_of_op` on `ptr`, first checking that the allocation isn't
106    /// empty, because some types (such as `Vec`) utilize empty allocations.
107    pub unsafe fn malloc_size_of<T: ?Sized>(&self, ptr: *const T) -> usize {
108        if MallocSizeOfOps::is_empty(ptr) {
109            0
110        } else {
111            (self.size_of_op)(ptr as *const c_void)
112        }
113    }
114
115    /// Is an `enclosing_size_of_op` available?
116    pub fn has_malloc_enclosing_size_of(&self) -> bool {
117        self.enclosing_size_of_op.is_some()
118    }
119
120    /// Call `enclosing_size_of_op`, which must be available, on `ptr`, which
121    /// must not be empty.
122    pub unsafe fn malloc_enclosing_size_of<T>(&self, ptr: *const T) -> usize {
123        assert!(!MallocSizeOfOps::is_empty(ptr));
124        (self.enclosing_size_of_op.unwrap())(ptr as *const c_void)
125    }
126
127    /// Call `have_seen_ptr_op` on `ptr`.
128    pub fn have_seen_ptr<T>(&mut self, ptr: *const T) -> bool {
129        let have_seen_ptr_op = self
130            .have_seen_ptr_op
131            .as_mut()
132            .expect("missing have_seen_ptr_op");
133        have_seen_ptr_op(ptr as *const c_void)
134    }
135}
136
/// Trait for measuring the "deep" heap usage of a data structure. This is the
/// most commonly-used of the traits.
pub trait MallocSizeOf {
    /// Measure the heap usage of all descendant heap-allocated structures, but
    /// not the space taken up by the value itself. The `ops` argument supplies
    /// the allocator callbacks used to size each heap block.
    fn size_of(&self, ops: &mut MallocSizeOfOps) -> usize;
}
144
/// Trait for measuring the "shallow" heap usage of a container.
pub trait MallocShallowSizeOf {
    /// Measure the heap usage of immediate heap-allocated descendant
    /// structures, but not the space taken up by the value itself. Anything
    /// beyond the immediate descendants must be measured separately, using
    /// iteration.
    fn shallow_size_of(&self, ops: &mut MallocSizeOfOps) -> usize;
}
153
/// Like `MallocSizeOf`, but with a different name so it cannot be used
/// accidentally with derive(MallocSizeOf). For use with types like `Rc` and
/// `Arc` when appropriate (e.g. when measuring a "primary" reference).
pub trait MallocUnconditionalSizeOf {
    /// Measure the heap usage of all heap-allocated descendant structures, but
    /// not the space taken up by the value itself. Measures even if the
    /// allocation may be shared (hence "unconditional").
    fn unconditional_size_of(&self, ops: &mut MallocSizeOfOps) -> usize;
}
162
/// `MallocUnconditionalSizeOf` combined with `MallocShallowSizeOf`.
pub trait MallocUnconditionalShallowSizeOf {
    /// `unconditional_size_of` combined with `shallow_size_of`: measure only
    /// the immediate allocation, regardless of sharing.
    fn unconditional_shallow_size_of(&self, ops: &mut MallocSizeOfOps) -> usize;
}
168
/// Like `MallocSizeOf`, but only measures if the value hasn't already been
/// measured. For use with types like `Rc` and `Arc` when appropriate (e.g.
/// when there is no "primary" reference).
pub trait MallocConditionalSizeOf {
    /// Measure the heap usage of all heap-allocated descendant structures, but
    /// not the space taken up by the value itself, and only if that heap usage
    /// hasn't already been measured (as tracked via `ops.have_seen_ptr`).
    fn conditional_size_of(&self, ops: &mut MallocSizeOfOps) -> usize;
}
178
/// `MallocConditionalSizeOf` combined with `MallocShallowSizeOf`.
pub trait MallocConditionalShallowSizeOf {
    /// `conditional_size_of` combined with `shallow_size_of`: measure only the
    /// immediate allocation, and only if it hasn't been seen before.
    fn conditional_shallow_size_of(&self, ops: &mut MallocSizeOfOps) -> usize;
}
184
185impl MallocSizeOf for String {
186    fn size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
187        unsafe { ops.malloc_size_of(self.as_ptr()) }
188    }
189}
190
191impl<'a, T: ?Sized> MallocSizeOf for &'a T {
192    fn size_of(&self, _ops: &mut MallocSizeOfOps) -> usize {
193        // Zero makes sense for a non-owning reference.
194        0
195    }
196}
197
198impl<T: ?Sized> MallocShallowSizeOf for Box<T> {
199    fn shallow_size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
200        unsafe { ops.malloc_size_of(&**self) }
201    }
202}
203
204impl<T: MallocSizeOf + ?Sized> MallocSizeOf for Box<T> {
205    fn size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
206        self.shallow_size_of(ops) + (**self).size_of(ops)
207    }
208}
209
210impl MallocSizeOf for () {
211    fn size_of(&self, _ops: &mut MallocSizeOfOps) -> usize {
212        0
213    }
214}
215
216impl<T1, T2> MallocSizeOf for (T1, T2)
217where
218    T1: MallocSizeOf,
219    T2: MallocSizeOf,
220{
221    fn size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
222        self.0.size_of(ops) + self.1.size_of(ops)
223    }
224}
225
226impl<T1, T2, T3> MallocSizeOf for (T1, T2, T3)
227where
228    T1: MallocSizeOf,
229    T2: MallocSizeOf,
230    T3: MallocSizeOf,
231{
232    fn size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
233        self.0.size_of(ops) + self.1.size_of(ops) + self.2.size_of(ops)
234    }
235}
236
237impl<T1, T2, T3, T4> MallocSizeOf for (T1, T2, T3, T4)
238where
239    T1: MallocSizeOf,
240    T2: MallocSizeOf,
241    T3: MallocSizeOf,
242    T4: MallocSizeOf,
243{
244    fn size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
245        self.0.size_of(ops) + self.1.size_of(ops) + self.2.size_of(ops) + self.3.size_of(ops)
246    }
247}
248
249impl<T: MallocSizeOf> MallocSizeOf for Option<T> {
250    fn size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
251        if let Some(val) = self.as_ref() {
252            val.size_of(ops)
253        } else {
254            0
255        }
256    }
257}
258
259impl<T: MallocSizeOf, E: MallocSizeOf> MallocSizeOf for Result<T, E> {
260    fn size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
261        match *self {
262            Ok(ref x) => x.size_of(ops),
263            Err(ref e) => e.size_of(ops),
264        }
265    }
266}
267
268impl<T: MallocSizeOf + Copy> MallocSizeOf for std::cell::Cell<T> {
269    fn size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
270        self.get().size_of(ops)
271    }
272}
273
274impl<T: MallocSizeOf> MallocSizeOf for std::cell::RefCell<T> {
275    fn size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
276        self.borrow().size_of(ops)
277    }
278}
279
280impl<'a, B: ?Sized + ToOwned> MallocSizeOf for std::borrow::Cow<'a, B>
281where
282    B::Owned: MallocSizeOf,
283{
284    fn size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
285        match *self {
286            std::borrow::Cow::Borrowed(_) => 0,
287            std::borrow::Cow::Owned(ref b) => b.size_of(ops),
288        }
289    }
290}
291
292impl<T: MallocSizeOf> MallocSizeOf for [T] {
293    fn size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
294        let mut n = 0;
295        for elem in self.iter() {
296            n += elem.size_of(ops);
297        }
298        n
299    }
300}
301
302impl<T> MallocShallowSizeOf for Vec<T> {
303    fn shallow_size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
304        unsafe { ops.malloc_size_of(self.as_ptr()) }
305    }
306}
307
308impl<T: MallocSizeOf> MallocSizeOf for Vec<T> {
309    fn size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
310        let mut n = self.shallow_size_of(ops);
311        for elem in self.iter() {
312            n += elem.size_of(ops);
313        }
314        n
315    }
316}
317
318impl<T> MallocShallowSizeOf for std::collections::VecDeque<T> {
319    fn shallow_size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
320        if ops.has_malloc_enclosing_size_of() {
321            if let Some(front) = self.front() {
322                // The front element is an interior pointer.
323                unsafe { ops.malloc_enclosing_size_of(&*front) }
324            } else {
325                // This assumes that no memory is allocated when the VecDeque is empty.
326                0
327            }
328        } else {
329            // An estimate.
330            self.capacity() * size_of::<T>()
331        }
332    }
333}
334
335impl<T: MallocSizeOf> MallocSizeOf for std::collections::VecDeque<T> {
336    fn size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
337        let mut n = self.shallow_size_of(ops);
338        for elem in self.iter() {
339            n += elem.size_of(ops);
340        }
341        n
342    }
343}
344
345impl<A: smallvec::Array> MallocShallowSizeOf for smallvec::SmallVec<A> {
346    fn shallow_size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
347        if self.spilled() {
348            unsafe { ops.malloc_size_of(self.as_ptr()) }
349        } else {
350            0
351        }
352    }
353}
354
355impl<A> MallocSizeOf for smallvec::SmallVec<A>
356where
357    A: smallvec::Array,
358    A::Item: MallocSizeOf,
359{
360    fn size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
361        let mut n = self.shallow_size_of(ops);
362        for elem in self.iter() {
363            n += elem.size_of(ops);
364        }
365        n
366    }
367}
368
impl<T> MallocShallowSizeOf for thin_vec::ThinVec<T> {
    /// Measure the ThinVec's single heap allocation (header plus elements),
    /// but not anything owned by the elements themselves.
    fn shallow_size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
        if self.capacity() == 0 {
            // If it's the singleton we might not be a heap pointer.
            return 0;
        }

        // A ThinVec is expected to be a single pointer-sized handle to its
        // allocation; verify that before reinterpreting it as a pointer.
        assert_eq!(
            std::mem::size_of::<Self>(),
            std::mem::size_of::<*const ()>()
        );
        // Read the heap pointer out of the handle and measure that block.
        unsafe { ops.malloc_size_of(*(self as *const Self as *const *const ())) }
    }
}
383
384impl<T: MallocSizeOf> MallocSizeOf for thin_vec::ThinVec<T> {
385    fn size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
386        let mut n = self.shallow_size_of(ops);
387        for elem in self.iter() {
388            n += elem.size_of(ops);
389        }
390        n
391    }
392}
393
// Implements `MallocShallowSizeOf` and `MallocSizeOf` for a hash-set-like
// type. Written as a macro so the same impls can be stamped out for other
// set types exposing the same `iter`/`capacity` API.
macro_rules! malloc_size_of_hash_set {
    ($ty:ty) => {
        impl<T, S> MallocShallowSizeOf for $ty
        where
            T: Eq + Hash,
            S: BuildHasher,
        {
            fn shallow_size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
                if ops.has_malloc_enclosing_size_of() {
                    // The first value from the iterator gives us an interior pointer.
                    // `ops.malloc_enclosing_size_of()` then gives us the storage size.
                    // This assumes that the `HashSet`'s contents (values and hashes)
                    // are all stored in a single contiguous heap allocation.
                    self.iter()
                        .next()
                        .map_or(0, |t| unsafe { ops.malloc_enclosing_size_of(t) })
                } else {
                    // An estimate: capacity slots, each holding a value plus
                    // roughly one word of hash/bookkeeping data.
                    self.capacity() * (size_of::<T>() + size_of::<usize>())
                }
            }
        }

        impl<T, S> MallocSizeOf for $ty
        where
            T: Eq + Hash + MallocSizeOf,
            S: BuildHasher,
        {
            fn size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
                // The table's storage, plus each element's own heap data.
                let mut n = self.shallow_size_of(ops);
                for t in self.iter() {
                    n += t.size_of(ops);
                }
                n
            }
        }
    };
}

malloc_size_of_hash_set!(std::collections::HashSet<T, S>);
434
// Implements `MallocShallowSizeOf` and `MallocSizeOf` for a hash-map-like
// type. Written as a macro so the same impls can be stamped out for other
// map types exposing the same `values`/`iter`/`capacity` API.
macro_rules! malloc_size_of_hash_map {
    ($ty:ty) => {
        impl<K, V, S> MallocShallowSizeOf for $ty
        where
            K: Eq + Hash,
            S: BuildHasher,
        {
            fn shallow_size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
                // See the implementation for std::collections::HashSet for details.
                if ops.has_malloc_enclosing_size_of() {
                    self.values()
                        .next()
                        .map_or(0, |v| unsafe { ops.malloc_enclosing_size_of(v) })
                } else {
                    // An estimate: capacity slots, each holding a key, a
                    // value, and roughly one word of hash/bookkeeping data.
                    self.capacity() * (size_of::<V>() + size_of::<K>() + size_of::<usize>())
                }
            }
        }

        impl<K, V, S> MallocSizeOf for $ty
        where
            K: Eq + Hash + MallocSizeOf,
            V: MallocSizeOf,
            S: BuildHasher,
        {
            fn size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
                // The table's storage, plus each entry's own heap data.
                let mut n = self.shallow_size_of(ops);
                for (k, v) in self.iter() {
                    n += k.size_of(ops);
                    n += v.size_of(ops);
                }
                n
            }
        }
    };
}

malloc_size_of_hash_map!(std::collections::HashMap<K, V, S>);
473
474impl<K, V> MallocShallowSizeOf for std::collections::BTreeMap<K, V>
475where
476    K: Eq + Hash,
477{
478    fn shallow_size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
479        if ops.has_malloc_enclosing_size_of() {
480            self.values()
481                .next()
482                .map_or(0, |v| unsafe { ops.malloc_enclosing_size_of(v) })
483        } else {
484            self.len() * (size_of::<V>() + size_of::<K>() + size_of::<usize>())
485        }
486    }
487}
488
489impl<K, V> MallocSizeOf for std::collections::BTreeMap<K, V>
490where
491    K: Eq + Hash + MallocSizeOf,
492    V: MallocSizeOf,
493{
494    fn size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
495        let mut n = self.shallow_size_of(ops);
496        for (k, v) in self.iter() {
497            n += k.size_of(ops);
498            n += v.size_of(ops);
499        }
500        n
501    }
502}
503
504// PhantomData is always 0.
505impl<T> MallocSizeOf for std::marker::PhantomData<T> {
506    fn size_of(&self, _ops: &mut MallocSizeOfOps) -> usize {
507        0
508    }
509}
510
// XXX: we don't want MallocSizeOf to be defined for Rc and Arc. If negative
// trait bounds are ever allowed, this code should be uncommented.
// (We do have a compile-fail test for this:
// rc_arc_must_not_derive_malloc_size_of.rs)
//impl<T> !MallocSizeOf for Arc<T> { }
//impl<T> !MallocShallowSizeOf for Arc<T> { }
517
518impl<T> MallocUnconditionalShallowSizeOf for servo_arc::Arc<T> {
519    fn unconditional_shallow_size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
520        unsafe { ops.malloc_size_of(self.heap_ptr()) }
521    }
522}
523
524impl<T: MallocSizeOf> MallocUnconditionalSizeOf for servo_arc::Arc<T> {
525    fn unconditional_size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
526        self.unconditional_shallow_size_of(ops) + (**self).size_of(ops)
527    }
528}
529
530impl<T> MallocConditionalShallowSizeOf for servo_arc::Arc<T> {
531    fn conditional_shallow_size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
532        if ops.have_seen_ptr(self.heap_ptr()) {
533            0
534        } else {
535            self.unconditional_shallow_size_of(ops)
536        }
537    }
538}
539
540impl<T: MallocSizeOf> MallocConditionalSizeOf for servo_arc::Arc<T> {
541    fn conditional_size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
542        if ops.have_seen_ptr(self.heap_ptr()) {
543            0
544        } else {
545            self.unconditional_size_of(ops)
546        }
547    }
548}
549
550/// If a mutex is stored directly as a member of a data type that is being measured,
551/// it is the unique owner of its contents and deserves to be measured.
552///
553/// If a mutex is stored inside of an Arc value as a member of a data type that is being measured,
554/// the Arc will not be automatically measured so there is no risk of overcounting the mutex's
555/// contents.
556impl<T: MallocSizeOf> MallocSizeOf for std::sync::Mutex<T> {
557    fn size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
558        (*self.lock().unwrap()).size_of(ops)
559    }
560}
561
562impl MallocSizeOf for smallbitvec::SmallBitVec {
563    fn size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
564        if let Some(ptr) = self.heap_ptr() {
565            unsafe { ops.malloc_size_of(ptr) }
566        } else {
567            0
568        }
569    }
570}
571
572impl<T: MallocSizeOf, Unit> MallocSizeOf for euclid::Length<T, Unit> {
573    fn size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
574        self.0.size_of(ops)
575    }
576}
577
578impl<T: MallocSizeOf, Src, Dst> MallocSizeOf for euclid::Scale<T, Src, Dst> {
579    fn size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
580        self.0.size_of(ops)
581    }
582}
583
584impl<T: MallocSizeOf, U> MallocSizeOf for euclid::Point2D<T, U> {
585    fn size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
586        self.x.size_of(ops) + self.y.size_of(ops)
587    }
588}
589
590impl<T: MallocSizeOf, U> MallocSizeOf for euclid::Rect<T, U> {
591    fn size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
592        self.origin.size_of(ops) + self.size.size_of(ops)
593    }
594}
595
596impl<T: MallocSizeOf, U> MallocSizeOf for euclid::SideOffsets2D<T, U> {
597    fn size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
598        self.top.size_of(ops)
599            + self.right.size_of(ops)
600            + self.bottom.size_of(ops)
601            + self.left.size_of(ops)
602    }
603}
604
605impl<T: MallocSizeOf, U> MallocSizeOf for euclid::Size2D<T, U> {
606    fn size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
607        self.width.size_of(ops) + self.height.size_of(ops)
608    }
609}
610
611impl<T: MallocSizeOf, Src, Dst> MallocSizeOf for euclid::Transform2D<T, Src, Dst> {
612    fn size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
613        self.m11.size_of(ops)
614            + self.m12.size_of(ops)
615            + self.m21.size_of(ops)
616            + self.m22.size_of(ops)
617            + self.m31.size_of(ops)
618            + self.m32.size_of(ops)
619    }
620}
621
622impl<T: MallocSizeOf, Src, Dst> MallocSizeOf for euclid::Transform3D<T, Src, Dst> {
623    fn size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
624        self.m11.size_of(ops)
625            + self.m12.size_of(ops)
626            + self.m13.size_of(ops)
627            + self.m14.size_of(ops)
628            + self.m21.size_of(ops)
629            + self.m22.size_of(ops)
630            + self.m23.size_of(ops)
631            + self.m24.size_of(ops)
632            + self.m31.size_of(ops)
633            + self.m32.size_of(ops)
634            + self.m33.size_of(ops)
635            + self.m34.size_of(ops)
636            + self.m41.size_of(ops)
637            + self.m42.size_of(ops)
638            + self.m43.size_of(ops)
639            + self.m44.size_of(ops)
640    }
641}
642
643impl<T: MallocSizeOf, U> MallocSizeOf for euclid::Vector2D<T, U> {
644    fn size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
645        self.x.size_of(ops) + self.y.size_of(ops)
646    }
647}
648
649impl MallocSizeOf for selectors::parser::AncestorHashes {
650    fn size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
651        let selectors::parser::AncestorHashes { ref packed_hashes } = *self;
652        packed_hashes.size_of(ops)
653    }
654}
655
656impl<Impl: selectors::parser::SelectorImpl> MallocUnconditionalSizeOf
657    for selectors::parser::Selector<Impl>
658where
659    Impl::NonTSPseudoClass: MallocSizeOf,
660    Impl::PseudoElement: MallocSizeOf,
661{
662    fn unconditional_size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
663        let mut n = 0;
664
665        // It's OK to measure this ThinArc directly because it's the
666        // "primary" reference. (The secondary references are on the
667        // Stylist.)
668        n += unsafe { ops.malloc_size_of(self.thin_arc_heap_ptr()) };
669        for component in self.iter_raw_match_order() {
670            n += component.size_of(ops);
671        }
672
673        n
674    }
675}
676
677impl<Impl: selectors::parser::SelectorImpl> MallocUnconditionalSizeOf
678    for selectors::parser::SelectorList<Impl>
679where
680    Impl::NonTSPseudoClass: MallocSizeOf,
681    Impl::PseudoElement: MallocSizeOf,
682{
683    fn unconditional_size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
684        let mut n = 0;
685
686        // It's OK to measure this ThinArc directly because it's the "primary" reference. (The
687        // secondary references are on the Stylist.)
688        n += unsafe { ops.malloc_size_of(self.thin_arc_heap_ptr()) };
689        if self.len() > 1 {
690            for selector in self.slice().iter() {
691                n += selector.size_of(ops);
692            }
693        }
694        n
695    }
696}
697
impl<Impl: selectors::parser::SelectorImpl> MallocUnconditionalSizeOf
    for selectors::parser::Component<Impl>
where
    Impl::NonTSPseudoClass: MallocSizeOf,
    Impl::PseudoElement: MallocSizeOf,
{
    /// Measure the heap data owned by a single selector component.
    ///
    /// Only components that carry nested selectors, selector lists, attribute
    /// data, or pseudo-class/-element payloads contribute; every other
    /// variant stores its data inline and measures zero.
    fn unconditional_size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
        use selectors::parser::Component;

        match self {
            Component::AttributeOther(ref attr_selector) => attr_selector.size_of(ops),
            // Nested selectors/lists are "primary" references here, so they
            // are measured unconditionally.
            Component::Negation(ref components) => components.unconditional_size_of(ops),
            Component::NonTSPseudoClass(ref pseudo) => (*pseudo).size_of(ops),
            Component::Slotted(ref selector) | Component::Host(Some(ref selector)) => {
                selector.unconditional_size_of(ops)
            },
            Component::Is(ref list) | Component::Where(ref list) => list.unconditional_size_of(ops),
            Component::Has(ref relative_selectors) => relative_selectors.size_of(ops),
            Component::NthOf(ref nth_of_data) => nth_of_data.size_of(ops),
            Component::PseudoElement(ref pseudo) => (*pseudo).size_of(ops),
            // All remaining variants hold no separately heap-allocated data.
            Component::Combinator(..)
            | Component::ExplicitAnyNamespace
            | Component::ExplicitNoNamespace
            | Component::DefaultNamespace(..)
            | Component::Namespace(..)
            | Component::ExplicitUniversalType
            | Component::LocalName(..)
            | Component::ID(..)
            | Component::Part(..)
            | Component::Class(..)
            | Component::AttributeInNoNamespaceExists { .. }
            | Component::AttributeInNoNamespace { .. }
            | Component::Root
            | Component::Empty
            | Component::Scope
            | Component::ImplicitScope
            | Component::ParentSelector
            | Component::Nth(..)
            | Component::Host(None)
            | Component::RelativeSelectorAnchor
            | Component::Invalid(..) => 0,
        }
    }
}
742
743impl<Impl: selectors::parser::SelectorImpl> MallocSizeOf
744    for selectors::attr::AttrSelectorWithOptionalNamespace<Impl>
745{
746    fn size_of(&self, _ops: &mut MallocSizeOfOps) -> usize {
747        0
748    }
749}
750
751impl MallocSizeOf for selectors::parser::AnPlusB {
752    fn size_of(&self, _ops: &mut MallocSizeOfOps) -> usize {
753        0
754    }
755}
756
impl MallocSizeOf for Void {
    #[inline]
    fn size_of(&self, _ops: &mut MallocSizeOfOps) -> usize {
        // `Void` is uninhabited, so no value of it can exist and this body
        // can never run; `void::unreachable` expresses that to the type
        // checker.
        void::unreachable(*self)
    }
}
763
#[cfg(feature = "servo")]
impl<Static: string_cache::StaticAtomSet> MallocSizeOf for string_cache::Atom<Static> {
    /// Reports zero. NOTE(review): this presumably relies on the atom table
    /// being measured elsewhere — confirm against the memory reporter.
    fn size_of(&self, _ops: &mut MallocSizeOfOps) -> usize {
        0
    }
}
770
/// For use on types where size_of() returns 0.
#[macro_export]
macro_rules! malloc_size_of_is_0(
    // Arm 1: plain (non-generic) types, e.g. `bool`, `u32`, `str`.
    ($($ty:ty),+) => (
        $(
            impl $crate::MallocSizeOf for $ty {
                #[inline(always)]
                fn size_of(&self, _: &mut $crate::MallocSizeOfOps) -> usize {
                    0
                }
            }
        )+
    );
    // Arm 2: generic types with measurable parameters, e.g. `Range<u8>`.
    ($($ty:ident<$($gen:ident),+>),+) => (
        $(
        impl<$($gen: $crate::MallocSizeOf),+> $crate::MallocSizeOf for $ty<$($gen),+> {
            #[inline(always)]
            fn size_of(&self, _: &mut $crate::MallocSizeOfOps) -> usize {
                0
            }
        }
        )+
    );
);
795
// Primitive types own no heap memory.
malloc_size_of_is_0!(bool, char, str);
malloc_size_of_is_0!(u8, u16, u32, u64, u128, usize);
malloc_size_of_is_0!(i8, i16, i32, i64, i128, isize);
malloc_size_of_is_0!(f32, f64);

// Atomic and NonZero wrappers around primitive integers.
malloc_size_of_is_0!(std::sync::atomic::AtomicBool);
malloc_size_of_is_0!(std::sync::atomic::AtomicIsize);
malloc_size_of_is_0!(std::sync::atomic::AtomicUsize);
malloc_size_of_is_0!(std::num::NonZeroUsize);
malloc_size_of_is_0!(std::num::NonZeroU64);

// Ranges of primitives: just a pair of endpoints.
malloc_size_of_is_0!(Range<u8>, Range<u16>, Range<u32>, Range<u64>, Range<usize>);
malloc_size_of_is_0!(Range<i8>, Range<i16>, Range<i32>, Range<i64>, Range<isize>);
malloc_size_of_is_0!(Range<f32>, Range<f64>);

// External value types that this crate treats as owning no heap data.
malloc_size_of_is_0!(app_units::Au);

malloc_size_of_is_0!(
    cssparser::TokenSerializationType,
    cssparser::SourceLocation,
    cssparser::SourcePosition
);

malloc_size_of_is_0!(selectors::OpaqueElement);
820
/// Measurable that defers to inner value and used to verify MallocSizeOf implementation in a
/// struct.
///
/// Wrapping a field in `Measurable<T>` requires `T: MallocSizeOf` at the type
/// level, turning a missing implementation into a compile error.
#[derive(Clone)]
pub struct Measurable<T: MallocSizeOf>(pub T);
825
826impl<T: MallocSizeOf> Deref for Measurable<T> {
827    type Target = T;
828
829    fn deref(&self) -> &T {
830        &self.0
831    }
832}
833
834impl<T: MallocSizeOf> DerefMut for Measurable<T> {
835    fn deref_mut(&mut self) -> &mut T {
836        &mut self.0
837    }
838}