// stylo_malloc_size_of/lib.rs

1// Copyright 2016-2017 The Servo Project Developers. See the COPYRIGHT
2// file at the top-level directory of this distribution and at
3// http://rust-lang.org/COPYRIGHT.
4//
5// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
6// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
7// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
8// option. This file may not be copied, modified, or distributed
9// except according to those terms.
10
11//! A crate for measuring the heap usage of data structures in a way that
12//! integrates with Firefox's memory reporting, particularly the use of
13//! mozjemalloc and DMD. In particular, it has the following features.
14//! - It isn't bound to a particular heap allocator.
15//! - It provides traits for both "shallow" and "deep" measurement, which gives
16//!   flexibility in the cases where the traits can't be used.
17//! - It allows for measuring blocks even when only an interior pointer can be
18//!   obtained for heap allocations, e.g. `HashSet` and `HashMap`. (This relies
19//!   on the heap allocator having suitable support, which mozjemalloc has.)
20//! - It allows handling of types like `Rc` and `Arc` by providing traits that
21//!   are different to the ones for non-graph structures.
22//!
23//! Suggested uses are as follows.
24//! - When possible, use the `MallocSizeOf` trait. (Deriving support is
25//!   provided by the `malloc_size_of_derive` crate.)
26//! - If you need an additional synchronization argument, provide a function
27//!   that is like the standard trait method, but with the extra argument.
28//! - If you need multiple measurements for a type, provide a function named
29//!   `add_size_of` that takes a mutable reference to a struct that contains
30//!   the multiple measurement fields.
31//! - When deep measurement (via `MallocSizeOf`) cannot be implemented for a
32//!   type, shallow measurement (via `MallocShallowSizeOf`) in combination with
33//!   iteration can be a useful substitute.
34//! - `Rc` and `Arc` are always tricky, which is why `MallocSizeOf` is not (and
35//!   should not be) implemented for them.
36//! - If an `Rc` or `Arc` is known to be a "primary" reference and can always
37//!   be measured, it should be measured via the `MallocUnconditionalSizeOf`
38//!   trait.
39//! - If an `Rc` or `Arc` should be measured only if it hasn't been seen
40//!   before, it should be measured via the `MallocConditionalSizeOf` trait.
41//! - Using universal function call syntax is a good idea when measuring boxed
42//!   fields in structs, because it makes it clear that the Box is being
43//!   measured as well as the thing it points to. E.g.
44//!   `<Box<_> as MallocSizeOf>::size_of(field, ops)`.
45//!
46//!   Note: WebRender has a reduced fork of this crate, so that we can avoid
47//!   publishing this crate on crates.io.
48
49extern crate app_units;
50extern crate cssparser;
51extern crate euclid;
52extern crate selectors;
53extern crate servo_arc;
54extern crate smallbitvec;
55extern crate smallvec;
56extern crate void;
57
58use std::hash::{BuildHasher, Hash};
59use std::mem::size_of;
60use std::ops::Range;
61use std::ops::{Deref, DerefMut};
62use std::os::raw::c_void;
63use void::Void;
64
/// A C function that takes a pointer to a heap allocation and returns its
/// size in bytes (e.g. `malloc_usable_size`-style entry points).
type VoidPtrToSizeFn = unsafe extern "C" fn(ptr: *const c_void) -> usize;
67
/// A closure implementing a stateful predicate on pointers: "have I seen this
/// pointer before?" (It is `FnMut` because it records pointers as it goes.)
type VoidPtrToBoolFnMut = dyn FnMut(*const c_void) -> bool;
70
/// Operations used when measuring heap usage of data structures. Bundles the
/// allocator callbacks so measurement code is not bound to one allocator.
pub struct MallocSizeOfOps {
    /// A function that returns the size of a heap allocation.
    size_of_op: VoidPtrToSizeFn,

    /// Like `size_of_op`, but can take an interior pointer. Optional because
    /// not all allocators support this operation. If it's not provided, some
    /// memory measurements will actually be computed estimates rather than
    /// real and accurate measurements.
    enclosing_size_of_op: Option<VoidPtrToSizeFn>,

    /// Check if a pointer has been seen before, and remember it for next time.
    /// Useful when measuring `Rc`s and `Arc`s. Optional, because many places
    /// don't need it.
    have_seen_ptr_op: Option<Box<VoidPtrToBoolFnMut>>,
}
87
88impl MallocSizeOfOps {
89    pub fn new(
90        size_of: VoidPtrToSizeFn,
91        malloc_enclosing_size_of: Option<VoidPtrToSizeFn>,
92        have_seen_ptr: Option<Box<VoidPtrToBoolFnMut>>,
93    ) -> Self {
94        MallocSizeOfOps {
95            size_of_op: size_of,
96            enclosing_size_of_op: malloc_enclosing_size_of,
97            have_seen_ptr_op: have_seen_ptr,
98        }
99    }
100
101    /// Check if an allocation is empty. This relies on knowledge of how Rust
102    /// handles empty allocations, which may change in the future.
103    fn is_empty<T: ?Sized>(ptr: *const T) -> bool {
104        // The correct condition is this:
105        //   `ptr as usize <= ::std::mem::align_of::<T>()`
106        // But we can't call align_of() on a ?Sized T. So we approximate it
107        // with the following. 256 is large enough that it should always be
108        // larger than the required alignment, but small enough that it is
109        // always in the first page of memory and therefore not a legitimate
110        // address.
111        return ptr as *const usize as usize <= 256;
112    }
113
114    /// Call `size_of_op` on `ptr`, first checking that the allocation isn't
115    /// empty, because some types (such as `Vec`) utilize empty allocations.
116    pub unsafe fn malloc_size_of<T: ?Sized>(&self, ptr: *const T) -> usize {
117        if MallocSizeOfOps::is_empty(ptr) {
118            0
119        } else {
120            (self.size_of_op)(ptr as *const c_void)
121        }
122    }
123
124    /// Is an `enclosing_size_of_op` available?
125    pub fn has_malloc_enclosing_size_of(&self) -> bool {
126        self.enclosing_size_of_op.is_some()
127    }
128
129    /// Call `enclosing_size_of_op`, which must be available, on `ptr`, which
130    /// must not be empty.
131    pub unsafe fn malloc_enclosing_size_of<T>(&self, ptr: *const T) -> usize {
132        assert!(!MallocSizeOfOps::is_empty(ptr));
133        (self.enclosing_size_of_op.unwrap())(ptr as *const c_void)
134    }
135
136    /// Call `have_seen_ptr_op` on `ptr`.
137    pub fn have_seen_ptr<T>(&mut self, ptr: *const T) -> bool {
138        let have_seen_ptr_op = self
139            .have_seen_ptr_op
140            .as_mut()
141            .expect("missing have_seen_ptr_op");
142        have_seen_ptr_op(ptr as *const c_void)
143    }
144}
145
/// Trait for measuring the "deep" heap usage of a data structure. This is the
/// most commonly-used of the traits.
pub trait MallocSizeOf {
    /// Measure the heap usage of all descendant heap-allocated structures, but
    /// not the space taken up by the value itself (i.e. not `size_of::<Self>()`).
    fn size_of(&self, ops: &mut MallocSizeOfOps) -> usize;
}
153
/// Trait for measuring the "shallow" heap usage of a container.
pub trait MallocShallowSizeOf {
    /// Measure the heap usage of immediate heap-allocated descendant
    /// structures, but not the space taken up by the value itself. Anything
    /// beyond the immediate descendants must be measured separately, using
    /// iteration.
    fn shallow_size_of(&self, ops: &mut MallocSizeOfOps) -> usize;
}
162
/// Like `MallocSizeOf`, but with a different name so it cannot be used
/// accidentally with derive(MallocSizeOf). For use with types like `Rc` and
/// `Arc` when appropriate (e.g. when measuring a "primary" reference).
pub trait MallocUnconditionalSizeOf {
    /// Measure the heap usage of all heap-allocated descendant structures, but
    /// not the space taken up by the value itself.
    fn unconditional_size_of(&self, ops: &mut MallocSizeOfOps) -> usize;
}
171
/// `MallocUnconditionalSizeOf` combined with `MallocShallowSizeOf`.
pub trait MallocUnconditionalShallowSizeOf {
    /// `unconditional_size_of` combined with `shallow_size_of`: measure the
    /// shared allocation itself, but not what it transitively owns.
    fn unconditional_shallow_size_of(&self, ops: &mut MallocSizeOfOps) -> usize;
}
177
/// Like `MallocSizeOf`, but only measures if the value hasn't already been
/// measured. For use with types like `Rc` and `Arc` when appropriate (e.g.
/// when there is no "primary" reference).
pub trait MallocConditionalSizeOf {
    /// Measure the heap usage of all heap-allocated descendant structures, but
    /// not the space taken up by the value itself, and only if that heap usage
    /// hasn't already been measured (as tracked by `ops.have_seen_ptr`).
    fn conditional_size_of(&self, ops: &mut MallocSizeOfOps) -> usize;
}
187
/// `MallocConditionalSizeOf` combined with `MallocShallowSizeOf`.
pub trait MallocConditionalShallowSizeOf {
    /// `conditional_size_of` combined with `shallow_size_of`: measure the
    /// shared allocation itself, only if it hasn't been seen before.
    fn conditional_shallow_size_of(&self, ops: &mut MallocSizeOfOps) -> usize;
}
193
impl MallocSizeOf for String {
    fn size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
        // `as_ptr()` points at the string's heap buffer. An empty/unallocated
        // String yields a dangling near-null pointer, which `malloc_size_of`
        // filters out via its `is_empty` check.
        unsafe { ops.malloc_size_of(self.as_ptr()) }
    }
}
199
impl<'a, T: ?Sized> MallocSizeOf for &'a T {
    fn size_of(&self, _ops: &mut MallocSizeOfOps) -> usize {
        // Zero makes sense for a non-owning reference: whatever it points at
        // is owned (and therefore measured) elsewhere.
        0
    }
}
206
impl<T: ?Sized> MallocShallowSizeOf for Box<T> {
    fn shallow_size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
        // `&**self` is a pointer to the boxed value's heap allocation.
        unsafe { ops.malloc_size_of(&**self) }
    }
}
212
impl<T: MallocSizeOf + ?Sized> MallocSizeOf for Box<T> {
    fn size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
        // The box's own allocation plus whatever the contained value owns.
        self.shallow_size_of(ops) + (**self).size_of(ops)
    }
}
218
// The unit type owns nothing on the heap.
impl MallocSizeOf for () {
    fn size_of(&self, _ops: &mut MallocSizeOfOps) -> usize {
        0
    }
}
224
225impl<T1, T2> MallocSizeOf for (T1, T2)
226where
227    T1: MallocSizeOf,
228    T2: MallocSizeOf,
229{
230    fn size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
231        self.0.size_of(ops) + self.1.size_of(ops)
232    }
233}
234
235impl<T1, T2, T3> MallocSizeOf for (T1, T2, T3)
236where
237    T1: MallocSizeOf,
238    T2: MallocSizeOf,
239    T3: MallocSizeOf,
240{
241    fn size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
242        self.0.size_of(ops) + self.1.size_of(ops) + self.2.size_of(ops)
243    }
244}
245
246impl<T1, T2, T3, T4> MallocSizeOf for (T1, T2, T3, T4)
247where
248    T1: MallocSizeOf,
249    T2: MallocSizeOf,
250    T3: MallocSizeOf,
251    T4: MallocSizeOf,
252{
253    fn size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
254        self.0.size_of(ops) + self.1.size_of(ops) + self.2.size_of(ops) + self.3.size_of(ops)
255    }
256}
257
258impl<T: MallocSizeOf> MallocSizeOf for Option<T> {
259    fn size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
260        if let Some(val) = self.as_ref() {
261            val.size_of(ops)
262        } else {
263            0
264        }
265    }
266}
267
268impl<T: MallocSizeOf, E: MallocSizeOf> MallocSizeOf for Result<T, E> {
269    fn size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
270        match *self {
271            Ok(ref x) => x.size_of(ops),
272            Err(ref e) => e.size_of(ops),
273        }
274    }
275}
276
impl<T: MallocSizeOf + Copy> MallocSizeOf for std::cell::Cell<T> {
    fn size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
        // `Cell::get` requires `T: Copy`, hence the extra bound above.
        self.get().size_of(ops)
    }
}
282
impl<T: MallocSizeOf> MallocSizeOf for std::cell::RefCell<T> {
    fn size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
        // Panics if the cell is currently mutably borrowed; measurement is
        // expected to run while no such borrow is live.
        self.borrow().size_of(ops)
    }
}
288
289impl<'a, B: ?Sized + ToOwned> MallocSizeOf for std::borrow::Cow<'a, B>
290where
291    B::Owned: MallocSizeOf,
292{
293    fn size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
294        match *self {
295            std::borrow::Cow::Borrowed(_) => 0,
296            std::borrow::Cow::Owned(ref b) => b.size_of(ops),
297        }
298    }
299}
300
301impl<T: MallocSizeOf> MallocSizeOf for [T] {
302    fn size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
303        let mut n = 0;
304        for elem in self.iter() {
305            n += elem.size_of(ops);
306        }
307        n
308    }
309}
310
impl<T> MallocShallowSizeOf for Vec<T> {
    fn shallow_size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
        // `as_ptr()` points at the element buffer; an unallocated Vec yields a
        // dangling near-null pointer that `malloc_size_of` filters out.
        unsafe { ops.malloc_size_of(self.as_ptr()) }
    }
}
316
317impl<T: MallocSizeOf> MallocSizeOf for Vec<T> {
318    fn size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
319        let mut n = self.shallow_size_of(ops);
320        for elem in self.iter() {
321            n += elem.size_of(ops);
322        }
323        n
324    }
325}
326
327impl<T> MallocShallowSizeOf for std::collections::VecDeque<T> {
328    fn shallow_size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
329        if ops.has_malloc_enclosing_size_of() {
330            if let Some(front) = self.front() {
331                // The front element is an interior pointer.
332                unsafe { ops.malloc_enclosing_size_of(&*front) }
333            } else {
334                // This assumes that no memory is allocated when the VecDeque is empty.
335                0
336            }
337        } else {
338            // An estimate.
339            self.capacity() * size_of::<T>()
340        }
341    }
342}
343
344impl<T: MallocSizeOf> MallocSizeOf for std::collections::VecDeque<T> {
345    fn size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
346        let mut n = self.shallow_size_of(ops);
347        for elem in self.iter() {
348            n += elem.size_of(ops);
349        }
350        n
351    }
352}
353
354impl<A: smallvec::Array> MallocShallowSizeOf for smallvec::SmallVec<A> {
355    fn shallow_size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
356        if self.spilled() {
357            unsafe { ops.malloc_size_of(self.as_ptr()) }
358        } else {
359            0
360        }
361    }
362}
363
364impl<A> MallocSizeOf for smallvec::SmallVec<A>
365where
366    A: smallvec::Array,
367    A::Item: MallocSizeOf,
368{
369    fn size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
370        let mut n = self.shallow_size_of(ops);
371        for elem in self.iter() {
372            n += elem.size_of(ops);
373        }
374        n
375    }
376}
377
impl<T> MallocShallowSizeOf for thin_vec::ThinVec<T> {
    fn shallow_size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
        if self.capacity() == 0 {
            // If it's the singleton we might not be a heap pointer.
            return 0;
        }

        // A ThinVec is represented as a single pointer to a heap block
        // (header + elements). Assert that layout assumption, then read the
        // pointer out of `self` and measure the block it points at.
        assert_eq!(
            std::mem::size_of::<Self>(),
            std::mem::size_of::<*const ()>()
        );
        unsafe { ops.malloc_size_of(*(self as *const Self as *const *const ())) }
    }
}
392
393impl<T: MallocSizeOf> MallocSizeOf for thin_vec::ThinVec<T> {
394    fn size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
395        let mut n = self.shallow_size_of(ops);
396        for elem in self.iter() {
397            n += elem.size_of(ops);
398        }
399        n
400    }
401}
402
// Generates shallow and deep `MallocSizeOf` impls for hash-set-like types.
macro_rules! malloc_size_of_hash_set {
    ($ty:ty) => {
        impl<T, S> MallocShallowSizeOf for $ty
        where
            T: Eq + Hash,
            S: BuildHasher,
        {
            fn shallow_size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
                if ops.has_malloc_enclosing_size_of() {
                    // The first value from the iterator gives us an interior pointer.
                    // `ops.malloc_enclosing_size_of()` then gives us the storage size.
                    // This assumes that the `HashSet`'s contents (values and hashes)
                    // are all stored in a single contiguous heap allocation.
                    self.iter()
                        .next()
                        .map_or(0, |t| unsafe { ops.malloc_enclosing_size_of(t) })
                } else {
                    // An estimate: one value plus (roughly) one hash word per slot.
                    self.capacity() * (size_of::<T>() + size_of::<usize>())
                }
            }
        }

        impl<T, S> MallocSizeOf for $ty
        where
            T: Eq + Hash + MallocSizeOf,
            S: BuildHasher,
        {
            fn size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
                // Table storage plus each element's transitive heap usage.
                let mut n = self.shallow_size_of(ops);
                for t in self.iter() {
                    n += t.size_of(ops);
                }
                n
            }
        }
    };
}

malloc_size_of_hash_set!(std::collections::HashSet<T, S>);
443
// Generates shallow and deep `MallocSizeOf` impls for hash-map-like types.
macro_rules! malloc_size_of_hash_map {
    ($ty:ty) => {
        impl<K, V, S> MallocShallowSizeOf for $ty
        where
            K: Eq + Hash,
            S: BuildHasher,
        {
            fn shallow_size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
                // See the implementation for std::collections::HashSet for details.
                if ops.has_malloc_enclosing_size_of() {
                    self.values()
                        .next()
                        .map_or(0, |v| unsafe { ops.malloc_enclosing_size_of(v) })
                } else {
                    // An estimate: key + value plus (roughly) one hash word per slot.
                    self.capacity() * (size_of::<V>() + size_of::<K>() + size_of::<usize>())
                }
            }
        }

        impl<K, V, S> MallocSizeOf for $ty
        where
            K: Eq + Hash + MallocSizeOf,
            V: MallocSizeOf,
            S: BuildHasher,
        {
            fn size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
                // Table storage plus each key's and value's transitive heap usage.
                let mut n = self.shallow_size_of(ops);
                for (k, v) in self.iter() {
                    n += k.size_of(ops);
                    n += v.size_of(ops);
                }
                n
            }
        }
    };
}

malloc_size_of_hash_map!(std::collections::HashMap<K, V, S>);
482
483impl<K, V> MallocShallowSizeOf for std::collections::BTreeMap<K, V>
484where
485    K: Eq + Hash,
486{
487    fn shallow_size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
488        if ops.has_malloc_enclosing_size_of() {
489            self.values()
490                .next()
491                .map_or(0, |v| unsafe { ops.malloc_enclosing_size_of(v) })
492        } else {
493            self.len() * (size_of::<V>() + size_of::<K>() + size_of::<usize>())
494        }
495    }
496}
497
498impl<K, V> MallocSizeOf for std::collections::BTreeMap<K, V>
499where
500    K: Eq + Hash + MallocSizeOf,
501    V: MallocSizeOf,
502{
503    fn size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
504        let mut n = self.shallow_size_of(ops);
505        for (k, v) in self.iter() {
506            n += k.size_of(ops);
507            n += v.size_of(ops);
508        }
509        n
510    }
511}
512
// PhantomData is always 0: it is a zero-sized marker that owns no heap data.
impl<T> MallocSizeOf for std::marker::PhantomData<T> {
    fn size_of(&self, _ops: &mut MallocSizeOfOps) -> usize {
        0
    }
}
519
520// XXX: we don't want MallocSizeOf to be defined for Rc and Arc. If negative
521// trait bounds are ever allowed, this code should be uncommented.
522// (We do have a compile-fail test for this:
523// rc_arc_must_not_derive_malloc_size_of.rs)
524//impl<T> !MallocSizeOf for Arc<T> { }
525//impl<T> !MallocShallowSizeOf for Arc<T> { }
526
impl<T> MallocUnconditionalShallowSizeOf for servo_arc::Arc<T> {
    fn unconditional_shallow_size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
        // `heap_ptr()` is the Arc's shared allocation (refcount + value).
        unsafe { ops.malloc_size_of(self.heap_ptr()) }
    }
}
532
impl<T: MallocSizeOf> MallocUnconditionalSizeOf for servo_arc::Arc<T> {
    fn unconditional_size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
        // The shared allocation itself plus whatever the value owns.
        self.unconditional_shallow_size_of(ops) + (**self).size_of(ops)
    }
}
538
539impl<T> MallocConditionalShallowSizeOf for servo_arc::Arc<T> {
540    fn conditional_shallow_size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
541        if ops.have_seen_ptr(self.heap_ptr()) {
542            0
543        } else {
544            self.unconditional_shallow_size_of(ops)
545        }
546    }
547}
548
549impl<T: MallocSizeOf> MallocConditionalSizeOf for servo_arc::Arc<T> {
550    fn conditional_size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
551        if ops.have_seen_ptr(self.heap_ptr()) {
552            0
553        } else {
554            self.unconditional_size_of(ops)
555        }
556    }
557}
558
/// If a mutex is stored directly as a member of a data type that is being measured,
/// it is the unique owner of its contents and deserves to be measured.
///
/// If a mutex is stored inside of an Arc value as a member of a data type that is being measured,
/// the Arc will not be automatically measured so there is no risk of overcounting the mutex's
/// contents.
impl<T: MallocSizeOf> MallocSizeOf for std::sync::Mutex<T> {
    fn size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
        // Blocks until the lock is acquired; panics if the mutex is poisoned.
        (*self.lock().unwrap()).size_of(ops)
    }
}
570
571impl MallocSizeOf for smallbitvec::SmallBitVec {
572    fn size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
573        if let Some(ptr) = self.heap_ptr() {
574            unsafe { ops.malloc_size_of(ptr) }
575        } else {
576            0
577        }
578    }
579}
580
// euclid wrappers below simply forward to their scalar fields.
impl<T: MallocSizeOf, Unit> MallocSizeOf for euclid::Length<T, Unit> {
    fn size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
        self.0.size_of(ops)
    }
}
586
impl<T: MallocSizeOf, Src, Dst> MallocSizeOf for euclid::Scale<T, Src, Dst> {
    fn size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
        self.0.size_of(ops)
    }
}
592
impl<T: MallocSizeOf, U> MallocSizeOf for euclid::Point2D<T, U> {
    fn size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
        // Sum of both coordinates' heap usage.
        self.x.size_of(ops) + self.y.size_of(ops)
    }
}
598
impl<T: MallocSizeOf, U> MallocSizeOf for euclid::Rect<T, U> {
    fn size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
        // A rect is an origin point plus a size.
        self.origin.size_of(ops) + self.size.size_of(ops)
    }
}
604
impl<T: MallocSizeOf, U> MallocSizeOf for euclid::SideOffsets2D<T, U> {
    fn size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
        // Sum of all four side offsets.
        self.top.size_of(ops)
            + self.right.size_of(ops)
            + self.bottom.size_of(ops)
            + self.left.size_of(ops)
    }
}
613
impl<T: MallocSizeOf, U> MallocSizeOf for euclid::Size2D<T, U> {
    fn size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
        self.width.size_of(ops) + self.height.size_of(ops)
    }
}
619
impl<T: MallocSizeOf, Src, Dst> MallocSizeOf for euclid::Transform2D<T, Src, Dst> {
    fn size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
        // Sum of all six matrix components.
        self.m11.size_of(ops)
            + self.m12.size_of(ops)
            + self.m21.size_of(ops)
            + self.m22.size_of(ops)
            + self.m31.size_of(ops)
            + self.m32.size_of(ops)
    }
}
630
impl<T: MallocSizeOf, Src, Dst> MallocSizeOf for euclid::Transform3D<T, Src, Dst> {
    fn size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
        // Sum of all sixteen matrix components.
        self.m11.size_of(ops)
            + self.m12.size_of(ops)
            + self.m13.size_of(ops)
            + self.m14.size_of(ops)
            + self.m21.size_of(ops)
            + self.m22.size_of(ops)
            + self.m23.size_of(ops)
            + self.m24.size_of(ops)
            + self.m31.size_of(ops)
            + self.m32.size_of(ops)
            + self.m33.size_of(ops)
            + self.m34.size_of(ops)
            + self.m41.size_of(ops)
            + self.m42.size_of(ops)
            + self.m43.size_of(ops)
            + self.m44.size_of(ops)
    }
}
651
impl<T: MallocSizeOf, U> MallocSizeOf for euclid::Vector2D<T, U> {
    fn size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
        self.x.size_of(ops) + self.y.size_of(ops)
    }
}
657
impl MallocSizeOf for selectors::parser::AncestorHashes {
    fn size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
        // Exhaustive destructuring so a new field here becomes a compile
        // error, forcing this impl to be updated.
        let selectors::parser::AncestorHashes { ref packed_hashes } = *self;
        packed_hashes.size_of(ops)
    }
}
664
impl<Impl: selectors::parser::SelectorImpl> MallocUnconditionalSizeOf
    for selectors::parser::Selector<Impl>
where
    Impl::NonTSPseudoClass: MallocSizeOf,
    Impl::PseudoElement: MallocSizeOf,
{
    /// Measure the selector's heap allocation plus each component's heap usage.
    fn unconditional_size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
        let mut n = 0;

        // It's OK to measure this ThinArc directly because it's the
        // "primary" reference. (The secondary references are on the
        // Stylist.)
        n += unsafe { ops.malloc_size_of(self.thin_arc_heap_ptr()) };
        for component in self.iter_raw_match_order() {
            n += component.size_of(ops);
        }

        n
    }
}
685
impl<Impl: selectors::parser::SelectorImpl> MallocUnconditionalSizeOf
    for selectors::parser::SelectorList<Impl>
where
    Impl::NonTSPseudoClass: MallocSizeOf,
    Impl::PseudoElement: MallocSizeOf,
{
    fn unconditional_size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
        let mut n = 0;

        // It's OK to measure this ThinArc directly because it's the "primary" reference. (The
        // secondary references are on the Stylist.)
        n += unsafe { ops.malloc_size_of(self.thin_arc_heap_ptr()) };
        // NOTE(review): selectors are only measured individually when the list
        // holds more than one — presumably a single selector shares the list's
        // thin-arc allocation measured above; confirm against the `selectors`
        // crate's SelectorList representation.
        if self.len() > 1 {
            for selector in self.slice().iter() {
                n += selector.size_of(ops);
            }
        }
        n
    }
}
706
impl<Impl: selectors::parser::SelectorImpl> MallocUnconditionalSizeOf
    for selectors::parser::Component<Impl>
where
    Impl::NonTSPseudoClass: MallocSizeOf,
    Impl::PseudoElement: MallocSizeOf,
{
    /// Measure the heap usage owned by a single selector component. Variants
    /// that own nested selectors/lists recurse; the catch-all arm covers
    /// variants that store no heap data of their own.
    fn unconditional_size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
        use selectors::parser::Component;

        match self {
            Component::AttributeOther(ref attr_selector) => attr_selector.size_of(ops),
            Component::Negation(ref components) => components.unconditional_size_of(ops),
            Component::NonTSPseudoClass(ref pseudo) => (*pseudo).size_of(ops),
            Component::Slotted(ref selector) | Component::Host(Some(ref selector)) => {
                selector.unconditional_size_of(ops)
            },
            Component::Is(ref list) | Component::Where(ref list) => list.unconditional_size_of(ops),
            Component::Has(ref relative_selectors) => relative_selectors.size_of(ops),
            Component::NthOf(ref nth_of_data) => nth_of_data.size_of(ops),
            Component::PseudoElement(ref pseudo) => (*pseudo).size_of(ops),
            // These variants hold no separately-allocated data (or their data
            // is measured elsewhere), so they contribute nothing here.
            Component::Combinator(..)
            | Component::ExplicitAnyNamespace
            | Component::ExplicitNoNamespace
            | Component::DefaultNamespace(..)
            | Component::Namespace(..)
            | Component::ExplicitUniversalType
            | Component::LocalName(..)
            | Component::ID(..)
            | Component::Part(..)
            | Component::Class(..)
            | Component::AttributeInNoNamespaceExists { .. }
            | Component::AttributeInNoNamespace { .. }
            | Component::Root
            | Component::Empty
            | Component::Scope
            | Component::ImplicitScope
            | Component::ParentSelector
            | Component::Nth(..)
            | Component::Host(None)
            | Component::RelativeSelectorAnchor
            | Component::Invalid(..) => 0,
        }
    }
}
751
impl<Impl: selectors::parser::SelectorImpl> MallocSizeOf
    for selectors::attr::AttrSelectorWithOptionalNamespace<Impl>
{
    // Treated as owning no measurable heap data.
    fn size_of(&self, _ops: &mut MallocSizeOfOps) -> usize {
        0
    }
}
759
impl MallocSizeOf for selectors::parser::AnPlusB {
    // An `an+b` value is plain integers; nothing on the heap.
    fn size_of(&self, _ops: &mut MallocSizeOfOps) -> usize {
        0
    }
}
765
impl MallocSizeOf for Void {
    #[inline]
    fn size_of(&self, _ops: &mut MallocSizeOfOps) -> usize {
        // `Void` is uninhabited, so this can never actually be called.
        void::unreachable(*self)
    }
}
772
#[cfg(feature = "servo")]
impl<Static: string_cache::StaticAtomSet> MallocSizeOf for string_cache::Atom<Static> {
    // Atoms are interned in a shared table, so an individual Atom is not
    // charged any heap usage here.
    fn size_of(&self, _ops: &mut MallocSizeOfOps) -> usize {
        0
    }
}
779
/// For use on types where size_of() returns 0.
#[macro_export]
macro_rules! malloc_size_of_is_0(
    // Plain (possibly unsized) types.
    ($($ty:ty),+) => (
        $(
            impl $crate::MallocSizeOf for $ty {
                #[inline(always)]
                fn size_of(&self, _: &mut $crate::MallocSizeOfOps) -> usize {
                    0
                }
            }
        )+
    );
    // Generic types: every type parameter gets a MallocSizeOf bound.
    ($($ty:ident<$($gen:ident),+>),+) => (
        $(
        impl<$($gen: $crate::MallocSizeOf),+> $crate::MallocSizeOf for $ty<$($gen),+> {
            #[inline(always)]
            fn size_of(&self, _: &mut $crate::MallocSizeOfOps) -> usize {
                0
            }
        }
        )+
    );
);

// Primitive types never own heap data.
malloc_size_of_is_0!(bool, char, str);
malloc_size_of_is_0!(u8, u16, u32, u64, u128, usize);
malloc_size_of_is_0!(i8, i16, i32, i64, i128, isize);
malloc_size_of_is_0!(f32, f64);

// Atomics and non-zero integer wrappers are plain values.
malloc_size_of_is_0!(std::sync::atomic::AtomicBool);
malloc_size_of_is_0!(std::sync::atomic::AtomicIsize);
malloc_size_of_is_0!(std::sync::atomic::AtomicUsize);
malloc_size_of_is_0!(std::num::NonZeroUsize);
malloc_size_of_is_0!(std::num::NonZeroU64);

// Ranges of primitives are just two endpoints.
malloc_size_of_is_0!(Range<u8>, Range<u16>, Range<u32>, Range<u64>, Range<usize>);
malloc_size_of_is_0!(Range<i8>, Range<i16>, Range<i32>, Range<i64>, Range<isize>);
malloc_size_of_is_0!(Range<f32>, Range<f64>);

malloc_size_of_is_0!(app_units::Au);

malloc_size_of_is_0!(
    cssparser::TokenSerializationType,
    cssparser::SourceLocation,
    cssparser::SourcePosition
);

malloc_size_of_is_0!(selectors::OpaqueElement);
829
/// Measurable that defers to inner value and used to verify MallocSizeOf implementation in a
/// struct.
#[derive(Clone)]
pub struct Measurable<T: MallocSizeOf>(pub T);
834
// Transparent access to the wrapped value.
impl<T: MallocSizeOf> Deref for Measurable<T> {
    type Target = T;

    fn deref(&self) -> &T {
        &self.0
    }
}
842
// Transparent mutable access to the wrapped value.
impl<T: MallocSizeOf> DerefMut for Measurable<T> {
    fn deref_mut(&mut self) -> &mut T {
        &mut self.0
    }
}