// wr_malloc_size_of/lib.rs

// Copyright 2016-2017 The Servo Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.

//! A reduced fork of Firefox's malloc_size_of crate, for bundling with WebRender.

#[cfg(feature = "app_units")]
extern crate app_units;
#[cfg(feature = "euclid")]
extern crate euclid;

use std::collections::{BTreeMap, BTreeSet};
use std::hash::{BuildHasher, Hash};
use std::mem::size_of;
use std::ops::Range;
use std::os::raw::c_void;
use std::path::PathBuf;

/// A C function that takes a pointer to a heap allocation and returns its size.
///
/// This matches the shape of allocator introspection hooks (presumably a
/// `malloc_usable_size`-style function supplied by the embedder — confirm
/// with the caller that registers it).
type VoidPtrToSizeFn = unsafe extern "C" fn(ptr: *const c_void) -> usize;

/// Operations used when measuring heap usage of data structures.
///
/// Bundles the allocator hooks that the `MallocSizeOf` impls below call.
pub struct MallocSizeOfOps {
    /// A function that returns the size of a heap allocation.
    pub size_of_op: VoidPtrToSizeFn,

    /// Like `size_of_op`, but can take an interior pointer. Optional because
    /// not all allocators support this operation. If it's not provided, some
    /// memory measurements will actually be computed estimates rather than
    /// real and accurate measurements.
    pub enclosing_size_of_op: Option<VoidPtrToSizeFn>,
}

40impl MallocSizeOfOps {
41    pub fn new(
42        size_of: VoidPtrToSizeFn,
43        malloc_enclosing_size_of: Option<VoidPtrToSizeFn>,
44    ) -> Self {
45        MallocSizeOfOps {
46            size_of_op: size_of,
47            enclosing_size_of_op: malloc_enclosing_size_of,
48        }
49    }
50
51    /// Check if an allocation is empty. This relies on knowledge of how Rust
52    /// handles empty allocations, which may change in the future.
53    fn is_empty<T: ?Sized>(ptr: *const T) -> bool {
54        // The correct condition is this:
55        //   `ptr as usize <= ::std::mem::align_of::<T>()`
56        // But we can't call align_of() on a ?Sized T. So we approximate it
57        // with the following. 256 is large enough that it should always be
58        // larger than the required alignment, but small enough that it is
59        // always in the first page of memory and therefore not a legitimate
60        // address.
61        ptr as *const usize as usize <= 256
62    }
63
64    /// Call `size_of_op` on `ptr`, first checking that the allocation isn't
65    /// empty, because some types (such as `Vec`) utilize empty allocations.
66    pub unsafe fn malloc_size_of<T: ?Sized>(&self, ptr: *const T) -> usize {
67        if MallocSizeOfOps::is_empty(ptr) {
68            0
69        } else {
70            (self.size_of_op)(ptr as *const c_void)
71        }
72    }
73
74    /// Is an `enclosing_size_of_op` available?
75    pub fn has_malloc_enclosing_size_of(&self) -> bool {
76        self.enclosing_size_of_op.is_some()
77    }
78
79    /// Call `enclosing_size_of_op`, which must be available, on `ptr`, which
80    /// must not be empty.
81    pub unsafe fn malloc_enclosing_size_of<T>(&self, ptr: *const T) -> usize {
82        assert!(!MallocSizeOfOps::is_empty(ptr));
83        (self.enclosing_size_of_op.unwrap())(ptr as *const c_void)
84    }
85}
86
/// Trait for measuring the "deep" heap usage of a data structure. This is the
/// most commonly-used of the traits.
pub trait MallocSizeOf {
    /// Measure the heap usage of all descendant heap-allocated structures, but
    /// not the space taken up by the value itself.
    fn size_of(&self, ops: &mut MallocSizeOfOps) -> usize;
}

/// Trait for measuring the "shallow" heap usage of a container.
pub trait MallocShallowSizeOf {
    /// Measure the heap usage of immediate heap-allocated descendant
    /// structures, but not the space taken up by the value itself. Anything
    /// beyond the immediate descendants must be measured separately, using
    /// iteration.
    fn shallow_size_of(&self, ops: &mut MallocSizeOfOps) -> usize;
}

impl MallocSizeOf for String {
    fn size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
        // Measure the heap buffer behind the string's bytes. Unallocated
        // (empty) strings have a small dangling pointer, which
        // `malloc_size_of` detects and reports as 0.
        unsafe { ops.malloc_size_of(self.as_ptr()) }
    }
}

impl<T: ?Sized> MallocShallowSizeOf for Box<T> {
    fn shallow_size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
        // `&**self` is a pointer to the boxed value, i.e. the start of the
        // heap block the box owns.
        unsafe { ops.malloc_size_of(&**self) }
    }
}

116impl<T: MallocSizeOf + ?Sized> MallocSizeOf for Box<T> {
117    fn size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
118        self.shallow_size_of(ops) + (**self).size_of(ops)
119    }
120}
121
122impl MallocSizeOf for () {
123    fn size_of(&self, _ops: &mut MallocSizeOfOps) -> usize {
124        0
125    }
126}
127
128impl<T1, T2> MallocSizeOf for (T1, T2)
129where
130    T1: MallocSizeOf,
131    T2: MallocSizeOf,
132{
133    fn size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
134        self.0.size_of(ops) + self.1.size_of(ops)
135    }
136}
137
138impl<T1, T2, T3> MallocSizeOf for (T1, T2, T3)
139where
140    T1: MallocSizeOf,
141    T2: MallocSizeOf,
142    T3: MallocSizeOf,
143{
144    fn size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
145        self.0.size_of(ops) + self.1.size_of(ops) + self.2.size_of(ops)
146    }
147}
148
149impl<T1, T2, T3, T4> MallocSizeOf for (T1, T2, T3, T4)
150where
151    T1: MallocSizeOf,
152    T2: MallocSizeOf,
153    T3: MallocSizeOf,
154    T4: MallocSizeOf,
155{
156    fn size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
157        self.0.size_of(ops) + self.1.size_of(ops) + self.2.size_of(ops) + self.3.size_of(ops)
158    }
159}
160
161impl<T: MallocSizeOf> MallocSizeOf for Option<T> {
162    fn size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
163        if let Some(val) = self.as_ref() {
164            val.size_of(ops)
165        } else {
166            0
167        }
168    }
169}
170
171impl<T: MallocSizeOf, E: MallocSizeOf> MallocSizeOf for Result<T, E> {
172    fn size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
173        match *self {
174            Ok(ref x) => x.size_of(ops),
175            Err(ref e) => e.size_of(ops),
176        }
177    }
178}
179
180impl<T: MallocSizeOf + Copy> MallocSizeOf for std::cell::Cell<T> {
181    fn size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
182        self.get().size_of(ops)
183    }
184}
185
186impl<T: MallocSizeOf> MallocSizeOf for std::cell::RefCell<T> {
187    fn size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
188        self.borrow().size_of(ops)
189    }
190}
191
192impl<'a, B: ?Sized + ToOwned> MallocSizeOf for std::borrow::Cow<'a, B>
193where
194    B::Owned: MallocSizeOf,
195{
196    fn size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
197        match *self {
198            std::borrow::Cow::Borrowed(_) => 0,
199            std::borrow::Cow::Owned(ref b) => b.size_of(ops),
200        }
201    }
202}
203
204impl<T: MallocSizeOf> MallocSizeOf for [T] {
205    fn size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
206        let mut n = 0;
207        for elem in self.iter() {
208            n += elem.size_of(ops);
209        }
210        n
211    }
212}
213
impl<T> MallocShallowSizeOf for Vec<T> {
    fn shallow_size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
        // `as_ptr()` is the start of the element buffer; zero-capacity
        // vectors hold a small dangling pointer that `malloc_size_of`
        // reports as 0.
        unsafe { ops.malloc_size_of(self.as_ptr()) }
    }
}

220impl<T: MallocSizeOf> MallocSizeOf for Vec<T> {
221    fn size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
222        let mut n = self.shallow_size_of(ops);
223        for elem in self.iter() {
224            n += elem.size_of(ops);
225        }
226        n
227    }
228}
229
// Generates both the shallow and deep `MallocSizeOf` impls for a
// hash-set-shaped container (`$ty` must provide `iter()` and `capacity()`).
macro_rules! malloc_size_of_hash_set {
    ($ty:ty) => {
        impl<T, S> MallocShallowSizeOf for $ty
        where
            T: Eq + Hash,
            S: BuildHasher,
        {
            fn shallow_size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
                if ops.has_malloc_enclosing_size_of() {
                    // The first value from the iterator gives us an interior pointer.
                    // `ops.malloc_enclosing_size_of()` then gives us the storage size.
                    // This assumes that the `HashSet`'s contents (values and hashes)
                    // are all stored in a single contiguous heap allocation.
                    self.iter()
                        .next()
                        .map_or(0, |t| unsafe { ops.malloc_enclosing_size_of(t) })
                } else {
                    // An estimate: one `usize` of per-entry metadata on top
                    // of each `T`. NOTE(review): approximate — the real
                    // per-entry overhead depends on the table implementation.
                    self.capacity() * (size_of::<T>() + size_of::<usize>())
                }
            }
        }

        impl<T, S> MallocSizeOf for $ty
        where
            T: Eq + Hash + MallocSizeOf,
            S: BuildHasher,
        {
            // Deep measurement: shallow table storage plus each element's
            // own heap children.
            fn size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
                let mut n = self.shallow_size_of(ops);
                for t in self.iter() {
                    n += t.size_of(ops);
                }
                n
            }
        }
    };
}

malloc_size_of_hash_set!(std::collections::HashSet<T, S>);

// Generates both the shallow and deep `MallocSizeOf` impls for a
// hash-map-shaped container (`$ty` must provide `values()`, `iter()` and
// `capacity()`).
macro_rules! malloc_size_of_hash_map {
    ($ty:ty) => {
        impl<K, V, S> MallocShallowSizeOf for $ty
        where
            K: Eq + Hash,
            S: BuildHasher,
        {
            fn shallow_size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
                // See the implementation for std::collections::HashSet for details.
                if ops.has_malloc_enclosing_size_of() {
                    self.values()
                        .next()
                        .map_or(0, |v| unsafe { ops.malloc_enclosing_size_of(v) })
                } else {
                    // An estimate: key + value + one `usize` of per-entry
                    // metadata, for every allocated slot.
                    self.capacity() * (size_of::<V>() + size_of::<K>() + size_of::<usize>())
                }
            }
        }

        impl<K, V, S> MallocSizeOf for $ty
        where
            K: Eq + Hash + MallocSizeOf,
            V: MallocSizeOf,
            S: BuildHasher,
        {
            // Deep measurement: shallow table storage plus each key's and
            // value's own heap children.
            fn size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
                let mut n = self.shallow_size_of(ops);
                for (k, v) in self.iter() {
                    n += k.size_of(ops);
                    n += v.size_of(ops);
                }
                n
            }
        }
    };
}

malloc_size_of_hash_map!(std::collections::HashMap<K, V, S>);

impl<K, V> MallocShallowSizeOf for BTreeMap<K, V> {
    fn shallow_size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
        if ops.has_malloc_enclosing_size_of() {
            // Measure the allocation containing the first value.
            // NOTE(review): a B-tree typically spreads entries over many node
            // allocations; this measures only the node holding that first
            // value, so large maps are likely undercounted — confirm this is
            // an accepted trade-off.
            self.values()
                .next()
                .map_or(0, |v| unsafe { ops.malloc_enclosing_size_of(v) })
        } else {
            // An estimate: entry size plus one `usize` of node overhead each.
            self.len() * (size_of::<V>() + size_of::<K>() + size_of::<usize>())
        }
    }
}

322impl<K, V> MallocSizeOf for BTreeMap<K, V>
323where
324    K: MallocSizeOf,
325    V: MallocSizeOf,
326{
327    fn size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
328        let mut n = self.shallow_size_of(ops);
329        for (k, v) in self.iter() {
330            n += k.size_of(ops);
331            n += v.size_of(ops);
332        }
333        n
334    }
335}
336
impl<T> MallocShallowSizeOf for BTreeSet<T> {
    fn shallow_size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
        if ops.has_malloc_enclosing_size_of() {
            // NOTE(review): measures only the node allocation containing the
            // first element; multi-node trees are likely undercounted.
            self.iter()
                .next()
                .map_or(0, |v| unsafe { ops.malloc_enclosing_size_of(v) })
        } else {
            // An estimate: element size plus one `usize` of node overhead each.
            self.len() * (size_of::<T>() + size_of::<usize>())
        }
    }
}

349impl<T> MallocSizeOf for BTreeSet<T>
350where
351    T: MallocSizeOf,
352{
353    fn size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
354        let mut n = self.shallow_size_of(ops);
355        for v in self.iter() {
356            n += v.size_of(ops);
357        }
358        n
359    }
360}
361
362// PhantomData is always 0.
363impl<T> MallocSizeOf for std::marker::PhantomData<T> {
364    fn size_of(&self, _ops: &mut MallocSizeOfOps) -> usize {
365        0
366    }
367}
368
impl MallocSizeOf for PathBuf {
    fn size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
        match self.to_str() {
            // UTF-8 path: `s` borrows the PathBuf's own storage, so its data
            // pointer lets the allocator report the real heap block size.
            // NOTE(review): assumes the `str` data pointer is the start of
            // the allocation — confirm for all platforms' OsStr encodings.
            Some(s) => unsafe { ops.malloc_size_of(s.as_ptr()) },
            // Non-UTF-8 path: fall back to the byte length as an estimate
            // (allocator slop/capacity not included).
            None => self.as_os_str().len(),
        }
    }
}

#[cfg(feature = "euclid")]
impl<T: MallocSizeOf, Unit> MallocSizeOf for euclid::Length<T, Unit> {
    /// Deep size of the wrapped scalar.
    fn size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
        let scalar = &self.0;
        scalar.size_of(ops)
    }
}

#[cfg(feature = "euclid")]
impl<T: MallocSizeOf, Src, Dst> MallocSizeOf for euclid::Scale<T, Src, Dst> {
    /// Deep size of the wrapped scale factor.
    fn size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
        let factor = &self.0;
        factor.size_of(ops)
    }
}

#[cfg(feature = "euclid")]
impl<T: MallocSizeOf, U> MallocSizeOf for euclid::Point2D<T, U> {
    /// Deep size of both coordinates.
    fn size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
        let mut total = self.x.size_of(ops);
        total += self.y.size_of(ops);
        total
    }
}

#[cfg(feature = "euclid")]
impl<T: MallocSizeOf, U> MallocSizeOf for euclid::Rect<T, U> {
    /// Deep size of the origin point and the size.
    fn size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
        let mut total = self.origin.size_of(ops);
        total += self.size.size_of(ops);
        total
    }
}

#[cfg(feature = "euclid")]
impl<T: MallocSizeOf, U> MallocSizeOf for euclid::Box2D<T, U> {
    /// Deep size of both corner points.
    fn size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
        let mut total = self.min.size_of(ops);
        total += self.max.size_of(ops);
        total
    }
}

#[cfg(feature = "euclid")]
impl<T: MallocSizeOf, U> MallocSizeOf for euclid::SideOffsets2D<T, U> {
    /// Deep size of all four side offsets.
    fn size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
        let mut total = self.top.size_of(ops);
        total += self.right.size_of(ops);
        total += self.bottom.size_of(ops);
        total += self.left.size_of(ops);
        total
    }
}

#[cfg(feature = "euclid")]
impl<T: MallocSizeOf, U> MallocSizeOf for euclid::Size2D<T, U> {
    /// Deep size of both dimensions.
    fn size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
        let mut total = self.width.size_of(ops);
        total += self.height.size_of(ops);
        total
    }
}

#[cfg(feature = "euclid")]
impl<T: MallocSizeOf, Src, Dst> MallocSizeOf for euclid::Transform2D<T, Src, Dst> {
    /// Deep size of all six matrix components, measured in declaration order.
    fn size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
        let mut total = self.m11.size_of(ops);
        total += self.m12.size_of(ops);
        total += self.m21.size_of(ops);
        total += self.m22.size_of(ops);
        total += self.m31.size_of(ops);
        total += self.m32.size_of(ops);
        total
    }
}

#[cfg(feature = "euclid")]
impl<T: MallocSizeOf, Src, Dst> MallocSizeOf for euclid::Transform3D<T, Src, Dst> {
    /// Deep size of all sixteen matrix components, summed row by row in
    /// declaration order.
    fn size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
        let row1 =
            self.m11.size_of(ops) + self.m12.size_of(ops) + self.m13.size_of(ops) + self.m14.size_of(ops);
        let row2 =
            self.m21.size_of(ops) + self.m22.size_of(ops) + self.m23.size_of(ops) + self.m24.size_of(ops);
        let row3 =
            self.m31.size_of(ops) + self.m32.size_of(ops) + self.m33.size_of(ops) + self.m34.size_of(ops);
        let row4 =
            self.m41.size_of(ops) + self.m42.size_of(ops) + self.m43.size_of(ops) + self.m44.size_of(ops);
        row1 + row2 + row3 + row4
    }
}

#[cfg(feature = "euclid")]
impl<T: MallocSizeOf, U> MallocSizeOf for euclid::Vector2D<T, U> {
    /// Deep size of both components.
    fn size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
        let mut total = self.x.size_of(ops);
        total += self.y.size_of(ops);
        total
    }
}

/// For use on types where size_of() returns 0.
#[macro_export]
macro_rules! malloc_size_of_is_0(
    // Arm 1: concrete (possibly unsized) types, e.g. `u32`, `str`.
    ($($ty:ty),+) => (
        $(
            impl $crate::MallocSizeOf for $ty {
                #[inline(always)]
                fn size_of(&self, _: &mut $crate::MallocSizeOfOps) -> usize {
                    0
                }
            }
        )+
    );
    // Arm 2: generic type constructors, e.g. `Range<u8>`. Each type
    // parameter is bounded by `MallocSizeOf`; the result is still 0.
    ($($ty:ident<$($gen:ident),+>),+) => (
        $(
        impl<$($gen: $crate::MallocSizeOf),+> $crate::MallocSizeOf for $ty<$($gen),+> {
            #[inline(always)]
            fn size_of(&self, _: &mut $crate::MallocSizeOfOps) -> usize {
                0
            }
        }
        )+
    );
);

// Primitive scalars and `str` own no heap memory of their own.
malloc_size_of_is_0!(bool, char, str);
malloc_size_of_is_0!(u8, u16, u32, u64, u128, usize);
malloc_size_of_is_0!(i8, i16, i32, i64, i128, isize);
malloc_size_of_is_0!(f32, f64);

// Atomic integer types.
malloc_size_of_is_0!(std::sync::atomic::AtomicBool);
malloc_size_of_is_0!(std::sync::atomic::AtomicIsize);
malloc_size_of_is_0!(std::sync::atomic::AtomicUsize);
malloc_size_of_is_0!(
    std::sync::atomic::AtomicU8,
    std::sync::atomic::AtomicU16,
    std::sync::atomic::AtomicU32,
    std::sync::atomic::AtomicU64
);
malloc_size_of_is_0!(
    std::sync::atomic::AtomicI8,
    std::sync::atomic::AtomicI16,
    std::sync::atomic::AtomicI32,
    std::sync::atomic::AtomicI64
);

// Non-zero integer wrappers.
malloc_size_of_is_0!(std::num::NonZeroUsize);
malloc_size_of_is_0!(std::num::NonZeroU32);

// Time types.
malloc_size_of_is_0!(std::time::Duration);
malloc_size_of_is_0!(std::time::Instant);
malloc_size_of_is_0!(std::time::SystemTime);

// `Range<T>` stores its two endpoints inline (uses the macro's second arm).
malloc_size_of_is_0!(Range<u8>, Range<u16>, Range<u32>, Range<u64>, Range<usize>);
malloc_size_of_is_0!(Range<i8>, Range<i16>, Range<i32>, Range<i64>, Range<isize>);
malloc_size_of_is_0!(Range<f32>, Range<f64>);

// `app_units::Au` is treated as heap-free.
#[cfg(feature = "app_units")]
malloc_size_of_is_0!(app_units::Au);

#[cfg(feature = "once_cell")]
impl<T: MallocSizeOf, F: FnOnce() -> T> MallocSizeOf for once_cell::sync::Lazy<T, F> {
    /// Deep size of the initialized value; 0 if the lazy has not been forced.
    fn size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
        match once_cell::sync::Lazy::get(self) {
            Some(value) => value.size_of(ops),
            None => 0,
        }
    }
}

#[cfg(feature = "once_cell")]
impl<T: MallocSizeOf> MallocSizeOf for once_cell::sync::OnceCell<T> {
    /// Deep size of the stored value; 0 while the cell is unset.
    fn size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
        once_cell::sync::OnceCell::get(self).map_or(0, |value| value.size_of(ops))
    }
}

#[cfg(feature = "once_cell")]
impl<T: MallocSizeOf, F: FnOnce() -> T> MallocSizeOf for &'static once_cell::sync::Lazy<T, F> {
    /// Deep size of the initialized value behind the static reference; 0 if
    /// the lazy has not been forced.
    fn size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
        once_cell::sync::Lazy::get(self).map_or(0, |value| value.size_of(ops))
    }
}