// wr_malloc_size_of/lib.rs

// Copyright 2016-2017 The Servo Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.

//! A reduced fork of Firefox's malloc_size_of crate, for bundling with WebRender.

#[cfg(feature = "app_units")]
extern crate app_units;
#[cfg(feature = "euclid")]
extern crate euclid;

use std::hash::{BuildHasher, Hash};
use std::mem::size_of;
use std::ops::Range;
use std::os::raw::c_void;
use std::path::PathBuf;

/// A C function that takes a pointer to a heap allocation and returns its size.
pub type VoidPtrToSizeFn = unsafe extern "C" fn(ptr: *const c_void) -> usize;

/// Operations used when measuring heap usage of data structures.
pub struct MallocSizeOfOps {
    /// A function that returns the size of a heap allocation.
    pub size_of_op: VoidPtrToSizeFn,

    /// Like `size_of_op`, but can take an interior pointer. Optional because
    /// not all allocators support this operation. If it's not provided, some
    /// memory measurements will actually be computed estimates rather than
    /// real and accurate measurements.
    pub enclosing_size_of_op: Option<VoidPtrToSizeFn>,
}

impl MallocSizeOfOps {
    pub fn new(
        size_of: VoidPtrToSizeFn,
        malloc_enclosing_size_of: Option<VoidPtrToSizeFn>,
    ) -> Self {
        MallocSizeOfOps {
            size_of_op: size_of,
            enclosing_size_of_op: malloc_enclosing_size_of,
        }
    }

    /// Check if an allocation is empty. This relies on knowledge of how Rust
    /// handles empty allocations, which may change in the future.
    fn is_empty<T: ?Sized>(ptr: *const T) -> bool {
        // The correct condition is this:
        //   `ptr as usize <= ::std::mem::align_of::<T>()`
        // But we can't call align_of() on a ?Sized T. So we approximate it
        // with the following. 256 is large enough that it should always be
        // larger than the required alignment, but small enough that it is
        // always in the first page of memory and therefore not a legitimate
        // address.
        ptr as *const usize as usize <= 256
    }

    /// Call `size_of_op` on `ptr`, first checking that the allocation isn't
    /// empty, because some types (such as `Vec`) utilize empty allocations.
    pub unsafe fn malloc_size_of<T: ?Sized>(&self, ptr: *const T) -> usize {
        if MallocSizeOfOps::is_empty(ptr) {
            0
        } else {
            (self.size_of_op)(ptr as *const c_void)
        }
    }

    /// Is an `enclosing_size_of_op` available?
    pub fn has_malloc_enclosing_size_of(&self) -> bool {
        self.enclosing_size_of_op.is_some()
    }

    /// Call `enclosing_size_of_op`, which must be available, on `ptr`, which
    /// must not be empty.
    pub unsafe fn malloc_enclosing_size_of<T>(&self, ptr: *const T) -> usize {
        assert!(!MallocSizeOfOps::is_empty(ptr));
        (self.enclosing_size_of_op.unwrap())(ptr as *const c_void)
    }
}
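
// Illustrative sketch (not part of the original crate): an embedder typically
// wires `MallocSizeOfOps` up to its allocator's usable-size query. The hook
// name `usable_size` below is hypothetical; pass whatever your allocator
// exposes (e.g. a wrapper around `malloc_usable_size`):
//
//     unsafe extern "C" fn usable_size(ptr: *const c_void) -> usize {
//         // e.g. forward `ptr` to `malloc_usable_size` or an equivalent hook
//         unimplemented!()
//     }
//
//     let mut ops = MallocSizeOfOps::new(usable_size, None);
//     let heap_bytes = vec![1u32, 2, 3].size_of(&mut ops);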

/// Trait for measuring the "deep" heap usage of a data structure. This is the
/// most commonly-used of the traits.
pub trait MallocSizeOf {
    /// Measure the heap usage of all descendant heap-allocated structures, but
    /// not the space taken up by the value itself.
    fn size_of(&self, ops: &mut MallocSizeOfOps) -> usize;
}

/// Trait for measuring the "shallow" heap usage of a container.
pub trait MallocShallowSizeOf {
    /// Measure the heap usage of immediate heap-allocated descendant
    /// structures, but not the space taken up by the value itself. Anything
    /// beyond the immediate descendants must be measured separately, using
    /// iteration.
    fn shallow_size_of(&self, ops: &mut MallocSizeOfOps) -> usize;
}
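
// Illustrative sketch (not part of the original crate): a hand-written
// `MallocSizeOf` impl for a user-defined type usually just sums the deep
// sizes of its fields; the value's own inline size is deliberately excluded.
// `ExampleEntry` is a hypothetical type:
//
//     struct ExampleEntry {
//         name: String,
//         items: Vec<u32>,
//     }
//
//     impl MallocSizeOf for ExampleEntry {
//         fn size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
//             self.name.size_of(ops) + self.items.size_of(ops)
//         }
//     }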

impl MallocSizeOf for String {
    fn size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
        unsafe { ops.malloc_size_of(self.as_ptr()) }
    }
}

impl<T: ?Sized> MallocShallowSizeOf for Box<T> {
    fn shallow_size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
        unsafe { ops.malloc_size_of(&**self) }
    }
}

impl<T: MallocSizeOf + ?Sized> MallocSizeOf for Box<T> {
    fn size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
        self.shallow_size_of(ops) + (**self).size_of(ops)
    }
}

impl MallocSizeOf for () {
    fn size_of(&self, _ops: &mut MallocSizeOfOps) -> usize {
        0
    }
}

impl<T1, T2> MallocSizeOf for (T1, T2)
where
    T1: MallocSizeOf,
    T2: MallocSizeOf,
{
    fn size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
        self.0.size_of(ops) + self.1.size_of(ops)
    }
}

impl<T1, T2, T3> MallocSizeOf for (T1, T2, T3)
where
    T1: MallocSizeOf,
    T2: MallocSizeOf,
    T3: MallocSizeOf,
{
    fn size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
        self.0.size_of(ops) + self.1.size_of(ops) + self.2.size_of(ops)
    }
}

impl<T1, T2, T3, T4> MallocSizeOf for (T1, T2, T3, T4)
where
    T1: MallocSizeOf,
    T2: MallocSizeOf,
    T3: MallocSizeOf,
    T4: MallocSizeOf,
{
    fn size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
        self.0.size_of(ops) + self.1.size_of(ops) + self.2.size_of(ops) + self.3.size_of(ops)
    }
}

impl<T: MallocSizeOf> MallocSizeOf for Option<T> {
    fn size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
        if let Some(val) = self.as_ref() {
            val.size_of(ops)
        } else {
            0
        }
    }
}

impl<T: MallocSizeOf, E: MallocSizeOf> MallocSizeOf for Result<T, E> {
    fn size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
        match *self {
            Ok(ref x) => x.size_of(ops),
            Err(ref e) => e.size_of(ops),
        }
    }
}

impl<T: MallocSizeOf + Copy> MallocSizeOf for std::cell::Cell<T> {
    fn size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
        self.get().size_of(ops)
    }
}

impl<T: MallocSizeOf> MallocSizeOf for std::cell::RefCell<T> {
    fn size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
        self.borrow().size_of(ops)
    }
}

impl<'a, B: ?Sized + ToOwned> MallocSizeOf for std::borrow::Cow<'a, B>
where
    B::Owned: MallocSizeOf,
{
    fn size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
        match *self {
            std::borrow::Cow::Borrowed(_) => 0,
            std::borrow::Cow::Owned(ref b) => b.size_of(ops),
        }
    }
}

impl<T: MallocSizeOf> MallocSizeOf for [T] {
    fn size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
        let mut n = 0;
        for elem in self.iter() {
            n += elem.size_of(ops);
        }
        n
    }
}

impl<T> MallocShallowSizeOf for Vec<T> {
    fn shallow_size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
        unsafe { ops.malloc_size_of(self.as_ptr()) }
    }
}

impl<T: MallocSizeOf> MallocSizeOf for Vec<T> {
    fn size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
        let mut n = self.shallow_size_of(ops);
        for elem in self.iter() {
            n += elem.size_of(ops);
        }
        n
    }
}

macro_rules! malloc_size_of_hash_set {
    ($ty:ty) => {
        impl<T, S> MallocShallowSizeOf for $ty
        where
            T: Eq + Hash,
            S: BuildHasher,
        {
            fn shallow_size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
                if ops.has_malloc_enclosing_size_of() {
                    // The first value from the iterator gives us an interior pointer.
                    // `ops.malloc_enclosing_size_of()` then gives us the storage size.
                    // This assumes that the `HashSet`'s contents (values and hashes)
                    // are all stored in a single contiguous heap allocation.
                    self.iter()
                        .next()
                        .map_or(0, |t| unsafe { ops.malloc_enclosing_size_of(t) })
                } else {
                    // An estimate.
                    self.capacity() * (size_of::<T>() + size_of::<usize>())
                }
            }
        }

        impl<T, S> MallocSizeOf for $ty
        where
            T: Eq + Hash + MallocSizeOf,
            S: BuildHasher,
        {
            fn size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
                let mut n = self.shallow_size_of(ops);
                for t in self.iter() {
                    n += t.size_of(ops);
                }
                n
            }
        }
    };
}

malloc_size_of_hash_set!(std::collections::HashSet<T, S>);
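
// Worked example of the estimate branch above (illustrative): without an
// `enclosing_size_of_op`, a `HashSet<u64>` whose `capacity()` is 16 is
// reported as 16 * (size_of::<u64>() + size_of::<usize>()) = 16 * (8 + 8) =
// 256 bytes on a 64-bit target, regardless of how many slots are occupied.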

macro_rules! malloc_size_of_hash_map {
    ($ty:ty) => {
        impl<K, V, S> MallocShallowSizeOf for $ty
        where
            K: Eq + Hash,
            S: BuildHasher,
        {
            fn shallow_size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
                // See the implementation for std::collections::HashSet for details.
                if ops.has_malloc_enclosing_size_of() {
                    self.values()
                        .next()
                        .map_or(0, |v| unsafe { ops.malloc_enclosing_size_of(v) })
                } else {
                    self.capacity() * (size_of::<V>() + size_of::<K>() + size_of::<usize>())
                }
            }
        }

        impl<K, V, S> MallocSizeOf for $ty
        where
            K: Eq + Hash + MallocSizeOf,
            V: MallocSizeOf,
            S: BuildHasher,
        {
            fn size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
                let mut n = self.shallow_size_of(ops);
                for (k, v) in self.iter() {
                    n += k.size_of(ops);
                    n += v.size_of(ops);
                }
                n
            }
        }
    };
}

malloc_size_of_hash_map!(std::collections::HashMap<K, V, S>);

// PhantomData is always 0.
impl<T> MallocSizeOf for std::marker::PhantomData<T> {
    fn size_of(&self, _ops: &mut MallocSizeOfOps) -> usize {
        0
    }
}

impl MallocSizeOf for PathBuf {
    fn size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
        match self.to_str() {
            Some(s) => unsafe { ops.malloc_size_of(s.as_ptr()) },
            None => self.as_os_str().len(),
        }
    }
}

#[cfg(feature = "euclid")]
impl<T: MallocSizeOf, Unit> MallocSizeOf for euclid::Length<T, Unit> {
    fn size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
        self.0.size_of(ops)
    }
}

#[cfg(feature = "euclid")]
impl<T: MallocSizeOf, Src, Dst> MallocSizeOf for euclid::Scale<T, Src, Dst> {
    fn size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
        self.0.size_of(ops)
    }
}

#[cfg(feature = "euclid")]
impl<T: MallocSizeOf, U> MallocSizeOf for euclid::Point2D<T, U> {
    fn size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
        self.x.size_of(ops) + self.y.size_of(ops)
    }
}

#[cfg(feature = "euclid")]
impl<T: MallocSizeOf, U> MallocSizeOf for euclid::Rect<T, U> {
    fn size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
        self.origin.size_of(ops) + self.size.size_of(ops)
    }
}

#[cfg(feature = "euclid")]
impl<T: MallocSizeOf, U> MallocSizeOf for euclid::Box2D<T, U> {
    fn size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
        self.min.size_of(ops) + self.max.size_of(ops)
    }
}

#[cfg(feature = "euclid")]
impl<T: MallocSizeOf, U> MallocSizeOf for euclid::SideOffsets2D<T, U> {
    fn size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
        self.top.size_of(ops) +
            self.right.size_of(ops) +
            self.bottom.size_of(ops) +
            self.left.size_of(ops)
    }
}

#[cfg(feature = "euclid")]
impl<T: MallocSizeOf, U> MallocSizeOf for euclid::Size2D<T, U> {
    fn size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
        self.width.size_of(ops) + self.height.size_of(ops)
    }
}

#[cfg(feature = "euclid")]
impl<T: MallocSizeOf, Src, Dst> MallocSizeOf for euclid::Transform2D<T, Src, Dst> {
    fn size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
        self.m11.size_of(ops) +
            self.m12.size_of(ops) +
            self.m21.size_of(ops) +
            self.m22.size_of(ops) +
            self.m31.size_of(ops) +
            self.m32.size_of(ops)
    }
}

#[cfg(feature = "euclid")]
impl<T: MallocSizeOf, Src, Dst> MallocSizeOf for euclid::Transform3D<T, Src, Dst> {
    fn size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
        self.m11.size_of(ops) +
            self.m12.size_of(ops) +
            self.m13.size_of(ops) +
            self.m14.size_of(ops) +
            self.m21.size_of(ops) +
            self.m22.size_of(ops) +
            self.m23.size_of(ops) +
            self.m24.size_of(ops) +
            self.m31.size_of(ops) +
            self.m32.size_of(ops) +
            self.m33.size_of(ops) +
            self.m34.size_of(ops) +
            self.m41.size_of(ops) +
            self.m42.size_of(ops) +
            self.m43.size_of(ops) +
            self.m44.size_of(ops)
    }
}

#[cfg(feature = "euclid")]
impl<T: MallocSizeOf, U> MallocSizeOf for euclid::Vector2D<T, U> {
    fn size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
        self.x.size_of(ops) + self.y.size_of(ops)
    }
}

/// For use on types where size_of() returns 0.
#[macro_export]
macro_rules! malloc_size_of_is_0(
    ($($ty:ty),+) => (
        $(
            impl $crate::MallocSizeOf for $ty {
                #[inline(always)]
                fn size_of(&self, _: &mut $crate::MallocSizeOfOps) -> usize {
                    0
                }
            }
        )+
    );
    ($($ty:ident<$($gen:ident),+>),+) => (
        $(
        impl<$($gen: $crate::MallocSizeOf),+> $crate::MallocSizeOf for $ty<$($gen),+> {
            #[inline(always)]
            fn size_of(&self, _: &mut $crate::MallocSizeOfOps) -> usize {
                0
            }
        }
        )+
    );
);
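
// The second arm above covers generic types whose heap usage is always zero.
// It is not exercised in this file; a hypothetical use would look like
// `malloc_size_of_is_0!(MyHandle<T>)`, which expands to an
// `impl<T: MallocSizeOf> MallocSizeOf for MyHandle<T>` that returns 0.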

malloc_size_of_is_0!(bool, char, str);
malloc_size_of_is_0!(u8, u16, u32, u64, u128, usize);
malloc_size_of_is_0!(i8, i16, i32, i64, i128, isize);
malloc_size_of_is_0!(f32, f64);

malloc_size_of_is_0!(std::sync::atomic::AtomicBool);
malloc_size_of_is_0!(std::sync::atomic::AtomicIsize);
malloc_size_of_is_0!(std::sync::atomic::AtomicUsize);

malloc_size_of_is_0!(std::num::NonZeroUsize);
malloc_size_of_is_0!(std::num::NonZeroU32);

malloc_size_of_is_0!(std::time::Duration);
malloc_size_of_is_0!(std::time::Instant);
malloc_size_of_is_0!(std::time::SystemTime);

malloc_size_of_is_0!(Range<u8>, Range<u16>, Range<u32>, Range<u64>, Range<usize>);
malloc_size_of_is_0!(Range<i8>, Range<i16>, Range<i32>, Range<i64>, Range<isize>);
malloc_size_of_is_0!(Range<f32>, Range<f64>);

#[cfg(feature = "app_units")]
malloc_size_of_is_0!(app_units::Au);

#[cfg(feature = "once_cell")]
impl<T: MallocSizeOf, F: FnOnce() -> T> MallocSizeOf for once_cell::sync::Lazy<T, F> {
    fn size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
        once_cell::sync::Lazy::get(self).map(|obj| obj.size_of(ops)).unwrap_or(0)
    }
}

#[cfg(feature = "once_cell")]
impl<T: MallocSizeOf> MallocSizeOf for once_cell::sync::OnceCell<T> {
    fn size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
        once_cell::sync::OnceCell::get(self).map(|obj| obj.size_of(ops)).unwrap_or(0)
    }
}

#[cfg(feature = "once_cell")]
impl<T: MallocSizeOf, F: FnOnce() -> T> MallocSizeOf for &'static once_cell::sync::Lazy<T, F> {
    fn size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
        once_cell::sync::Lazy::get(self).map(|obj| obj.size_of(ops)).unwrap_or(0)
    }
}
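
// Illustrative usage sketch (not part of the original crate). `fake_size_of`
// below is a stand-in for a real allocator hook such as `malloc_usable_size`;
// it exists only to show how the pieces fit together.
#[cfg(test)]
mod example_usage {
    use super::*;
    use std::os::raw::c_void;

    // Pretend every heap allocation is reported as 64 bytes. A real embedder
    // would forward `ptr` to its allocator's usable-size query instead.
    unsafe extern "C" fn fake_size_of(_ptr: *const c_void) -> usize {
        64
    }

    #[test]
    fn measures_nested_structures() {
        let mut ops = MallocSizeOfOps::new(fake_size_of, None);

        // One allocation for the Vec's buffer plus one per String buffer.
        let v = vec!["a".to_string(), "bb".to_string()];
        assert_eq!(v.size_of(&mut ops), 3 * 64);

        // An empty Vec has no heap allocation, so it reports zero.
        let empty: Vec<u32> = Vec::new();
        assert_eq!(empty.size_of(&mut ops), 0);
    }
}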