1use std::hash::{BuildHasher, Hash};
50use std::mem::size_of;
51use std::ops::Range;
52use std::ops::{Deref, DerefMut};
53use std::os::raw::c_void;
54use void::Void;
55
/// Function that, given a pointer into a live heap block, returns the
/// allocator-reported size of that block (e.g. `malloc_usable_size`-style).
type VoidPtrToSizeFn = unsafe extern "C" fn(ptr: *const c_void) -> usize;

/// Closure used to record pointers that have already been measured, so that
/// shared allocations (e.g. `Arc` contents) are only counted once.
type VoidPtrToBoolFnMut = dyn FnMut(*const c_void) -> bool;
61
/// Bundle of callbacks used while measuring how much heap memory a value
/// uses. Built once by the embedder and threaded through `size_of` calls.
pub struct MallocSizeOfOps {
    /// Measures the heap block a (start-of-block) pointer points to.
    size_of_op: VoidPtrToSizeFn,

    /// Like `size_of_op`, but may be given an interior pointer (one that
    /// points somewhere inside a block rather than at its start). Optional.
    enclosing_size_of_op: Option<VoidPtrToSizeFn>,

    /// Records pointers already seen, for de-duplicating shared allocations.
    /// Optional.
    have_seen_ptr_op: Option<Box<VoidPtrToBoolFnMut>>,
}
78
79impl MallocSizeOfOps {
80 pub fn new(
81 size_of: VoidPtrToSizeFn,
82 malloc_enclosing_size_of: Option<VoidPtrToSizeFn>,
83 have_seen_ptr: Option<Box<VoidPtrToBoolFnMut>>,
84 ) -> Self {
85 MallocSizeOfOps {
86 size_of_op: size_of,
87 enclosing_size_of_op: malloc_enclosing_size_of,
88 have_seen_ptr_op: have_seen_ptr,
89 }
90 }
91
92 fn is_empty<T: ?Sized>(ptr: *const T) -> bool {
95 return ptr as *const usize as usize <= 256;
103 }
104
105 pub unsafe fn malloc_size_of<T: ?Sized>(&self, ptr: *const T) -> usize {
108 if MallocSizeOfOps::is_empty(ptr) {
109 0
110 } else {
111 (self.size_of_op)(ptr as *const c_void)
112 }
113 }
114
115 pub fn has_malloc_enclosing_size_of(&self) -> bool {
117 self.enclosing_size_of_op.is_some()
118 }
119
120 pub unsafe fn malloc_enclosing_size_of<T>(&self, ptr: *const T) -> usize {
123 assert!(!MallocSizeOfOps::is_empty(ptr));
124 (self.enclosing_size_of_op.unwrap())(ptr as *const c_void)
125 }
126
127 pub fn have_seen_ptr<T>(&mut self, ptr: *const T) -> bool {
129 let have_seen_ptr_op = self
130 .have_seen_ptr_op
131 .as_mut()
132 .expect("missing have_seen_ptr_op");
133 have_seen_ptr_op(ptr as *const c_void)
134 }
135}
136
/// Measures the "deep" heap usage of a value: the allocations it owns
/// (directly and transitively), excluding the value's own inline size.
pub trait MallocSizeOf {
    /// Returns the number of heap-allocated bytes owned by `self`.
    fn size_of(&self, ops: &mut MallocSizeOfOps) -> usize;
}

/// Measures only a container's own allocation(s), not its contents.
pub trait MallocShallowSizeOf {
    /// Returns the size of `self`'s own heap block(s), ignoring elements.
    fn shallow_size_of(&self, ops: &mut MallocSizeOfOps) -> usize;
}

/// Deep measurement of a possibly-shared value, counting it even if the
/// allocation may be reachable from elsewhere.
pub trait MallocUnconditionalSizeOf {
    fn unconditional_size_of(&self, ops: &mut MallocSizeOfOps) -> usize;
}

/// Shallow counterpart of [`MallocUnconditionalSizeOf`].
pub trait MallocUnconditionalShallowSizeOf {
    fn unconditional_shallow_size_of(&self, ops: &mut MallocSizeOfOps) -> usize;
}

/// Deep measurement of a possibly-shared value that counts the allocation
/// only the first time it is seen (via `MallocSizeOfOps::have_seen_ptr`).
pub trait MallocConditionalSizeOf {
    fn conditional_size_of(&self, ops: &mut MallocSizeOfOps) -> usize;
}

/// Shallow counterpart of [`MallocConditionalSizeOf`].
pub trait MallocConditionalShallowSizeOf {
    fn conditional_shallow_size_of(&self, ops: &mut MallocSizeOfOps) -> usize;
}
184
185impl MallocSizeOf for String {
186 fn size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
187 unsafe { ops.malloc_size_of(self.as_ptr()) }
188 }
189}
190
impl<'a, T: ?Sized> MallocSizeOf for &'a T {
    fn size_of(&self, _ops: &mut MallocSizeOfOps) -> usize {
        // A reference does not own its pointee; the owner measures it.
        0
    }
}
197
198impl<T: ?Sized> MallocShallowSizeOf for Box<T> {
199 fn shallow_size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
200 unsafe { ops.malloc_size_of(&**self) }
201 }
202}
203
204impl<T: MallocSizeOf + ?Sized> MallocSizeOf for Box<T> {
205 fn size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
206 self.shallow_size_of(ops) + (**self).size_of(ops)
207 }
208}
209
impl MallocSizeOf for () {
    fn size_of(&self, _ops: &mut MallocSizeOfOps) -> usize {
        // The unit type owns nothing.
        0
    }
}
215
216impl<T1, T2> MallocSizeOf for (T1, T2)
217where
218 T1: MallocSizeOf,
219 T2: MallocSizeOf,
220{
221 fn size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
222 self.0.size_of(ops) + self.1.size_of(ops)
223 }
224}
225
226impl<T1, T2, T3> MallocSizeOf for (T1, T2, T3)
227where
228 T1: MallocSizeOf,
229 T2: MallocSizeOf,
230 T3: MallocSizeOf,
231{
232 fn size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
233 self.0.size_of(ops) + self.1.size_of(ops) + self.2.size_of(ops)
234 }
235}
236
237impl<T1, T2, T3, T4> MallocSizeOf for (T1, T2, T3, T4)
238where
239 T1: MallocSizeOf,
240 T2: MallocSizeOf,
241 T3: MallocSizeOf,
242 T4: MallocSizeOf,
243{
244 fn size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
245 self.0.size_of(ops) + self.1.size_of(ops) + self.2.size_of(ops) + self.3.size_of(ops)
246 }
247}
248
249impl<T: MallocSizeOf> MallocSizeOf for Option<T> {
250 fn size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
251 if let Some(val) = self.as_ref() {
252 val.size_of(ops)
253 } else {
254 0
255 }
256 }
257}
258
259impl<T: MallocSizeOf, E: MallocSizeOf> MallocSizeOf for Result<T, E> {
260 fn size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
261 match *self {
262 Ok(ref x) => x.size_of(ops),
263 Err(ref e) => e.size_of(ops),
264 }
265 }
266}
267
268impl<T: MallocSizeOf + Copy> MallocSizeOf for std::cell::Cell<T> {
269 fn size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
270 self.get().size_of(ops)
271 }
272}
273
274impl<T: MallocSizeOf> MallocSizeOf for std::cell::RefCell<T> {
275 fn size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
276 self.borrow().size_of(ops)
277 }
278}
279
280impl<'a, B: ?Sized + ToOwned> MallocSizeOf for std::borrow::Cow<'a, B>
281where
282 B::Owned: MallocSizeOf,
283{
284 fn size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
285 match *self {
286 std::borrow::Cow::Borrowed(_) => 0,
287 std::borrow::Cow::Owned(ref b) => b.size_of(ops),
288 }
289 }
290}
291
292impl<T: MallocSizeOf> MallocSizeOf for [T] {
293 fn size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
294 let mut n = 0;
295 for elem in self.iter() {
296 n += elem.size_of(ops);
297 }
298 n
299 }
300}
301
302impl<T> MallocShallowSizeOf for Vec<T> {
303 fn shallow_size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
304 unsafe { ops.malloc_size_of(self.as_ptr()) }
305 }
306}
307
308impl<T: MallocSizeOf> MallocSizeOf for Vec<T> {
309 fn size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
310 let mut n = self.shallow_size_of(ops);
311 for elem in self.iter() {
312 n += elem.size_of(ops);
313 }
314 n
315 }
316}
317
318impl<T> MallocShallowSizeOf for std::collections::VecDeque<T> {
319 fn shallow_size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
320 if ops.has_malloc_enclosing_size_of() {
321 if let Some(front) = self.front() {
322 unsafe { ops.malloc_enclosing_size_of(&*front) }
324 } else {
325 0
327 }
328 } else {
329 self.capacity() * size_of::<T>()
331 }
332 }
333}
334
335impl<T: MallocSizeOf> MallocSizeOf for std::collections::VecDeque<T> {
336 fn size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
337 let mut n = self.shallow_size_of(ops);
338 for elem in self.iter() {
339 n += elem.size_of(ops);
340 }
341 n
342 }
343}
344
345impl<A: smallvec::Array> MallocShallowSizeOf for smallvec::SmallVec<A> {
346 fn shallow_size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
347 if self.spilled() {
348 unsafe { ops.malloc_size_of(self.as_ptr()) }
349 } else {
350 0
351 }
352 }
353}
354
355impl<A> MallocSizeOf for smallvec::SmallVec<A>
356where
357 A: smallvec::Array,
358 A::Item: MallocSizeOf,
359{
360 fn size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
361 let mut n = self.shallow_size_of(ops);
362 for elem in self.iter() {
363 n += elem.size_of(ops);
364 }
365 n
366 }
367}
368
impl<T> MallocShallowSizeOf for thin_vec::ThinVec<T> {
    // Measures the ThinVec's single heap allocation (header + elements).
    fn shallow_size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
        // Zero capacity is treated as "no measurable allocation" —
        // presumably an empty ThinVec points at a shared static singleton;
        // TODO confirm against thin_vec internals.
        if self.capacity() == 0 {
            return 0;
        }

        // A ThinVec is represented as a single pointer to its heap header;
        // this assert pins that layout assumption before reinterpreting
        // `self` as a pointer-to-pointer below.
        assert_eq!(
            std::mem::size_of::<Self>(),
            std::mem::size_of::<*const ()>()
        );
        // SAFETY relies on the size assertion above: read the heap pointer
        // out of the ThinVec's own bytes and measure the block it points to.
        unsafe { ops.malloc_size_of(*(self as *const Self as *const *const ())) }
    }
}
383
384impl<T: MallocSizeOf> MallocSizeOf for thin_vec::ThinVec<T> {
385 fn size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
386 let mut n = self.shallow_size_of(ops);
387 for elem in self.iter() {
388 n += elem.size_of(ops);
389 }
390 n
391 }
392}
393
/// Implements shallow and deep `MallocSizeOf` for a hash-set-like type.
macro_rules! malloc_size_of_hash_set {
    ($ty:ty) => {
        impl<T, S> MallocShallowSizeOf for $ty
        where
            T: Eq + Hash,
            S: BuildHasher,
        {
            fn shallow_size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
                // Without an enclosing-size op, estimate: one element plus
                // one word of table overhead per bucket.
                if !ops.has_malloc_enclosing_size_of() {
                    return self.capacity() * (size_of::<T>() + size_of::<usize>());
                }
                // Any element pointer is an interior pointer into the table.
                match self.iter().next() {
                    Some(t) => unsafe { ops.malloc_enclosing_size_of(t) },
                    None => 0,
                }
            }
        }

        impl<T, S> MallocSizeOf for $ty
        where
            T: Eq + Hash + MallocSizeOf,
            S: BuildHasher,
        {
            fn size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
                // Table allocation plus each element's owned memory.
                self.iter()
                    .fold(self.shallow_size_of(ops), |n, t| n + t.size_of(ops))
            }
        }
    };
}
432
// Instantiate the hash-set impls for the standard `HashSet`.
malloc_size_of_hash_set!(std::collections::HashSet<T, S>);
434
/// Implements shallow and deep `MallocSizeOf` for a hash-map-like type.
macro_rules! malloc_size_of_hash_map {
    ($ty:ty) => {
        impl<K, V, S> MallocShallowSizeOf for $ty
        where
            K: Eq + Hash,
            S: BuildHasher,
        {
            fn shallow_size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
                // Without an enclosing-size op, estimate: one key, one value
                // and one word of table overhead per bucket.
                if !ops.has_malloc_enclosing_size_of() {
                    return self.capacity() * (size_of::<V>() + size_of::<K>() + size_of::<usize>());
                }
                // Any value pointer is an interior pointer into the table.
                match self.values().next() {
                    Some(v) => unsafe { ops.malloc_enclosing_size_of(v) },
                    None => 0,
                }
            }
        }

        impl<K, V, S> MallocSizeOf for $ty
        where
            K: Eq + Hash + MallocSizeOf,
            V: MallocSizeOf,
            S: BuildHasher,
        {
            fn size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
                // Table allocation plus each entry's owned memory.
                self.iter().fold(self.shallow_size_of(ops), |n, (k, v)| {
                    n + k.size_of(ops) + v.size_of(ops)
                })
            }
        }
    };
}
471
// Instantiate the hash-map impls for the standard `HashMap`.
malloc_size_of_hash_map!(std::collections::HashMap<K, V, S>);
473
474impl<K, V> MallocShallowSizeOf for std::collections::BTreeMap<K, V>
475where
476 K: Eq + Hash,
477{
478 fn shallow_size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
479 if ops.has_malloc_enclosing_size_of() {
480 self.values()
481 .next()
482 .map_or(0, |v| unsafe { ops.malloc_enclosing_size_of(v) })
483 } else {
484 self.len() * (size_of::<V>() + size_of::<K>() + size_of::<usize>())
485 }
486 }
487}
488
489impl<K, V> MallocSizeOf for std::collections::BTreeMap<K, V>
490where
491 K: Eq + Hash + MallocSizeOf,
492 V: MallocSizeOf,
493{
494 fn size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
495 let mut n = self.shallow_size_of(ops);
496 for (k, v) in self.iter() {
497 n += k.size_of(ops);
498 n += v.size_of(ops);
499 }
500 n
501 }
502}
503
impl<T> MallocSizeOf for std::marker::PhantomData<T> {
    fn size_of(&self, _ops: &mut MallocSizeOfOps) -> usize {
        // Zero-sized marker; owns nothing.
        0
    }
}
510
511impl<T> MallocUnconditionalShallowSizeOf for servo_arc::Arc<T> {
519 fn unconditional_shallow_size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
520 unsafe { ops.malloc_size_of(self.heap_ptr()) }
521 }
522}
523
524impl<T: MallocSizeOf> MallocUnconditionalSizeOf for servo_arc::Arc<T> {
525 fn unconditional_size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
526 self.unconditional_shallow_size_of(ops) + (**self).size_of(ops)
527 }
528}
529
530impl<T> MallocConditionalShallowSizeOf for servo_arc::Arc<T> {
531 fn conditional_shallow_size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
532 if ops.have_seen_ptr(self.heap_ptr()) {
533 0
534 } else {
535 self.unconditional_shallow_size_of(ops)
536 }
537 }
538}
539
540impl<T: MallocSizeOf> MallocConditionalSizeOf for servo_arc::Arc<T> {
541 fn conditional_size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
542 if ops.have_seen_ptr(self.heap_ptr()) {
543 0
544 } else {
545 self.unconditional_size_of(ops)
546 }
547 }
548}
549
550impl<T: MallocSizeOf> MallocSizeOf for std::sync::Mutex<T> {
557 fn size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
558 (*self.lock().unwrap()).size_of(ops)
559 }
560}
561
562impl MallocSizeOf for smallbitvec::SmallBitVec {
563 fn size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
564 if let Some(ptr) = self.heap_ptr() {
565 unsafe { ops.malloc_size_of(ptr) }
566 } else {
567 0
568 }
569 }
570}
571
impl<T: MallocSizeOf, Unit> MallocSizeOf for euclid::Length<T, Unit> {
    // Forwards to the wrapped scalar; the unit is a zero-sized phantom.
    fn size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
        self.0.size_of(ops)
    }
}
577
impl<T: MallocSizeOf, Src, Dst> MallocSizeOf for euclid::Scale<T, Src, Dst> {
    // Forwards to the wrapped scalar; the unit parameters are phantoms.
    fn size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
        self.0.size_of(ops)
    }
}
583
584impl<T: MallocSizeOf, U> MallocSizeOf for euclid::Point2D<T, U> {
585 fn size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
586 self.x.size_of(ops) + self.y.size_of(ops)
587 }
588}
589
590impl<T: MallocSizeOf, U> MallocSizeOf for euclid::Rect<T, U> {
591 fn size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
592 self.origin.size_of(ops) + self.size.size_of(ops)
593 }
594}
595
596impl<T: MallocSizeOf, U> MallocSizeOf for euclid::SideOffsets2D<T, U> {
597 fn size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
598 self.top.size_of(ops)
599 + self.right.size_of(ops)
600 + self.bottom.size_of(ops)
601 + self.left.size_of(ops)
602 }
603}
604
605impl<T: MallocSizeOf, U> MallocSizeOf for euclid::Size2D<T, U> {
606 fn size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
607 self.width.size_of(ops) + self.height.size_of(ops)
608 }
609}
610
611impl<T: MallocSizeOf, Src, Dst> MallocSizeOf for euclid::Transform2D<T, Src, Dst> {
612 fn size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
613 self.m11.size_of(ops)
614 + self.m12.size_of(ops)
615 + self.m21.size_of(ops)
616 + self.m22.size_of(ops)
617 + self.m31.size_of(ops)
618 + self.m32.size_of(ops)
619 }
620}
621
622impl<T: MallocSizeOf, Src, Dst> MallocSizeOf for euclid::Transform3D<T, Src, Dst> {
623 fn size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
624 self.m11.size_of(ops)
625 + self.m12.size_of(ops)
626 + self.m13.size_of(ops)
627 + self.m14.size_of(ops)
628 + self.m21.size_of(ops)
629 + self.m22.size_of(ops)
630 + self.m23.size_of(ops)
631 + self.m24.size_of(ops)
632 + self.m31.size_of(ops)
633 + self.m32.size_of(ops)
634 + self.m33.size_of(ops)
635 + self.m34.size_of(ops)
636 + self.m41.size_of(ops)
637 + self.m42.size_of(ops)
638 + self.m43.size_of(ops)
639 + self.m44.size_of(ops)
640 }
641}
642
643impl<T: MallocSizeOf, U> MallocSizeOf for euclid::Vector2D<T, U> {
644 fn size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
645 self.x.size_of(ops) + self.y.size_of(ops)
646 }
647}
648
impl MallocSizeOf for selectors::parser::AncestorHashes {
    fn size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
        // Exhaustive destructure is deliberate: adding a field to
        // AncestorHashes will fail to compile here, forcing this impl to be
        // updated to measure it.
        let selectors::parser::AncestorHashes { ref packed_hashes } = *self;
        packed_hashes.size_of(ops)
    }
}
655
656impl<Impl: selectors::parser::SelectorImpl> MallocUnconditionalSizeOf
657 for selectors::parser::Selector<Impl>
658where
659 Impl::NonTSPseudoClass: MallocSizeOf,
660 Impl::PseudoElement: MallocSizeOf,
661{
662 fn unconditional_size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
663 let mut n = 0;
664
665 n += unsafe { ops.malloc_size_of(self.thin_arc_heap_ptr()) };
669 for component in self.iter_raw_match_order() {
670 n += component.size_of(ops);
671 }
672
673 n
674 }
675}
676
677impl<Impl: selectors::parser::SelectorImpl> MallocUnconditionalSizeOf
678 for selectors::parser::SelectorList<Impl>
679where
680 Impl::NonTSPseudoClass: MallocSizeOf,
681 Impl::PseudoElement: MallocSizeOf,
682{
683 fn unconditional_size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
684 let mut n = 0;
685
686 n += unsafe { ops.malloc_size_of(self.thin_arc_heap_ptr()) };
689 if self.len() > 1 {
690 for selector in self.slice().iter() {
691 n += selector.size_of(ops);
692 }
693 }
694 n
695 }
696}
697
impl<Impl: selectors::parser::SelectorImpl> MallocUnconditionalSizeOf
    for selectors::parser::Component<Impl>
where
    Impl::NonTSPseudoClass: MallocSizeOf,
    Impl::PseudoElement: MallocSizeOf,
{
    /// Heap memory owned by a single selector component. "Unconditional"
    /// because nested selectors are measured without a seen-pointer check.
    fn unconditional_size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
        use selectors::parser::Component;

        match self {
            Component::AttributeOther(ref attr_selector) => attr_selector.size_of(ops),
            Component::Negation(ref components) => components.unconditional_size_of(ops),
            Component::NonTSPseudoClass(ref pseudo) => (*pseudo).size_of(ops),
            // ::slotted() and :host() wrap one nested selector.
            Component::Slotted(ref selector) | Component::Host(Some(ref selector)) => {
                selector.unconditional_size_of(ops)
            },
            Component::Is(ref list) | Component::Where(ref list) => list.unconditional_size_of(ops),
            Component::Has(ref relative_selectors) => relative_selectors.size_of(ops),
            Component::NthOf(ref nth_of_data) => nth_of_data.size_of(ops),
            Component::PseudoElement(ref pseudo) => (*pseudo).size_of(ops),
            // The remaining variants are treated as owning no heap memory
            // attributable here (their storage, e.g. interned atoms, is
            // presumably measured elsewhere — verify per variant).
            Component::Combinator(..)
            | Component::ExplicitAnyNamespace
            | Component::ExplicitNoNamespace
            | Component::DefaultNamespace(..)
            | Component::Namespace(..)
            | Component::ExplicitUniversalType
            | Component::LocalName(..)
            | Component::ID(..)
            | Component::Part(..)
            | Component::Class(..)
            | Component::AttributeInNoNamespaceExists { .. }
            | Component::AttributeInNoNamespace { .. }
            | Component::Root
            | Component::Empty
            | Component::Scope
            | Component::ImplicitScope
            | Component::ParentSelector
            | Component::Nth(..)
            | Component::Host(None)
            | Component::RelativeSelectorAnchor
            | Component::Invalid(..) => 0,
        }
    }
}
742
impl<Impl: selectors::parser::SelectorImpl> MallocSizeOf
    for selectors::attr::AttrSelectorWithOptionalNamespace<Impl>
{
    fn size_of(&self, _ops: &mut MallocSizeOfOps) -> usize {
        // Treated as owning no measurable heap memory here — presumably its
        // strings are interned atoms measured elsewhere; verify.
        0
    }
}
750
impl MallocSizeOf for selectors::parser::AnPlusB {
    fn size_of(&self, _ops: &mut MallocSizeOfOps) -> usize {
        // Plain integer data; owns no heap memory.
        0
    }
}
756
impl MallocSizeOf for Void {
    #[inline]
    fn size_of(&self, _ops: &mut MallocSizeOfOps) -> usize {
        // `Void` is uninhabited, so this can never actually run.
        void::unreachable(*self)
    }
}
763
764#[cfg(feature = "servo")]
765impl<Static: string_cache::StaticAtomSet> MallocSizeOf for string_cache::Atom<Static> {
766 fn size_of(&self, _ops: &mut MallocSizeOfOps) -> usize {
767 0
768 }
769}
770
/// Implements `MallocSizeOf` as a constant 0 for types that own no heap
/// memory. The first arm handles concrete types; the second handles generic
/// types, additionally requiring each type parameter to be `MallocSizeOf`.
#[macro_export]
macro_rules! malloc_size_of_is_0(
    ($($ty:ty),+) => (
        $(
            impl $crate::MallocSizeOf for $ty {
                #[inline(always)]
                fn size_of(&self, _: &mut $crate::MallocSizeOfOps) -> usize {
                    0
                }
            }
        )+
    );
    ($($ty:ident<$($gen:ident),+>),+) => (
        $(
            impl<$($gen: $crate::MallocSizeOf),+> $crate::MallocSizeOf for $ty<$($gen),+> {
                #[inline(always)]
                fn size_of(&self, _: &mut $crate::MallocSizeOfOps) -> usize {
                    0
                }
            }
        )+
    );
);
795
// Primitive scalar types own no heap memory.
malloc_size_of_is_0!(bool, char, str);
malloc_size_of_is_0!(u8, u16, u32, u64, u128, usize);
malloc_size_of_is_0!(i8, i16, i32, i64, i128, isize);
malloc_size_of_is_0!(f32, f64);

// Atomics and non-zero integer wrappers are plain inline data.
malloc_size_of_is_0!(std::sync::atomic::AtomicBool);
malloc_size_of_is_0!(std::sync::atomic::AtomicIsize);
malloc_size_of_is_0!(std::sync::atomic::AtomicUsize);
malloc_size_of_is_0!(std::num::NonZeroUsize);
malloc_size_of_is_0!(std::num::NonZeroU64);

// Ranges of primitives are just two inline endpoints.
malloc_size_of_is_0!(Range<u8>, Range<u16>, Range<u32>, Range<u64>, Range<usize>);
malloc_size_of_is_0!(Range<i8>, Range<i16>, Range<i32>, Range<i64>, Range<isize>);
malloc_size_of_is_0!(Range<f32>, Range<f64>);

malloc_size_of_is_0!(app_units::Au);

// Small cssparser value types carry no heap allocations.
malloc_size_of_is_0!(
    cssparser::TokenSerializationType,
    cssparser::SourceLocation,
    cssparser::SourcePosition
);

malloc_size_of_is_0!(selectors::OpaqueElement);
820
/// Transparent wrapper marking a value as heap-measurable; derefs to `T`.
#[derive(Clone)]
pub struct Measurable<T: MallocSizeOf>(pub T);
825
impl<T: MallocSizeOf> Deref for Measurable<T> {
    type Target = T;

    // Expose the wrapped value immutably.
    fn deref(&self) -> &T {
        &self.0
    }
}
833
impl<T: MallocSizeOf> DerefMut for Measurable<T> {
    // Expose the wrapped value mutably.
    fn deref_mut(&mut self) -> &mut T {
        &mut self.0
    }
}