1extern crate app_units;
50extern crate cssparser;
51extern crate euclid;
52extern crate selectors;
53extern crate servo_arc;
54extern crate smallbitvec;
55extern crate smallvec;
56extern crate void;
57
58use std::hash::{BuildHasher, Hash};
59use std::mem::size_of;
60use std::ops::Range;
61use std::ops::{Deref, DerefMut};
62use std::os::raw::c_void;
63use void::Void;
64
/// A C function that takes a pointer to a heap allocation and returns its size.
type VoidPtrToSizeFn = unsafe extern "C" fn(ptr: *const c_void) -> usize;
67
/// A stateful closure that implements a predicate on pointers.
type VoidPtrToBoolFnMut = dyn FnMut(*const c_void) -> bool;
70
/// Operations used when measuring heap usage of data structures.
pub struct MallocSizeOfOps {
    /// A function that returns the size of a heap allocation.
    size_of_op: VoidPtrToSizeFn,

    /// Like `size_of_op`, but can take an interior pointer. Optional because
    /// not all heap allocators support it.
    enclosing_size_of_op: Option<VoidPtrToSizeFn>,

    /// Check if a pointer has been seen before, remembering it for next time.
    /// Used by the "conditional" measurements to avoid double-counting
    /// reference-counted allocations. Optional because many callers don't
    /// need it.
    have_seen_ptr_op: Option<Box<VoidPtrToBoolFnMut>>,
}
87
88impl MallocSizeOfOps {
89 pub fn new(
90 size_of: VoidPtrToSizeFn,
91 malloc_enclosing_size_of: Option<VoidPtrToSizeFn>,
92 have_seen_ptr: Option<Box<VoidPtrToBoolFnMut>>,
93 ) -> Self {
94 MallocSizeOfOps {
95 size_of_op: size_of,
96 enclosing_size_of_op: malloc_enclosing_size_of,
97 have_seen_ptr_op: have_seen_ptr,
98 }
99 }
100
101 fn is_empty<T: ?Sized>(ptr: *const T) -> bool {
104 return ptr as *const usize as usize <= 256;
112 }
113
114 pub unsafe fn malloc_size_of<T: ?Sized>(&self, ptr: *const T) -> usize {
117 if MallocSizeOfOps::is_empty(ptr) {
118 0
119 } else {
120 (self.size_of_op)(ptr as *const c_void)
121 }
122 }
123
124 pub fn has_malloc_enclosing_size_of(&self) -> bool {
126 self.enclosing_size_of_op.is_some()
127 }
128
129 pub unsafe fn malloc_enclosing_size_of<T>(&self, ptr: *const T) -> usize {
132 assert!(!MallocSizeOfOps::is_empty(ptr));
133 (self.enclosing_size_of_op.unwrap())(ptr as *const c_void)
134 }
135
136 pub fn have_seen_ptr<T>(&mut self, ptr: *const T) -> bool {
138 let have_seen_ptr_op = self
139 .have_seen_ptr_op
140 .as_mut()
141 .expect("missing have_seen_ptr_op");
142 have_seen_ptr_op(ptr as *const c_void)
143 }
144}
145
/// Trait for measuring the "deep" heap usage of a value: everything it owns
/// on the heap, but not the space taken by the value itself.
pub trait MallocSizeOf {
    /// Measure the heap usage of all descendant heap-allocated structures.
    fn size_of(&self, ops: &mut MallocSizeOfOps) -> usize;
}
153
/// Trait for measuring the "shallow" heap usage of a container: its own
/// allocation(s), but not those of its contents.
pub trait MallocShallowSizeOf {
    /// Measure only the heap blocks directly owned by `self`.
    fn shallow_size_of(&self, ops: &mut MallocSizeOfOps) -> usize;
}
162
/// Like `MallocSizeOf`, but measures unconditionally, without checking
/// whether the allocation has already been counted (relevant for
/// reference-counted types).
pub trait MallocUnconditionalSizeOf {
    /// Measure the heap usage of the value, regardless of prior sightings.
    fn unconditional_size_of(&self, ops: &mut MallocSizeOfOps) -> usize;
}
171
/// `MallocUnconditionalSizeOf` combined with `MallocShallowSizeOf`: measure
/// only the value's own allocation, unconditionally.
pub trait MallocUnconditionalShallowSizeOf {
    /// Measure the heap blocks directly owned, regardless of prior sightings.
    fn unconditional_shallow_size_of(&self, ops: &mut MallocSizeOfOps) -> usize;
}
177
/// Like `MallocSizeOf`, but only measures an allocation if it hasn't been
/// seen before (via `MallocSizeOfOps::have_seen_ptr`), so shared
/// reference-counted values aren't double-counted.
pub trait MallocConditionalSizeOf {
    /// Measure heap usage, skipping allocations already seen.
    fn conditional_size_of(&self, ops: &mut MallocSizeOfOps) -> usize;
}
187
/// `MallocConditionalSizeOf` combined with `MallocShallowSizeOf`: measure
/// only the value's own allocation, and only if not seen before.
pub trait MallocConditionalShallowSizeOf {
    /// Measure directly-owned heap blocks, skipping ones already seen.
    fn conditional_shallow_size_of(&self, ops: &mut MallocSizeOfOps) -> usize;
}
193
impl MallocSizeOf for String {
    fn size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
        // Measure the heap buffer. An unallocated String's `as_ptr` is a
        // small dangling pointer, which `malloc_size_of` treats as empty.
        unsafe { ops.malloc_size_of(self.as_ptr()) }
    }
}
199
impl<'a, T: ?Sized> MallocSizeOf for &'a T {
    fn size_of(&self, _ops: &mut MallocSizeOfOps) -> usize {
        // A non-owning reference contributes nothing; the referent is
        // presumably measured wherever it is owned.
        0
    }
}
206
impl<T: ?Sized> MallocShallowSizeOf for Box<T> {
    fn shallow_size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
        // The box's own heap allocation.
        unsafe { ops.malloc_size_of(&**self) }
    }
}
212
impl<T: MallocSizeOf + ?Sized> MallocSizeOf for Box<T> {
    fn size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
        // The box's allocation plus whatever its contents own.
        self.shallow_size_of(ops) + (**self).size_of(ops)
    }
}
218
impl MallocSizeOf for () {
    fn size_of(&self, _ops: &mut MallocSizeOfOps) -> usize {
        // The unit type owns nothing.
        0
    }
}
224
// Tuples: sum of the members' deep sizes.
impl<T1, T2> MallocSizeOf for (T1, T2)
where
    T1: MallocSizeOf,
    T2: MallocSizeOf,
{
    fn size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
        self.0.size_of(ops) + self.1.size_of(ops)
    }
}
234
// Tuples: sum of the members' deep sizes.
impl<T1, T2, T3> MallocSizeOf for (T1, T2, T3)
where
    T1: MallocSizeOf,
    T2: MallocSizeOf,
    T3: MallocSizeOf,
{
    fn size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
        self.0.size_of(ops) + self.1.size_of(ops) + self.2.size_of(ops)
    }
}
245
// Tuples: sum of the members' deep sizes.
impl<T1, T2, T3, T4> MallocSizeOf for (T1, T2, T3, T4)
where
    T1: MallocSizeOf,
    T2: MallocSizeOf,
    T3: MallocSizeOf,
    T4: MallocSizeOf,
{
    fn size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
        self.0.size_of(ops) + self.1.size_of(ops) + self.2.size_of(ops) + self.3.size_of(ops)
    }
}
257
258impl<T: MallocSizeOf> MallocSizeOf for Option<T> {
259 fn size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
260 if let Some(val) = self.as_ref() {
261 val.size_of(ops)
262 } else {
263 0
264 }
265 }
266}
267
268impl<T: MallocSizeOf, E: MallocSizeOf> MallocSizeOf for Result<T, E> {
269 fn size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
270 match *self {
271 Ok(ref x) => x.size_of(ops),
272 Err(ref e) => e.size_of(ops),
273 }
274 }
275}
276
impl<T: MallocSizeOf + Copy> MallocSizeOf for std::cell::Cell<T> {
    fn size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
        // The `Copy` bound lets us read the value out to measure it.
        self.get().size_of(ops)
    }
}
282
impl<T: MallocSizeOf> MallocSizeOf for std::cell::RefCell<T> {
    fn size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
        // Panics if the cell is currently mutably borrowed.
        self.borrow().size_of(ops)
    }
}
288
289impl<'a, B: ?Sized + ToOwned> MallocSizeOf for std::borrow::Cow<'a, B>
290where
291 B::Owned: MallocSizeOf,
292{
293 fn size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
294 match *self {
295 std::borrow::Cow::Borrowed(_) => 0,
296 std::borrow::Cow::Owned(ref b) => b.size_of(ops),
297 }
298 }
299}
300
301impl<T: MallocSizeOf> MallocSizeOf for [T] {
302 fn size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
303 let mut n = 0;
304 for elem in self.iter() {
305 n += elem.size_of(ops);
306 }
307 n
308 }
309}
310
impl<T> MallocShallowSizeOf for Vec<T> {
    fn shallow_size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
        // Just the backing buffer; the dangling pointer of an unallocated
        // Vec is filtered out inside `malloc_size_of`.
        unsafe { ops.malloc_size_of(self.as_ptr()) }
    }
}
316
317impl<T: MallocSizeOf> MallocSizeOf for Vec<T> {
318 fn size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
319 let mut n = self.shallow_size_of(ops);
320 for elem in self.iter() {
321 n += elem.size_of(ops);
322 }
323 n
324 }
325}
326
impl<T> MallocShallowSizeOf for std::collections::VecDeque<T> {
    fn shallow_size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
        if ops.has_malloc_enclosing_size_of() {
            if let Some(front) = self.front() {
                // The front element is an interior pointer into the deque's
                // heap buffer, which is what `malloc_enclosing_size_of` needs.
                unsafe { ops.malloc_enclosing_size_of(&*front) }
            } else {
                // An empty deque may still hold a buffer, but with no element
                // to point at we conservatively report zero.
                0
            }
        } else {
            // Fall back to an estimate derived from the capacity.
            self.capacity() * size_of::<T>()
        }
    }
}
343
344impl<T: MallocSizeOf> MallocSizeOf for std::collections::VecDeque<T> {
345 fn size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
346 let mut n = self.shallow_size_of(ops);
347 for elem in self.iter() {
348 n += elem.size_of(ops);
349 }
350 n
351 }
352}
353
impl<A: smallvec::Array> MallocShallowSizeOf for smallvec::SmallVec<A> {
    fn shallow_size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
        if self.spilled() {
            // Elements have moved to a heap buffer.
            unsafe { ops.malloc_size_of(self.as_ptr()) }
        } else {
            // Still using the inline array: no heap storage.
            0
        }
    }
}
363
364impl<A> MallocSizeOf for smallvec::SmallVec<A>
365where
366 A: smallvec::Array,
367 A::Item: MallocSizeOf,
368{
369 fn size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
370 let mut n = self.shallow_size_of(ops);
371 for elem in self.iter() {
372 n += elem.size_of(ops);
373 }
374 n
375 }
376}
377
impl<T> MallocShallowSizeOf for thin_vec::ThinVec<T> {
    fn shallow_size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
        if self.capacity() == 0 {
            // Capacity zero means no owned heap buffer -- presumably the
            // shared empty singleton; it must not be measured.
            return 0;
        }

        // A ThinVec is a single pointer to its allocation; assert that
        // layout, then reinterpret the struct itself to recover the pointer.
        assert_eq!(
            std::mem::size_of::<Self>(),
            std::mem::size_of::<*const ()>()
        );
        unsafe { ops.malloc_size_of(*(self as *const Self as *const *const ())) }
    }
}
392
393impl<T: MallocSizeOf> MallocSizeOf for thin_vec::ThinVec<T> {
394 fn size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
395 let mut n = self.shallow_size_of(ops);
396 for elem in self.iter() {
397 n += elem.size_of(ops);
398 }
399 n
400 }
401}
402
/// Implements `MallocShallowSizeOf` and `MallocSizeOf` for a hash-set-like
/// type exposing `HashSet`'s API.
macro_rules! malloc_size_of_hash_set {
    ($ty:ty) => {
        impl<T, S> MallocShallowSizeOf for $ty
        where
            T: Eq + Hash,
            S: BuildHasher,
        {
            fn shallow_size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
                if ops.has_malloc_enclosing_size_of() {
                    // Any element's address is an interior pointer into the
                    // table's allocation.
                    self.iter()
                        .next()
                        .map_or(0, |t| unsafe { ops.malloc_enclosing_size_of(t) })
                } else {
                    // Estimate from capacity; the extra `usize` approximates
                    // per-entry bookkeeping -- TODO(review) confirm against
                    // the actual table layout.
                    self.capacity() * (size_of::<T>() + size_of::<usize>())
                }
            }
        }

        impl<T, S> MallocSizeOf for $ty
        where
            T: Eq + Hash + MallocSizeOf,
            S: BuildHasher,
        {
            fn size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
                // Table allocation plus everything the elements own.
                let mut n = self.shallow_size_of(ops);
                for t in self.iter() {
                    n += t.size_of(ops);
                }
                n
            }
        }
    };
}
441
// Measurement support for the standard library's hash set.
malloc_size_of_hash_set!(std::collections::HashSet<T, S>);
443
/// Implements `MallocShallowSizeOf` and `MallocSizeOf` for a hash-map-like
/// type exposing `HashMap`'s API.
macro_rules! malloc_size_of_hash_map {
    ($ty:ty) => {
        impl<K, V, S> MallocShallowSizeOf for $ty
        where
            K: Eq + Hash,
            S: BuildHasher,
        {
            fn shallow_size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
                if ops.has_malloc_enclosing_size_of() {
                    // Any value's address is an interior pointer into the
                    // table's allocation.
                    self.values()
                        .next()
                        .map_or(0, |v| unsafe { ops.malloc_enclosing_size_of(v) })
                } else {
                    // Estimate from capacity; the extra `usize` approximates
                    // per-entry bookkeeping -- TODO(review) confirm against
                    // the actual table layout.
                    self.capacity() * (size_of::<V>() + size_of::<K>() + size_of::<usize>())
                }
            }
        }

        impl<K, V, S> MallocSizeOf for $ty
        where
            K: Eq + Hash + MallocSizeOf,
            V: MallocSizeOf,
            S: BuildHasher,
        {
            fn size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
                // Table allocation plus everything keys and values own.
                let mut n = self.shallow_size_of(ops);
                for (k, v) in self.iter() {
                    n += k.size_of(ops);
                    n += v.size_of(ops);
                }
                n
            }
        }
    };
}
480
// Measurement support for the standard library's hash map.
malloc_size_of_hash_map!(std::collections::HashMap<K, V, S>);
482
impl<K, V> MallocShallowSizeOf for std::collections::BTreeMap<K, V>
where
    K: Eq + Hash,
{
    fn shallow_size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
        if ops.has_malloc_enclosing_size_of() {
            // NOTE(review): this measures only the allocation enclosing the
            // first value -- for a multi-node tree this presumably
            // underestimates; confirm intent.
            self.values()
                .next()
                .map_or(0, |v| unsafe { ops.malloc_enclosing_size_of(v) })
        } else {
            // Rough estimate from the entry count; ignores per-node overhead.
            self.len() * (size_of::<V>() + size_of::<K>() + size_of::<usize>())
        }
    }
}
497
498impl<K, V> MallocSizeOf for std::collections::BTreeMap<K, V>
499where
500 K: Eq + Hash + MallocSizeOf,
501 V: MallocSizeOf,
502{
503 fn size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
504 let mut n = self.shallow_size_of(ops);
505 for (k, v) in self.iter() {
506 n += k.size_of(ops);
507 n += v.size_of(ops);
508 }
509 n
510 }
511}
512
impl<T> MallocSizeOf for std::marker::PhantomData<T> {
    fn size_of(&self, _ops: &mut MallocSizeOfOps) -> usize {
        // Zero-sized marker: owns nothing.
        0
    }
}
519
impl<T> MallocUnconditionalShallowSizeOf for servo_arc::Arc<T> {
    fn unconditional_shallow_size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
        // The Arc's heap block, counted every time this is called.
        unsafe { ops.malloc_size_of(self.heap_ptr()) }
    }
}
532
impl<T: MallocSizeOf> MallocUnconditionalSizeOf for servo_arc::Arc<T> {
    fn unconditional_size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
        // Arc allocation plus whatever the pointed-to value owns.
        self.unconditional_shallow_size_of(ops) + (**self).size_of(ops)
    }
}
538
// Counts the Arc's allocation only the first time its pointer is seen,
// so shared Arcs are not double-counted.
impl<T> MallocConditionalShallowSizeOf for servo_arc::Arc<T> {
    fn conditional_shallow_size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
        if ops.have_seen_ptr(self.heap_ptr()) {
            0
        } else {
            self.unconditional_shallow_size_of(ops)
        }
    }
}
548
// Deep variant of the above: measured only on first sighting of the pointer.
impl<T: MallocSizeOf> MallocConditionalSizeOf for servo_arc::Arc<T> {
    fn conditional_size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
        if ops.have_seen_ptr(self.heap_ptr()) {
            0
        } else {
            self.unconditional_size_of(ops)
        }
    }
}
558
impl<T: MallocSizeOf> MallocSizeOf for std::sync::Mutex<T> {
    fn size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
        // Locks to reach the contents; panics if the mutex is poisoned.
        (*self.lock().unwrap()).size_of(ops)
    }
}
570
571impl MallocSizeOf for smallbitvec::SmallBitVec {
572 fn size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
573 if let Some(ptr) = self.heap_ptr() {
574 unsafe { ops.malloc_size_of(ptr) }
575 } else {
576 0
577 }
578 }
579}
580
// Delegates to the wrapped scalar.
impl<T: MallocSizeOf, Unit> MallocSizeOf for euclid::Length<T, Unit> {
    fn size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
        self.0.size_of(ops)
    }
}
586
// Delegates to the wrapped scale factor.
impl<T: MallocSizeOf, Src, Dst> MallocSizeOf for euclid::Scale<T, Src, Dst> {
    fn size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
        self.0.size_of(ops)
    }
}
592
// Sum of the coordinate components.
impl<T: MallocSizeOf, U> MallocSizeOf for euclid::Point2D<T, U> {
    fn size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
        self.x.size_of(ops) + self.y.size_of(ops)
    }
}
598
// Sum of origin and size.
impl<T: MallocSizeOf, U> MallocSizeOf for euclid::Rect<T, U> {
    fn size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
        self.origin.size_of(ops) + self.size.size_of(ops)
    }
}
604
// Sum of the four side offsets.
impl<T: MallocSizeOf, U> MallocSizeOf for euclid::SideOffsets2D<T, U> {
    fn size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
        self.top.size_of(ops)
            + self.right.size_of(ops)
            + self.bottom.size_of(ops)
            + self.left.size_of(ops)
    }
}
613
// Sum of width and height.
impl<T: MallocSizeOf, U> MallocSizeOf for euclid::Size2D<T, U> {
    fn size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
        self.width.size_of(ops) + self.height.size_of(ops)
    }
}
619
// Sum of all six matrix components.
impl<T: MallocSizeOf, Src, Dst> MallocSizeOf for euclid::Transform2D<T, Src, Dst> {
    fn size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
        self.m11.size_of(ops)
            + self.m12.size_of(ops)
            + self.m21.size_of(ops)
            + self.m22.size_of(ops)
            + self.m31.size_of(ops)
            + self.m32.size_of(ops)
    }
}
630
// Sum of all sixteen matrix components.
impl<T: MallocSizeOf, Src, Dst> MallocSizeOf for euclid::Transform3D<T, Src, Dst> {
    fn size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
        self.m11.size_of(ops)
            + self.m12.size_of(ops)
            + self.m13.size_of(ops)
            + self.m14.size_of(ops)
            + self.m21.size_of(ops)
            + self.m22.size_of(ops)
            + self.m23.size_of(ops)
            + self.m24.size_of(ops)
            + self.m31.size_of(ops)
            + self.m32.size_of(ops)
            + self.m33.size_of(ops)
            + self.m34.size_of(ops)
            + self.m41.size_of(ops)
            + self.m42.size_of(ops)
            + self.m43.size_of(ops)
            + self.m44.size_of(ops)
    }
}
651
// Sum of the vector components.
impl<T: MallocSizeOf, U> MallocSizeOf for euclid::Vector2D<T, U> {
    fn size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
        self.x.size_of(ops) + self.y.size_of(ops)
    }
}
657
impl MallocSizeOf for selectors::parser::AncestorHashes {
    fn size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
        // Exhaustive destructuring guarantees a compile error here if a new
        // field is added without being measured.
        let selectors::parser::AncestorHashes { ref packed_hashes } = *self;
        packed_hashes.size_of(ops)
    }
}
664
impl<Impl: selectors::parser::SelectorImpl> MallocUnconditionalSizeOf
    for selectors::parser::Selector<Impl>
where
    Impl::NonTSPseudoClass: MallocSizeOf,
    Impl::PseudoElement: MallocSizeOf,
{
    fn unconditional_size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
        let mut n = 0;

        // The heap allocation behind the selector's thin-arc pointer.
        n += unsafe { ops.malloc_size_of(self.thin_arc_heap_ptr()) };

        // Plus anything the individual components own on the heap.
        for component in self.iter_raw_match_order() {
            n += component.size_of(ops);
        }

        n
    }
}
685
impl<Impl: selectors::parser::SelectorImpl> MallocUnconditionalSizeOf
    for selectors::parser::SelectorList<Impl>
where
    Impl::NonTSPseudoClass: MallocSizeOf,
    Impl::PseudoElement: MallocSizeOf,
{
    fn unconditional_size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
        let mut n = 0;

        // The list's own thin-arc allocation.
        n += unsafe { ops.malloc_size_of(self.thin_arc_heap_ptr()) };
        // NOTE(review): single-element lists are skipped here -- presumably
        // the lone selector shares the list's allocation; confirm against
        // `SelectorList`'s representation.
        if self.len() > 1 {
            for selector in self.slice().iter() {
                n += selector.size_of(ops);
            }
        }
        n
    }
}
706
// Deep measurement of a single selector component. Only heap-owning variants
// contribute; data stored inline in the component is covered by the
// enclosing selector's allocation.
impl<Impl: selectors::parser::SelectorImpl> MallocUnconditionalSizeOf
    for selectors::parser::Component<Impl>
where
    Impl::NonTSPseudoClass: MallocSizeOf,
    Impl::PseudoElement: MallocSizeOf,
{
    fn unconditional_size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
        use selectors::parser::Component;

        match self {
            Component::AttributeOther(ref attr_selector) => attr_selector.size_of(ops),
            Component::Negation(ref components) => components.unconditional_size_of(ops),
            Component::NonTSPseudoClass(ref pseudo) => (*pseudo).size_of(ops),
            // Nested selectors are measured unconditionally, matching this
            // trait's contract.
            Component::Slotted(ref selector) | Component::Host(Some(ref selector)) => {
                selector.unconditional_size_of(ops)
            },
            Component::Is(ref list) | Component::Where(ref list) => list.unconditional_size_of(ops),
            Component::Has(ref relative_selectors) => relative_selectors.size_of(ops),
            Component::NthOf(ref nth_of_data) => nth_of_data.size_of(ops),
            Component::PseudoElement(ref pseudo) => (*pseudo).size_of(ops),
            // The remaining variants carry no measured heap data.
            Component::Combinator(..)
            | Component::ExplicitAnyNamespace
            | Component::ExplicitNoNamespace
            | Component::DefaultNamespace(..)
            | Component::Namespace(..)
            | Component::ExplicitUniversalType
            | Component::LocalName(..)
            | Component::ID(..)
            | Component::Part(..)
            | Component::Class(..)
            | Component::AttributeInNoNamespaceExists { .. }
            | Component::AttributeInNoNamespace { .. }
            | Component::Root
            | Component::Empty
            | Component::Scope
            | Component::ImplicitScope
            | Component::ParentSelector
            | Component::Nth(..)
            | Component::Host(None)
            | Component::RelativeSelectorAnchor
            | Component::Invalid(..) => 0,
        }
    }
}
751
impl<Impl: selectors::parser::SelectorImpl> MallocSizeOf
    for selectors::attr::AttrSelectorWithOptionalNamespace<Impl>
{
    fn size_of(&self, _ops: &mut MallocSizeOfOps) -> usize {
        // Treated as heap-free -- presumably its strings are interned/shared
        // and measured elsewhere; TODO(review) confirm.
        0
    }
}
759
impl MallocSizeOf for selectors::parser::AnPlusB {
    fn size_of(&self, _ops: &mut MallocSizeOfOps) -> usize {
        // Plain value type: no heap data attributed here.
        0
    }
}
765
impl MallocSizeOf for Void {
    #[inline]
    fn size_of(&self, _ops: &mut MallocSizeOfOps) -> usize {
        // `Void` is uninhabited, so this method can never actually run.
        void::unreachable(*self)
    }
}
772
#[cfg(feature = "servo")]
impl<Static: string_cache::StaticAtomSet> MallocSizeOf for string_cache::Atom<Static> {
    fn size_of(&self, _ops: &mut MallocSizeOfOps) -> usize {
        // Atoms live in a shared interning table, so no per-value heap
        // usage is attributed here -- the table is measured elsewhere,
        // presumably.
        0
    }
}
779
/// Implements `MallocSizeOf` to return 0 for each listed type; for types
/// that own no heap allocations. The second arm handles generic types,
/// requiring `MallocSizeOf` of their type parameters.
#[macro_export]
macro_rules! malloc_size_of_is_0(
    ($($ty:ty),+) => (
        $(
            impl $crate::MallocSizeOf for $ty {
                #[inline(always)]
                fn size_of(&self, _: &mut $crate::MallocSizeOfOps) -> usize {
                    0
                }
            }
        )+
    );
    ($($ty:ident<$($gen:ident),+>),+) => (
        $(
            impl<$($gen: $crate::MallocSizeOf),+> $crate::MallocSizeOf for $ty<$($gen),+> {
                #[inline(always)]
                fn size_of(&self, _: &mut $crate::MallocSizeOfOps) -> usize {
                    0
                }
            }
        )+
    );
);
804
// Primitive types own no heap memory.
malloc_size_of_is_0!(bool, char, str);
malloc_size_of_is_0!(u8, u16, u32, u64, u128, usize);
malloc_size_of_is_0!(i8, i16, i32, i64, i128, isize);
malloc_size_of_is_0!(f32, f64);

// Atomics and non-zero integers are plain values.
malloc_size_of_is_0!(std::sync::atomic::AtomicBool);
malloc_size_of_is_0!(std::sync::atomic::AtomicIsize);
malloc_size_of_is_0!(std::sync::atomic::AtomicUsize);
malloc_size_of_is_0!(std::num::NonZeroUsize);
malloc_size_of_is_0!(std::num::NonZeroU64);

// Ranges of primitives are just pairs of endpoints.
malloc_size_of_is_0!(Range<u8>, Range<u16>, Range<u32>, Range<u64>, Range<usize>);
malloc_size_of_is_0!(Range<i8>, Range<i16>, Range<i32>, Range<i64>, Range<isize>);
malloc_size_of_is_0!(Range<f32>, Range<f64>);

malloc_size_of_is_0!(app_units::Au);

// cssparser position/location types are plain value types.
malloc_size_of_is_0!(
    cssparser::TokenSerializationType,
    cssparser::SourceLocation,
    cssparser::SourcePosition
);

malloc_size_of_is_0!(selectors::OpaqueElement);
829
/// A newtype wrapper around a measurable value, dereferencing transparently
/// to the wrapped `T`.
#[derive(Clone)]
pub struct Measurable<T: MallocSizeOf>(pub T);
834
// Allow a `Measurable<T>` to be used transparently as a `T`.
impl<T: MallocSizeOf> Deref for Measurable<T> {
    type Target = T;

    fn deref(&self) -> &T {
        &self.0
    }
}
842
// Mutable counterpart of the `Deref` impl above.
impl<T: MallocSizeOf> DerefMut for Measurable<T> {
    fn deref_mut(&mut self) -> &mut T {
        &mut self.0
    }
}