//! A thin atomically-reference-counted slice type (`ArcSlice`).

use serde::de::{Deserialize, Deserializer};
use serde::ser::{Serialize, Serializer};
use servo_arc::ThinArc;
use std::ops::Deref;
use std::ptr::NonNull;
use std::sync::LazyLock;
use std::{
    hash::{Hash, Hasher},
    iter, mem,
};

use malloc_size_of::{MallocSizeOf, MallocSizeOfOps, MallocUnconditionalSizeOf};
/// Sentinel value stored in the header of every `ArcSlice` allocation.
/// Checked in debug builds (see the `Deref` impl) to catch type confusion
/// or use of a pointer that was never an `ArcSlice`.
const ARC_SLICE_CANARY: u64 = 0xf3f3f3f3f3f3f3f3;
/// A thin atomically-reference-counted slice.
///
/// Wraps a `ThinArc` whose header always holds `ARC_SLICE_CANARY`.
/// `#[repr(C)]` so the layout is stable enough to hand raw pointers
/// across FFI (see `forget` / `leaked_empty_ptr`).
#[repr(C)]
#[derive(Debug, Eq, PartialEq, ToShmem)]
pub struct ArcSlice<T>(#[shmem(field_bound)] ThinArc<u64, T>);
impl<T> Deref for ArcSlice<T> {
    type Target = [T];

    #[inline]
    fn deref(&self) -> &Self::Target {
        // Debug-only sanity check: a valid ArcSlice allocation always
        // carries the canary in its header.
        debug_assert_eq!(self.0.header, ARC_SLICE_CANARY);
        self.0.slice()
    }
}
45impl<T> Clone for ArcSlice<T> {
46 fn clone(&self) -> Self {
47 ArcSlice(self.0.clone())
48 }
49}
50
/// A process-lifetime, intentionally-leaked empty slice, shared by every
/// `ArcSlice::default()` call so that empty slices never allocate.
/// Stored as `ArcSlice<u64>` and transmuted to other element types
/// (see the `Default` impl for why that is sound).
static EMPTY_ARC_SLICE: LazyLock<ArcSlice<u64>> =
    LazyLock::new(|| ArcSlice::from_iter_leaked(iter::empty()));
impl<T> Default for ArcSlice<T> {
    /// Returns the shared empty slice without allocating.
    ///
    /// Transmutes the leaked `ArcSlice<u64>` singleton into `ArcSlice<T>`.
    /// This is sound only because the slice is empty (no `T` is ever read
    /// from it) and because `T`'s alignment does not exceed `u64`'s, which
    /// the debug assertion below checks.
    #[allow(unsafe_code)]
    fn default() -> Self {
        debug_assert!(
            mem::align_of::<T>() <= mem::align_of::<u64>(),
            "Need to increase the alignment of EMPTY_ARC_SLICE"
        );
        unsafe {
            // The clone bumps the (leaked) refcount; the transmute only
            // reinterprets the element type of a zero-length slice.
            let empty: ArcSlice<_> = EMPTY_ARC_SLICE.clone();
            let empty: Self = mem::transmute(empty);
            debug_assert_eq!(empty.len(), 0);
            empty
        }
    }
}
71impl<T: Serialize> Serialize for ArcSlice<T> {
72 fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
73 where
74 S: Serializer,
75 {
76 self.deref().serialize(serializer)
77 }
78}
79
80impl<'de, T: Deserialize<'de>> Deserialize<'de> for ArcSlice<T> {
81 fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
82 where
83 D: Deserializer<'de>,
84 {
85 let r = Vec::deserialize(deserializer)?;
86 Ok(ArcSlice::from_iter(r.into_iter()))
87 }
88}
89
impl<T> ArcSlice<T> {
    /// Creates an `ArcSlice` from the items of an exact-size iterator.
    ///
    /// An empty iterator yields the shared empty slice — no allocation.
    #[inline]
    pub fn from_iter<I>(items: I) -> Self
    where
        I: Iterator<Item = T> + ExactSizeIterator,
    {
        if items.len() == 0 {
            // Reuse the leaked empty singleton instead of allocating.
            return Self::default();
        }
        ArcSlice(ThinArc::from_header_and_iter(ARC_SLICE_CANARY, items))
    }

    /// Like `from_iter`, but marks the allocation as intentionally leaked
    /// so leak-checking tooling does not report it. Used to build the
    /// process-lifetime empty-slice singleton.
    #[inline]
    pub fn from_iter_leaked<I>(items: I) -> Self
    where
        I: Iterator<Item = T> + ExactSizeIterator,
    {
        let arc = ThinArc::from_header_and_iter(ARC_SLICE_CANARY, items);
        arc.mark_as_intentionally_leaked();
        ArcSlice(arc)
    }

    /// Converts this slice into an opaque raw pointer, transferring
    /// ownership of one reference count to the caller (typically for FFI).
    #[inline]
    pub fn forget(self) -> ForgottenArcSlicePtr<T> {
        let ret = ForgottenArcSlicePtr(self.0.raw_ptr().cast());
        // Skip Drop: the refcount we held now belongs to the returned pointer.
        mem::forget(self);
        ret
    }

    /// Returns a raw pointer to the shared empty slice, bumping its
    /// refcount and leaking that reference (the singleton is already
    /// intentionally leaked, so this is harmless).
    #[inline]
    pub fn leaked_empty_ptr() -> *mut std::os::raw::c_void {
        let empty: ArcSlice<_> = EMPTY_ARC_SLICE.clone();
        let ptr = empty.0.raw_ptr();
        std::mem::forget(empty);
        ptr.cast().as_ptr()
    }

    /// Returns whether this is the only outstanding reference to the
    /// underlying allocation.
    pub fn is_unique(&self) -> bool {
        self.0.is_unique()
    }
}
impl<T: MallocSizeOf> MallocUnconditionalSizeOf for ArcSlice<T> {
    /// Reports the size of the backing heap allocation plus the heap size
    /// of each element — "unconditional" because it does not check whether
    /// the shared allocation was already counted elsewhere.
    #[allow(unsafe_code)]
    fn unconditional_size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
        // NOTE(review): assumes `heap_ptr()` yields the start of the
        // allocation that `malloc_size_of` can measure — confirm in ThinArc.
        let mut size = unsafe { ops.malloc_size_of(self.0.heap_ptr()) };
        for el in self.iter() {
            size += el.size_of(ops);
        }
        size
    }
}
153impl<T: Hash> Hash for ArcSlice<T> {
154 fn hash<H: Hasher>(&self, state: &mut H) {
155 T::hash_slice(&**self, state)
156 }
157}
158
/// The raw inner pointer of an `ArcSlice` whose reference count has been
/// deliberately forgotten (see `ArcSlice::forget`). `#[repr(C)]` for FFI;
/// `NonNull` guarantees `Option<ForgottenArcSlicePtr<T>>` stays pointer-sized.
#[repr(C)]
pub struct ForgottenArcSlicePtr<T>(NonNull<T>);