// kstring/string.rs

use std::{borrow::Cow, fmt};

use crate::stack::StackString;
use crate::KStringCowBase;
use crate::KStringRef;

pub(crate) type StdString = std::string::String;

/// A UTF-8 encoded, immutable string.
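///
/// # Example
///
/// A small usage sketch (assuming the crate is consumed as `kstring`, per the
/// repository path; this example is illustrative, not from the original docs):
///
/// ```
/// let s = kstring::KString::from_ref("example");
/// assert_eq!(s, "example");
/// ```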
pub type KString = KStringBase<crate::backend::DefaultStr>;

/// A UTF-8 encoded, immutable string.
#[derive(Clone)]
#[repr(transparent)]
pub struct KStringBase<B> {
    inner: KStringInner<B>,
}

impl<B> KStringBase<B> {
    pub const EMPTY: Self = KStringBase::from_static("");

    /// Create a new empty `KStringBase`.
    #[inline]
    #[must_use]
    pub fn new() -> Self {
        Self::EMPTY
    }

    /// Create a reference to `'static` data.
    #[inline]
    #[must_use]
    pub const fn from_static(other: &'static str) -> Self {
        Self {
            inner: KStringInner::from_static(other),
        }
    }

    /// Create an inline string, if possible.
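    ///
    /// Returns `None` when `other` does not fit within the inline capacity,
    /// which varies with the enabled features and the target (an observation
    /// from the implementation below, not the original docs).
    ///
    /// ```
    /// let inlined = kstring::KString::try_inline("hi");
    /// assert!(inlined.is_some());
    /// ```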
    #[inline]
    #[must_use]
    pub fn try_inline(other: &str) -> Option<Self> {
        KStringInner::try_inline(other).map(|inner| Self { inner })
    }
}

impl<B: crate::backend::HeapStr> KStringBase<B> {
    /// Create an owned `KStringBase`.
    #[inline]
    #[must_use]
    pub fn from_boxed(other: crate::backend::BoxedStr) -> Self {
        Self {
            inner: KStringInner::from_boxed(other),
        }
    }

    /// Create an owned `KStringBase`.
    #[inline]
    #[must_use]
    pub fn from_string(other: StdString) -> Self {
        Self {
            inner: KStringInner::from_string(other),
        }
    }

    /// Create an owned `KStringBase` optimally from a reference.
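    ///
    /// Strings that fit the inline capacity are copied onto the stack; longer
    /// ones go through the backend `B` (per `KStringInner::from_ref` below).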
    #[inline]
    #[must_use]
    pub fn from_ref(other: &str) -> Self {
        Self {
            inner: KStringInner::from_ref(other),
        }
    }

    /// Get a reference to the `KStringBase`.
    #[inline]
    #[must_use]
    pub fn as_ref(&self) -> KStringRef<'_> {
        self.inner.as_ref()
    }

    /// Extracts a string slice containing the entire `KStringBase`.
    #[inline]
    #[must_use]
    pub fn as_str(&self) -> &str {
        self.inner.as_str()
    }

    /// Convert to a mutable string type, cloning the data if necessary.
    #[inline]
    #[must_use]
    pub fn into_string(self) -> StdString {
        String::from(self.into_boxed_str())
    }

    /// Convert to a mutable string type, cloning the data if necessary.
    #[inline]
    #[must_use]
    pub fn into_boxed_str(self) -> crate::backend::BoxedStr {
        self.inner.into_boxed_str()
    }

    /// Convert to a `Cow<'static, str>`, borrowing `'static` data and cloning
    /// otherwise.
    #[inline]
    #[must_use]
    pub fn into_cow_str(self) -> Cow<'static, str> {
        self.inner.into_cow_str()
    }
}

impl<B: crate::backend::HeapStr> std::ops::Deref for KStringBase<B> {
    type Target = str;

    #[inline]
    fn deref(&self) -> &str {
        self.as_str()
    }
}

impl<B: crate::backend::HeapStr> Eq for KStringBase<B> {}

impl<B: crate::backend::HeapStr> PartialEq<KStringBase<B>> for KStringBase<B> {
    #[inline]
    fn eq(&self, other: &Self) -> bool {
        PartialEq::eq(self.as_str(), other.as_str())
    }
}

impl<B: crate::backend::HeapStr> PartialEq<str> for KStringBase<B> {
    #[inline]
    fn eq(&self, other: &str) -> bool {
        PartialEq::eq(self.as_str(), other)
    }
}

impl<'s, B: crate::backend::HeapStr> PartialEq<&'s str> for KStringBase<B> {
    #[inline]
    fn eq(&self, other: &&str) -> bool {
        PartialEq::eq(self.as_str(), *other)
    }
}

impl<B: crate::backend::HeapStr> PartialEq<String> for KStringBase<B> {
    #[inline]
    fn eq(&self, other: &StdString) -> bool {
        PartialEq::eq(self.as_str(), other.as_str())
    }
}

impl<B: crate::backend::HeapStr> Ord for KStringBase<B> {
    #[inline]
    fn cmp(&self, other: &Self) -> std::cmp::Ordering {
        self.as_str().cmp(other.as_str())
    }
}

impl<B: crate::backend::HeapStr> PartialOrd for KStringBase<B> {
    #[inline]
    fn partial_cmp(&self, other: &Self) -> Option<std::cmp::Ordering> {
        Some(self.cmp(other))
    }
}

impl<B: crate::backend::HeapStr> std::hash::Hash for KStringBase<B> {
    #[inline]
    fn hash<H: std::hash::Hasher>(&self, state: &mut H) {
        self.as_str().hash(state);
    }
}

impl<B: crate::backend::HeapStr> fmt::Debug for KStringBase<B> {
    #[inline]
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        self.as_str().fmt(f)
    }
}

impl<B: crate::backend::HeapStr> fmt::Display for KStringBase<B> {
    #[inline]
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        fmt::Display::fmt(self.as_str(), f)
    }
}

impl<B: crate::backend::HeapStr> AsRef<str> for KStringBase<B> {
    #[inline]
    fn as_ref(&self) -> &str {
        self.as_str()
    }
}

impl<B: crate::backend::HeapStr> AsRef<[u8]> for KStringBase<B> {
    #[inline]
    fn as_ref(&self) -> &[u8] {
        self.as_bytes()
    }
}

impl<B: crate::backend::HeapStr> AsRef<std::ffi::OsStr> for KStringBase<B> {
    #[inline]
    fn as_ref(&self) -> &std::ffi::OsStr {
        (**self).as_ref()
    }
}

impl<B: crate::backend::HeapStr> AsRef<std::path::Path> for KStringBase<B> {
    #[inline]
    fn as_ref(&self) -> &std::path::Path {
        std::path::Path::new(self)
    }
}

impl<B: crate::backend::HeapStr> std::borrow::Borrow<str> for KStringBase<B> {
    #[inline]
    fn borrow(&self) -> &str {
        self.as_str()
    }
}

impl<B: crate::backend::HeapStr> Default for KStringBase<B> {
    #[inline]
    fn default() -> Self {
        Self::new()
    }
}

impl<'s, B: crate::backend::HeapStr> From<KStringRef<'s>> for KStringBase<B> {
    #[inline]
    fn from(other: KStringRef<'s>) -> Self {
        other.to_owned()
    }
}

impl<'s, B: crate::backend::HeapStr> From<&'s KStringRef<'s>> for KStringBase<B> {
    #[inline]
    fn from(other: &'s KStringRef<'s>) -> Self {
        other.to_owned()
    }
}

impl<'s, B: crate::backend::HeapStr> From<KStringCowBase<'s, B>> for KStringBase<B> {
    #[inline]
    fn from(other: KStringCowBase<'s, B>) -> Self {
        other.into_owned()
    }
}

impl<'s, B: crate::backend::HeapStr> From<&'s KStringCowBase<'s, B>> for KStringBase<B> {
    #[inline]
    fn from(other: &'s KStringCowBase<'s, B>) -> Self {
        other.clone().into_owned()
    }
}

impl<B: crate::backend::HeapStr> From<StdString> for KStringBase<B> {
    #[inline]
    fn from(other: StdString) -> Self {
        Self::from_string(other)
    }
}

impl<'s, B: crate::backend::HeapStr> From<&'s StdString> for KStringBase<B> {
    #[inline]
    fn from(other: &'s StdString) -> Self {
        Self::from_ref(other)
    }
}

impl<B: crate::backend::HeapStr> From<crate::backend::BoxedStr> for KStringBase<B> {
    #[inline]
    fn from(other: crate::backend::BoxedStr) -> Self {
        Self::from_boxed(other)
    }
}

impl<'s, B: crate::backend::HeapStr> From<&'s crate::backend::BoxedStr> for KStringBase<B> {
    #[inline]
    fn from(other: &'s crate::backend::BoxedStr) -> Self {
        Self::from_ref(other)
    }
}

impl<B: crate::backend::HeapStr> From<&'static str> for KStringBase<B> {
    #[inline]
    fn from(other: &'static str) -> Self {
        Self::from_static(other)
    }
}

impl<B: crate::backend::HeapStr> std::str::FromStr for KStringBase<B> {
    type Err = std::convert::Infallible;
    #[inline]
    fn from_str(s: &str) -> Result<Self, Self::Err> {
        Ok(Self::from_ref(s))
    }
}

#[cfg(feature = "serde")]
impl<B: crate::backend::HeapStr> serde::Serialize for KStringBase<B> {
    #[inline]
    fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
    where
        S: serde::Serializer,
    {
        serializer.serialize_str(self.as_str())
    }
}

#[cfg(feature = "serde")]
impl<'de, B: crate::backend::HeapStr> serde::Deserialize<'de> for KStringBase<B> {
    fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
    where
        D: serde::Deserializer<'de>,
    {
        deserializer.deserialize_string(StringVisitor(std::marker::PhantomData))
    }
}

#[cfg(feature = "serde")]
struct StringVisitor<B>(std::marker::PhantomData<B>);

#[cfg(feature = "serde")]
impl<'de, B: crate::backend::HeapStr> serde::de::Visitor<'de> for StringVisitor<B> {
    type Value = KStringBase<B>;

    fn expecting(&self, formatter: &mut fmt::Formatter<'_>) -> fmt::Result {
        formatter.write_str("a string")
    }

    fn visit_str<E>(self, v: &str) -> Result<Self::Value, E>
    where
        E: serde::de::Error,
    {
        Ok(Self::Value::from_ref(v))
    }

    fn visit_string<E>(self, v: String) -> Result<Self::Value, E>
    where
        E: serde::de::Error,
    {
        Ok(Self::Value::from_string(v))
    }

    fn visit_bytes<E>(self, v: &[u8]) -> Result<Self::Value, E>
    where
        E: serde::de::Error,
    {
        match std::str::from_utf8(v) {
            Ok(s) => Ok(Self::Value::from_ref(s)),
            Err(_) => Err(serde::de::Error::invalid_value(
                serde::de::Unexpected::Bytes(v),
                &self,
            )),
        }
    }

    fn visit_byte_buf<E>(self, v: Vec<u8>) -> Result<Self::Value, E>
    where
        E: serde::de::Error,
    {
        match String::from_utf8(v) {
            Ok(s) => Ok(Self::Value::from_string(s)),
            Err(e) => Err(serde::de::Error::invalid_value(
                serde::de::Unexpected::Bytes(&e.into_bytes()),
                &self,
            )),
        }
    }
}

use inner::KStringInner;

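// Two interchangeable implementations of `KStringInner` follow: a safe `enum`
// (the default) and, behind the `unsafe` feature, a manually tagged `union`
// that packs the same three representations into the size of a `String`.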
#[cfg(not(feature = "unsafe"))]
mod inner {
    use super::*;

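    // The three storage strategies: borrowed `'static` data, a small string
    // held inline on the stack, and heap-backed data owned by the backend `B`.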
    pub(super) enum KStringInner<B> {
        Singleton(&'static str),
        Inline(StackString<CAPACITY>),
        Owned(B),
    }

    impl<B> KStringInner<B> {
        /// Create a reference to `'static` data.
        #[inline]
        pub const fn from_static(other: &'static str) -> Self {
            Self::Singleton(other)
        }

        #[inline]
        pub fn try_inline(other: &str) -> Option<Self> {
            StackString::try_new(other).map(Self::Inline)
        }
    }

    impl<B: crate::backend::HeapStr> KStringInner<B> {
        #[inline]
        pub(super) fn from_boxed(other: crate::backend::BoxedStr) -> Self {
            #[allow(clippy::useless_conversion)]
            Self::Owned(B::from_boxed_str(other))
        }

        #[inline]
        pub(super) fn from_string(other: StdString) -> Self {
            if (0..=CAPACITY).contains(&other.len()) {
                let inline = StackString::new(other.as_str());
                Self::Inline(inline)
            } else {
                Self::from_boxed(other.into_boxed_str())
            }
        }

        #[inline]
        pub(super) fn from_ref(other: &str) -> Self {
            if (0..=CAPACITY).contains(&other.len()) {
                let inline = StackString::new(other);
                Self::Inline(inline)
            } else {
                Self::Owned(B::from_str(other))
            }
        }

        #[inline]
        pub(super) fn as_ref(&self) -> KStringRef<'_> {
            match self {
                Self::Singleton(s) => KStringRef::from_static(s),
                Self::Inline(s) => KStringRef::from_ref(s.as_str()),
                Self::Owned(s) => KStringRef::from_ref(s.as_str()),
            }
        }

        #[inline]
        pub(super) fn as_str(&self) -> &str {
            match self {
                Self::Singleton(s) => s,
                Self::Inline(s) => s.as_str(),
                Self::Owned(s) => s.as_str(),
            }
        }

        #[inline]
        pub(super) fn into_boxed_str(self) -> crate::backend::BoxedStr {
            match self {
                Self::Singleton(s) => crate::backend::BoxedStr::from(s),
                Self::Inline(s) => crate::backend::BoxedStr::from(s.as_str()),
                Self::Owned(s) => crate::backend::BoxedStr::from(s.as_str()),
            }
        }

        /// Convert to a Cow str
        #[inline]
        pub(super) fn into_cow_str(self) -> Cow<'static, str> {
            match self {
                Self::Singleton(s) => Cow::Borrowed(s),
                Self::Inline(s) => Cow::Owned(s.as_str().into()),
                Self::Owned(s) => Cow::Owned(s.as_str().into()),
            }
        }
    }

    // Implemented explicitly, and without `#[inline]`, because doing so cuts
    // clone times in half.
    //
    // An automatically derived `clone()` has 10ns of overhead while the explicit
    // `Deref`/`as_str` has none.  Being explicit and removing the `#[inline]`
    // attribute dropped the overhead to 5ns.
    //
    // My only guess is that the `clone()` calls we delegate to are just that much
    // bigger than `as_str()` and that, combined with a jump table, they blow the
    // icache, slowing things down.
    impl<B: Clone> Clone for KStringInner<B> {
        fn clone(&self) -> Self {
            match self {
                Self::Singleton(s) => Self::Singleton(s),
                Self::Inline(s) => Self::Inline(*s),
                Self::Owned(s) => Self::Owned(s.clone()),
            }
        }
    }

477
478    #[allow(unused)]
479    const LEN_SIZE: usize = std::mem::size_of::<crate::stack::Len>();
480
481    #[allow(unused)]
482    const TAG_SIZE: usize = std::mem::size_of::<u8>();
483
484    #[allow(unused)]
485    const MAX_CAPACITY: usize =
486        std::mem::size_of::<crate::string::StdString>() - TAG_SIZE - LEN_SIZE;
487
488    // Performance seems to slow down when trying to occupy all of the padding left by `String`'s
489    // discriminant.  The question is whether faster len=1-16 "allocations" outweighs going to the heap
490    // for len=17-22.
491    #[allow(unused)]
492    const ALIGNED_CAPACITY: usize = std::mem::size_of::<crate::backend::DefaultStr>() - LEN_SIZE;
493
494    #[cfg(feature = "max_inline")]
495    const CAPACITY: usize = MAX_CAPACITY;
496    #[cfg(not(feature = "max_inline"))]
497    const CAPACITY: usize = ALIGNED_CAPACITY;
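
    // For reference (an assumption for a typical 64-bit target with a one-byte
    // `Len`; not asserted here): `StdString` is 24 bytes, so
    // `MAX_CAPACITY = 24 - 1 - 1 = 22`, and with a 16-byte `DefaultStr`,
    // `ALIGNED_CAPACITY = 16 - 1 = 15`.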
}

#[cfg(feature = "unsafe")]
mod inner {
    use super::*;

    #[repr(C)]
    pub(super) union KStringInner<B> {
        tag: TagVariant,
        singleton: SingletonVariant,
        owned: std::mem::ManuallyDrop<OwnedVariant<B>>,
        inline: InlineVariant,
    }
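
    // Layout note: every variant struct below is `#[repr(C)]` with a one-byte
    // `Tag` as its final field, so `tag` sits at the same offset in each
    // variant; `tag()` relies on this, and the `assert_eq_size!` checks below
    // pin each variant to the size of `String`.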

    impl<B> KStringInner<B> {
        /// Create a reference to `'static` data.
        #[inline]
        pub const fn from_static(other: &'static str) -> Self {
            Self {
                singleton: SingletonVariant::new(other),
            }
        }

        #[inline]
        pub fn try_inline(other: &str) -> Option<Self> {
            StackString::try_new(other).map(|inline| Self {
                inline: InlineVariant::new(inline),
            })
        }

        #[inline]
        const fn tag(&self) -> Tag {
            unsafe {
                // SAFETY: `tag` is in the same spot in each variant
                self.tag.tag
            }
        }
    }

    impl<B: crate::backend::HeapStr> KStringInner<B> {
        #[inline]
        pub(super) fn from_boxed(other: crate::backend::BoxedStr) -> Self {
            #[allow(clippy::useless_conversion)]
            let payload = B::from_boxed_str(other);
            Self {
                owned: std::mem::ManuallyDrop::new(OwnedVariant::new(payload)),
            }
        }

        #[inline]
        pub(super) fn from_string(other: StdString) -> Self {
            if (0..=CAPACITY).contains(&other.len()) {
                let payload = unsafe {
                    // SAFETY: range check ensured this is always safe
                    StackString::new_unchecked(other.as_str())
                };
                Self {
                    inline: InlineVariant::new(payload),
                }
            } else {
                Self::from_boxed(other.into_boxed_str())
            }
        }

        #[inline]
        pub(super) fn from_ref(other: &str) -> Self {
            if (0..=CAPACITY).contains(&other.len()) {
                let payload = unsafe {
                    // SAFETY: range check ensured this is always safe
                    StackString::new_unchecked(other)
                };
                Self {
                    inline: InlineVariant::new(payload),
                }
            } else {
                #[allow(clippy::useless_conversion)]
                let payload = B::from_str(other);
                Self {
                    owned: std::mem::ManuallyDrop::new(OwnedVariant::new(payload)),
                }
            }
        }

        #[inline]
        pub(super) fn as_ref(&self) -> KStringRef<'_> {
            let tag = self.tag();
            unsafe {
                // SAFETY: `tag` ensures access to correct variant
                if tag.is_singleton() {
                    KStringRef::from_static(self.singleton.payload)
                } else if tag.is_owned() {
                    KStringRef::from_ref(self.owned.payload.as_str())
                } else {
                    debug_assert!(tag.is_inline());
                    KStringRef::from_ref(self.inline.payload.as_str())
                }
            }
        }

        #[inline]
        pub(super) fn as_str(&self) -> &str {
            let tag = self.tag();
            unsafe {
                // SAFETY: `tag` ensures access to correct variant
                if tag.is_singleton() {
                    self.singleton.payload
                } else if tag.is_owned() {
                    self.owned.payload.as_str()
                } else {
                    debug_assert!(tag.is_inline());
                    self.inline.payload.as_str()
                }
            }
        }

        #[inline]
        pub(super) fn into_boxed_str(self) -> crate::backend::BoxedStr {
            let tag = self.tag();
            unsafe {
                // SAFETY: `tag` ensures access to correct variant
                if tag.is_singleton() {
                    crate::backend::BoxedStr::from(self.singleton.payload)
                } else if tag.is_owned() {
                    crate::backend::BoxedStr::from(self.owned.payload.as_str())
                } else {
                    debug_assert!(tag.is_inline());
                    crate::backend::BoxedStr::from(self.inline.payload.as_ref())
                }
            }
        }

        /// Convert to a Cow str
        #[inline]
        pub(super) fn into_cow_str(self) -> Cow<'static, str> {
            let tag = self.tag();
            unsafe {
                // SAFETY: `tag` ensures access to correct variant
                if tag.is_singleton() {
                    Cow::Borrowed(self.singleton.payload)
                } else if tag.is_owned() {
                    Cow::Owned(self.owned.payload.as_str().into())
                } else {
                    debug_assert!(tag.is_inline());
                    Cow::Owned(self.inline.payload.as_str().into())
                }
            }
        }
    }

    // Implemented explicitly, and without `#[inline]`, because doing so cuts
    // clone times in half.
    //
    // An automatically derived `clone()` has 10ns of overhead while the explicit
    // `Deref`/`as_str` has none.  Being explicit and removing the `#[inline]`
    // attribute dropped the overhead to 5ns.
    //
    // My only guess is that the `clone()` calls we delegate to are just that much
    // bigger than `as_str()` and that, combined with a jump table, they blow the
    // icache, slowing things down.
    impl<B: Clone> Clone for KStringInner<B> {
        fn clone(&self) -> Self {
            let tag = self.tag();
            if tag.is_owned() {
                unsafe {
                    // SAFETY: `tag` ensures access to correct variant
                    Self {
                        owned: std::mem::ManuallyDrop::new(OwnedVariant::new(
                            self.owned.payload.clone(),
                        )),
                    }
                }
            } else {
                unsafe {
                    // SAFETY: `tag` ensures access to correct variant
                    // SAFETY: non-owned types are copyable
                    std::mem::transmute_copy(self)
                }
            }
        }
    }

    impl<B> Drop for KStringInner<B> {
        fn drop(&mut self) {
            let tag = self.tag();
            if tag.is_owned() {
                unsafe {
                    // SAFETY: `tag` ensures we are using the right variant
                    std::mem::ManuallyDrop::drop(&mut self.owned)
                }
            }
        }
    }

    #[allow(unused)]
    const LEN_SIZE: usize = std::mem::size_of::<crate::stack::Len>();

    #[allow(unused)]
    const TAG_SIZE: usize = std::mem::size_of::<Tag>();

    #[allow(unused)]
    const PAYLOAD_SIZE: usize = std::mem::size_of::<crate::backend::DefaultStr>();
    type Payload = Padding<PAYLOAD_SIZE>;

    #[allow(unused)]
    const TARGET_SIZE: usize = std::mem::size_of::<Target>();
    type Target = crate::string::StdString;

    #[allow(unused)]
    const MAX_CAPACITY: usize = TARGET_SIZE - LEN_SIZE - TAG_SIZE;

    // Performance seems to slow down when trying to occupy all of the padding left
    // by `String`'s discriminant.  The question is whether faster len=1-16
    // "allocations" outweigh going to the heap for len=17-22.
    #[allow(unused)]
    const ALIGNED_CAPACITY: usize = PAYLOAD_SIZE - LEN_SIZE;

    #[cfg(feature = "max_inline")]
    const CAPACITY: usize = MAX_CAPACITY;
    #[cfg(not(feature = "max_inline"))]
    const CAPACITY: usize = ALIGNED_CAPACITY;

    const PAYLOAD_PAD_SIZE: usize = TARGET_SIZE - PAYLOAD_SIZE - TAG_SIZE;
    const INLINE_PAD_SIZE: usize = TARGET_SIZE - CAPACITY - LEN_SIZE - TAG_SIZE;
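
    // Worked example (assuming a typical 64-bit target with a one-byte `Len`;
    // not asserted here): `TARGET_SIZE = 24` and `PAYLOAD_SIZE = 16`, so
    // `MAX_CAPACITY = 22`, `ALIGNED_CAPACITY = 15`,
    // `PAYLOAD_PAD_SIZE = 24 - 16 - 1 = 7`, and, with the default
    // `CAPACITY = 15`, `INLINE_PAD_SIZE = 24 - 15 - 1 - 1 = 7`.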

    #[derive(Copy, Clone)]
    #[repr(C)]
    struct TagVariant {
        payload: Payload,
        pad: Padding<PAYLOAD_PAD_SIZE>,
        tag: Tag,
    }
    static_assertions::assert_eq_size!(Target, TagVariant);

    #[derive(Copy, Clone)]
    #[repr(C)]
    struct SingletonVariant {
        payload: &'static str,
        pad: Padding<PAYLOAD_PAD_SIZE>,
        tag: Tag,
    }
    static_assertions::assert_eq_size!(Payload, &'static str);
    static_assertions::assert_eq_size!(Target, SingletonVariant);

    impl SingletonVariant {
        #[inline]
        const fn new(payload: &'static str) -> Self {
            Self {
                payload,
                pad: Padding::new(),
                tag: Tag::SINGLETON,
            }
        }
    }

    impl std::fmt::Debug for SingletonVariant {
        #[inline]
        fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
            self.payload.fmt(f)
        }
    }

    #[derive(Clone)]
    #[repr(C)]
    struct OwnedVariant<B> {
        payload: B,
        pad: Padding<PAYLOAD_PAD_SIZE>,
        tag: Tag,
    }
    static_assertions::assert_eq_size!(Payload, crate::backend::DefaultStr);
    static_assertions::assert_eq_size!(Target, OwnedVariant<crate::backend::DefaultStr>);

    impl<B> OwnedVariant<B> {
        #[inline]
        const fn new(payload: B) -> Self {
            Self {
                payload,
                pad: Padding::new(),
                tag: Tag::OWNED,
            }
        }
    }

    impl<B: crate::backend::HeapStr> std::fmt::Debug for OwnedVariant<B> {
        #[inline]
        fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
            self.payload.fmt(f)
        }
    }

    #[derive(Copy, Clone)]
    #[repr(C)]
    struct InlineVariant {
        payload: StackString<CAPACITY>,
        pad: Padding<INLINE_PAD_SIZE>,
        tag: Tag,
    }
    static_assertions::assert_eq_size!(Target, InlineVariant);

    impl InlineVariant {
        #[inline]
        const fn new(payload: StackString<CAPACITY>) -> Self {
            Self {
                payload,
                pad: Padding::new(),
                tag: Tag::INLINE,
            }
        }
    }

    impl std::fmt::Debug for InlineVariant {
        #[inline]
        fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
            self.payload.fmt(f)
        }
    }

    #[derive(Copy, Clone, PartialEq, Eq)]
    #[repr(transparent)]
    struct Tag(u8);

    impl Tag {
        const SINGLETON: Tag = Tag(0);
        const OWNED: Tag = Tag(u8::MAX);
        const INLINE: Tag = Tag(1);

        #[inline]
        const fn is_singleton(self) -> bool {
            self.0 == Self::SINGLETON.0
        }

        #[inline]
        const fn is_owned(self) -> bool {
            self.0 == Self::OWNED.0
        }

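        // Only the three constants above are ever constructed, so "neither
        // singleton nor owned" is equivalent to `INLINE` in practice (an
        // observation from this file, not a documented invariant).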
        #[inline]
        const fn is_inline(self) -> bool {
            !self.is_singleton() && !self.is_owned()
        }
    }

    #[derive(Copy, Clone)]
    #[repr(transparent)]
    struct Padding<const L: usize>([std::mem::MaybeUninit<u8>; L]);

    impl<const L: usize> Padding<L> {
        const fn new() -> Self {
            let padding = unsafe {
                // SAFETY: Padding, never actually used
                std::mem::MaybeUninit::uninit().assume_init()
            };
            Self(padding)
        }
    }

    impl<const L: usize> Default for Padding<L> {
        fn default() -> Self {
            Self::new()
        }
    }
}

#[cfg(test)]
mod test {
    use super::*;

    #[test]
    fn test_size() {
        println!("KString: {}", std::mem::size_of::<KString>());
    }
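
    #[test]
    fn test_roundtrip() {
        // A minimal additional sketch exercising the public constructors
        // (a hypothetical test, not part of the original suite).
        let s = KString::from_ref("example");
        assert_eq!(s, "example");
        assert_eq!(s.clone().into_string(), "example");
    }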
}