#![no_std]

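//! A bit vector that is one machine word wide: small contents are stored
//! inline in that word, while larger contents spill to a heap allocation.
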
extern crate alloc;

use alloc::{boxed::Box, vec, vec::Vec};

use core::cmp::max;
use core::fmt;
use core::hash;
use core::mem::{forget, replace, size_of};
use core::ops::{Index, Range};
use core::slice;

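/// Creates a [`SmallBitVec`] containing the given elements, like the
/// standard `vec!` macro.
///
/// A minimal usage sketch; the doctest assumes this crate is compiled under
/// the name `smallbitvec` (the `$crate` paths below work either way):
///
/// ```
/// use smallbitvec::sbvec;
///
/// let v = sbvec![true, false, true];
/// assert_eq!(v.len(), 3);
///
/// // `sbvec![elem; n]` repeats `elem` `n` times.
/// let w = sbvec![true; 5];
/// assert!(w.into_iter().all(|b| b));
/// ```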
#[macro_export]
macro_rules! sbvec {
    ($elem:expr; $n:expr) => (
        $crate::SmallBitVec::from_elem($n, $elem)
    );
    ($($x:expr),*) => (
        [$($x),*].iter().cloned().collect::<$crate::SmallBitVec>()
    );
    ($($x:expr,)*) => (
        sbvec![$($x),*]
    );
}

// `debug_assert!` cannot be used inside a `const fn`, so this macro fakes it
// with an indexing trick: indexing the array `[(); $right + 1]` with `$left`
// fails whenever `$left > $right` (at compile time in const contexts, or
// with a panic in debug builds). It compiles to nothing in release builds.
macro_rules! const_debug_assert_le {
    ($left: ident <= $right: expr) => {
        #[cfg(debug_assertions)]
        [(); $right + 1][$left];
    };
}

#[cfg(test)]
mod tests;

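/// A bit vector optimized for size and inline storage.
///
/// A `SmallBitVec` is exactly one word wide. The low bit of `data` is the
/// heap flag: when clear, the bits live inline in `data` itself, terminated
/// by a sentinel `1` bit; when set, the rest of `data` is a pointer to a
/// heap allocation.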
pub struct SmallBitVec {
    data: usize,
}

/// Total number of bits in one `usize` word.
#[inline(always)]
const fn inline_bits() -> usize {
    size_of::<usize>() * 8
}

/// Number of bits available for inline storage: one bit is reserved for the
/// heap flag and one for the sentinel that marks the end of the vector.
#[inline(always)]
const fn inline_capacity() -> usize {
    inline_bits() - 2
}

/// Left-shift amount that positions inline bit `n`, counting from the most
/// significant bit of the word.
#[inline(always)]
const fn inline_shift(n: usize) -> usize {
    const_debug_assert_le!(n <= inline_capacity());
    inline_bits() - 1 - n
}

/// A mask with a single bit set at inline position `n`.
#[inline(always)]
const fn inline_index(n: usize) -> usize {
    1 << inline_shift(n)
}

/// A mask with the first `n` inline bit positions (the most significant
/// bits of the word) set.
#[inline(always)]
fn inline_ones(n: usize) -> usize {
    if n == 0 {
        0
    } else {
        !0 << (inline_bits() - n)
    }
}

/// If the rightmost bit of `data` is set, the vector is spilled and the
/// remaining bits of `data` are a pointer to the heap allocation.
const HEAP_FLAG: usize = 1;

/// The allocation unit of the heap buffer.
type Storage = usize;

/// Number of bits in one `Storage` block.
#[inline(always)]
fn bits_per_storage() -> usize {
    size_of::<Storage>() * 8
}

/// Header of a heap allocation; the bit buffer follows it immediately.
struct Header {
    /// The number of bits in this bit vector.
    len: Storage,

    /// The number of `Storage` blocks in the buffer, excluding the header.
    buffer_len: Storage,
}

impl Header {
    /// Allocates a buffer that can hold `cap` bits, with the first `len`
    /// bits initialized to `val`, and returns a pointer to its header.
    fn new(cap: usize, len: usize, val: bool) -> *mut Header {
        let alloc_len = header_len() + buffer_len(cap);
        let init = if val { !0 } else { 0 };

        let v: Vec<Storage> = vec![init; alloc_len];

        // `Vec` may allocate more than requested; record the real capacity.
        let buffer_len = v.capacity() - header_len();
        let header_ptr = v.as_ptr() as *mut Header;

        // The raw pointer now owns the allocation.
        forget(v);

        unsafe {
            (*header_ptr).len = len;
            (*header_ptr).buffer_len = buffer_len;
        }
        header_ptr
    }
}

/// Number of `Storage` blocks occupied by the `Header`.
#[inline(always)]
fn header_len() -> usize {
    size_of::<Header>() / size_of::<Storage>()
}

/// Number of `Storage` blocks needed to hold `cap` bits, rounded up.
#[inline(always)]
fn buffer_len(cap: usize) -> usize {
    (cap + bits_per_storage() - 1) / bits_per_storage()
}

/// The underlying storage of a [`SmallBitVec`], as exposed by
/// [`SmallBitVec::into_storage`].
pub enum InternalStorage {
    /// The entire vector, packed into a single word.
    Inline(usize),

    /// The heap allocation: the header followed by the bit buffer.
    Spilled(Box<[usize]>),
}

impl SmallBitVec {
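    /// Creates an empty vector.
    ///
    /// A usage sketch (the doctest assumes the crate name `smallbitvec`):
    ///
    /// ```
    /// use smallbitvec::SmallBitVec;
    ///
    /// let v = SmallBitVec::new();
    /// assert!(v.is_empty());
    /// ```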
    #[inline]
    pub const fn new() -> SmallBitVec {
        SmallBitVec {
            data: inline_index(0),
        }
    }

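    /// Creates a vector containing `len` copies of `val`.
    ///
    /// A usage sketch (crate name `smallbitvec` assumed):
    ///
    /// ```
    /// use smallbitvec::SmallBitVec;
    ///
    /// let v = SmallBitVec::from_elem(10, false);
    /// assert_eq!(v.len(), 10);
    /// assert!(v.all_false());
    /// ```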
    #[inline]
    pub fn from_elem(len: usize, val: bool) -> SmallBitVec {
        if len <= inline_capacity() {
            return SmallBitVec {
                data: if val {
                    // All `len` bits set, plus the sentinel.
                    inline_ones(len + 1)
                } else {
                    // Just the sentinel bit.
                    inline_index(len)
                },
            };
        }
        let header_ptr = Header::new(len, len, val);
        SmallBitVec {
            data: (header_ptr as usize) | HEAP_FLAG,
        }
    }

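    /// Creates an empty vector with space for at least `cap` bits.
    ///
    /// A usage sketch (crate name `smallbitvec` assumed):
    ///
    /// ```
    /// use smallbitvec::SmallBitVec;
    ///
    /// let v = SmallBitVec::with_capacity(100);
    /// assert!(v.is_empty());
    /// assert!(v.capacity() >= 100);
    /// ```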
    #[inline]
    pub fn with_capacity(cap: usize) -> SmallBitVec {
        // An inline vector always has the full inline capacity available.
        if cap <= inline_capacity() {
            return SmallBitVec::new();
        }

        let header_ptr = Header::new(cap, 0, false);
        SmallBitVec {
            data: (header_ptr as usize) | HEAP_FLAG,
        }
    }

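    /// Returns the number of bits in the vector.
    ///
    /// A usage sketch (crate name `smallbitvec` assumed):
    ///
    /// ```
    /// use smallbitvec::sbvec;
    ///
    /// assert_eq!(sbvec![true; 4].len(), 4);
    /// ```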
    #[inline]
    pub fn len(&self) -> usize {
        if self.is_inline() {
            // The sentinel is the lowest set bit; everything above it is data.
            inline_bits() - self.data.trailing_zeros() as usize - 1
        } else {
            self.header().len
        }
    }

    #[inline]
    pub fn is_empty(&self) -> bool {
        self.len() == 0
    }

    #[inline]
    pub fn capacity(&self) -> usize {
        if self.is_inline() {
            inline_capacity()
        } else {
            self.header().buffer_len * bits_per_storage()
        }
    }

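    /// Returns the bit at index `n`, or `None` if it is out of bounds.
    ///
    /// A usage sketch (crate name `smallbitvec` assumed):
    ///
    /// ```
    /// use smallbitvec::sbvec;
    ///
    /// let v = sbvec![true, false];
    /// assert_eq!(v.get(1), Some(false));
    /// assert_eq!(v.get(2), None);
    /// ```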
    #[inline]
    pub fn get(&self, n: usize) -> Option<bool> {
        if n < self.len() {
            Some(unsafe { self.get_unchecked(n) })
        } else {
            None
        }
    }

    #[inline]
    pub fn last(&self) -> Option<bool> {
        self.len()
            .checked_sub(1)
            .map(|n| unsafe { self.get_unchecked(n) })
    }

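    /// Returns the bit at index `n` without bounds checking.
    ///
    /// # Safety
    ///
    /// `n` must be less than `self.len()`.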
    #[inline]
    pub unsafe fn get_unchecked(&self, n: usize) -> bool {
        if self.is_inline() {
            self.data & inline_index(n) != 0
        } else {
            let buffer = self.buffer();
            let i = n / bits_per_storage();
            let offset = n % bits_per_storage();
            *buffer.get_unchecked(i) & (1 << offset) != 0
        }
    }

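    /// Sets the bit at index `n` to `val`. Panics if `n` is out of bounds.
    ///
    /// A usage sketch (crate name `smallbitvec` assumed):
    ///
    /// ```
    /// use smallbitvec::sbvec;
    ///
    /// let mut v = sbvec![true, false];
    /// v.set(1, true);
    /// assert_eq!(v.get(1), Some(true));
    /// ```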
    #[inline]
    pub fn set(&mut self, n: usize, val: bool) {
        assert!(n < self.len(), "Index {} out of bounds", n);
        unsafe {
            self.set_unchecked(n, val);
        }
    }

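    /// Sets the bit at index `n` to `val` without bounds checking.
    ///
    /// # Safety
    ///
    /// `n` must be less than `self.len()`.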
    #[inline]
    pub unsafe fn set_unchecked(&mut self, n: usize, val: bool) {
        if self.is_inline() {
            if val {
                self.data |= inline_index(n);
            } else {
                self.data &= !inline_index(n);
            }
        } else {
            let buffer = self.buffer_mut();
            let i = n / bits_per_storage();
            let offset = n % bits_per_storage();
            if val {
                *buffer.get_unchecked_mut(i) |= 1 << offset;
            } else {
                *buffer.get_unchecked_mut(i) &= !(1 << offset);
            }
        }
    }

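    /// Appends a bit to the end of the vector.
    ///
    /// A usage sketch (crate name `smallbitvec` assumed):
    ///
    /// ```
    /// use smallbitvec::SmallBitVec;
    ///
    /// let mut v = SmallBitVec::new();
    /// v.push(true);
    /// assert_eq!(v.len(), 1);
    /// assert_eq!(v.last(), Some(true));
    /// ```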
    #[inline]
    pub fn push(&mut self, val: bool) {
        let idx = self.len();
        if idx == self.capacity() {
            self.reserve(1);
        }
        unsafe {
            self.set_len(idx + 1);
            self.set_unchecked(idx, val);
        }
    }

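    /// Removes and returns the last bit, or `None` if the vector is empty.
    ///
    /// A usage sketch (crate name `smallbitvec` assumed):
    ///
    /// ```
    /// use smallbitvec::sbvec;
    ///
    /// let mut v = sbvec![true];
    /// assert_eq!(v.pop(), Some(true));
    /// assert_eq!(v.pop(), None);
    /// ```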
    #[inline]
    pub fn pop(&mut self) -> Option<bool> {
        self.len().checked_sub(1).map(|last| unsafe {
            let val = self.get_unchecked(last);
            self.set_len(last);
            val
        })
    }

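    /// Removes and returns the bit at index `idx`, shifting every later bit
    /// toward the front. Panics if `idx` is out of bounds.
    ///
    /// A usage sketch (crate name `smallbitvec` assumed):
    ///
    /// ```
    /// use smallbitvec::sbvec;
    ///
    /// let mut v = sbvec![false, true, false];
    /// assert_eq!(v.remove(1), true);
    /// assert_eq!(v, sbvec![false, false]);
    /// ```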
    #[inline]
    pub fn remove(&mut self, idx: usize) -> bool {
        let len = self.len();
        let val = self[idx];

        if self.is_inline() {
            // Shift the later bits, including the sentinel, toward the
            // front; this also implicitly decrements the length.
            let mask = !inline_ones(idx);
            let new_vals = (self.data & mask) << 1;
            self.data = (self.data & !mask) | (new_vals & mask);
        } else {
            let first = idx / bits_per_storage();
            let offset = idx % bits_per_storage();
            let count = buffer_len(len);
            {
                // Shift bits within the first affected block.
                let buf = self.buffer_mut();
                let mask = !0 << offset;
                let new_vals = (buf[first] & mask) >> 1;
                buf[first] = (buf[first] & !mask) | (new_vals & mask);
            }
            // Shift the remaining blocks, carrying the lowest bit of each
            // block into the block before it.
            for i in (first + 1)..count {
                let bit_idx = i * bits_per_storage();
                unsafe {
                    let first_bit = self.get_unchecked(bit_idx);
                    self.set_unchecked(bit_idx - 1, first_bit);
                }
                self.buffer_mut()[i] >>= 1;
            }
            unsafe {
                self.set_len(len - 1);
            }
        }
        val
    }

    /// Removes all bits from the vector, keeping its capacity.
    #[inline]
    pub fn clear(&mut self) {
        unsafe {
            self.set_len(0);
        }
    }

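    /// Reserves capacity for at least `additional` more bits, reallocating
    /// if needed. Panics on capacity overflow.
    ///
    /// A usage sketch (crate name `smallbitvec` assumed):
    ///
    /// ```
    /// use smallbitvec::SmallBitVec;
    ///
    /// let mut v = SmallBitVec::new();
    /// v.reserve(100);
    /// assert!(v.capacity() >= 100);
    /// ```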
    #[inline]
    pub fn reserve(&mut self, additional: usize) {
        let old_cap = self.capacity();
        let new_cap = self
            .len()
            .checked_add(additional)
            .expect("capacity overflow");
        if new_cap <= old_cap {
            return;
        }
        // Grow by at least a factor of two, so repeated pushes stay
        // amortized O(1).
        let double_cap = old_cap.saturating_mul(2);
        self.reallocate(max(new_cap, double_cap));
    }

    /// Sets the length directly. The caller must ensure that `len` is at
    /// most the capacity, and that any newly exposed bits are written
    /// before they are read.
    #[inline]
    unsafe fn set_len(&mut self, len: usize) {
        debug_assert!(len <= self.capacity());
        if self.is_inline() {
            // Place the new sentinel bit and clear everything below it.
            let sentinel = inline_index(len);
            let mask = !(sentinel - 1);
            self.data |= sentinel;
            self.data &= mask;
        } else {
            self.header_mut().len = len;
        }
    }

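    /// Returns an iterator over the bits of the vector.
    ///
    /// A usage sketch (crate name `smallbitvec` assumed):
    ///
    /// ```
    /// use smallbitvec::sbvec;
    ///
    /// let v = sbvec![true, false];
    /// let mut iter = v.iter();
    /// assert_eq!(iter.next(), Some(true));
    /// assert_eq!(iter.next(), Some(false));
    /// assert_eq!(iter.next(), None);
    /// ```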
    #[inline]
    pub fn iter(&self) -> Iter {
        Iter {
            vec: self,
            range: 0..self.len(),
        }
    }

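    /// Returns a view of the bits in `range`. Panics if the range end is
    /// out of bounds.
    ///
    /// A usage sketch (crate name `smallbitvec` assumed):
    ///
    /// ```
    /// use smallbitvec::sbvec;
    ///
    /// let v = sbvec![true, false, true];
    /// let r = v.range(1..3);
    /// assert_eq!(r[0], false);
    /// assert_eq!(r[1], true);
    /// ```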
    #[inline]
    pub fn range(&self, range: Range<usize>) -> VecRange {
        assert!(range.end <= self.len(), "range out of bounds");
        VecRange { vec: self, range }
    }

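    /// Returns `true` if every bit is zero (also for the empty vector).
    ///
    /// A usage sketch (crate name `smallbitvec` assumed):
    ///
    /// ```
    /// use smallbitvec::sbvec;
    ///
    /// assert!(sbvec![false; 100].all_false());
    /// assert!(!sbvec![false, true].all_false());
    /// ```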
    #[inline]
    pub fn all_false(&self) -> bool {
        let mut len = self.len();
        if len == 0 {
            return true;
        }

        if self.is_inline() {
            let mask = inline_ones(len);
            self.data & mask == 0
        } else {
            for &storage in self.buffer() {
                if len >= bits_per_storage() {
                    // Check a full block at once.
                    if storage != 0 {
                        return false;
                    }
                    len -= bits_per_storage();
                } else {
                    // Check the final partial block, masking off the
                    // unused high bits.
                    let mask = (1 << len) - 1;
                    if storage & mask != 0 {
                        return false;
                    }
                    break;
                }
            }
            true
        }
    }

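    /// Returns `true` if every bit is one (also for the empty vector).
    ///
    /// A usage sketch (crate name `smallbitvec` assumed):
    ///
    /// ```
    /// use smallbitvec::sbvec;
    ///
    /// assert!(sbvec![true; 100].all_true());
    /// assert!(!sbvec![true, false].all_true());
    /// ```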
    #[inline]
    pub fn all_true(&self) -> bool {
        let mut len = self.len();
        if len == 0 {
            return true;
        }

        if self.is_inline() {
            let mask = inline_ones(len);
            self.data & mask == mask
        } else {
            for &storage in self.buffer() {
                if len >= bits_per_storage() {
                    if storage != !0 {
                        return false;
                    }
                    len -= bits_per_storage();
                } else {
                    let mask = (1 << len) - 1;
                    if storage & mask != mask {
                        return false;
                    }
                    break;
                }
            }
            true
        }
    }

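    /// Shortens the vector to `len` bits; does nothing if it is already
    /// `len` bits or shorter.
    ///
    /// A usage sketch (crate name `smallbitvec` assumed):
    ///
    /// ```
    /// use smallbitvec::sbvec;
    ///
    /// let mut v = sbvec![true, false, true];
    /// v.truncate(2);
    /// assert_eq!(v, sbvec![true, false]);
    /// ```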
    pub fn truncate(&mut self, len: usize) {
        unsafe {
            if len < self.len() {
                self.set_len(len);
            }
        }
    }

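    /// Resizes the vector to `len` bits, filling any new bits with `value`.
    ///
    /// A usage sketch (crate name `smallbitvec` assumed):
    ///
    /// ```
    /// use smallbitvec::sbvec;
    ///
    /// let mut v = sbvec![true];
    /// v.resize(3, false);
    /// assert_eq!(v, sbvec![true, false, false]);
    /// ```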
    pub fn resize(&mut self, len: usize, value: bool) {
        let old_len = self.len();

        if len > old_len {
            unsafe {
                self.reallocate(len);
                self.set_len(len);
                for i in old_len..len {
                    self.set(i, value);
                }
            }
        } else {
            self.truncate(len);
        }
    }

    /// Reallocates the vector so it can hold at least `cap` bits.
    fn reallocate(&mut self, cap: usize) {
        let old_cap = self.capacity();
        if cap <= old_cap {
            return;
        }
        assert!(self.len() <= cap);

        if self.is_heap() {
            let old_buffer_len = self.header().buffer_len;
            let new_buffer_len = buffer_len(cap);

            let old_alloc_len = header_len() + old_buffer_len;
            let new_alloc_len = header_len() + new_buffer_len;

            // Reconstruct the `Vec` that owns the allocation, grow it,
            // then leak it again.
            let old_ptr = self.header_raw() as *mut Storage;
            let mut v = unsafe { Vec::from_raw_parts(old_ptr, old_alloc_len, old_alloc_len) };
            v.resize(new_alloc_len, 0);
            v.shrink_to_fit();
            self.data = v.as_ptr() as usize | HEAP_FLAG;
            forget(v);

            self.header_mut().buffer_len = new_buffer_len;
        } else {
            // Spill from inline to heap storage, copying bit by bit.
            let old_self = replace(self, SmallBitVec::with_capacity(cap));
            unsafe {
                self.set_len(old_self.len());
                for i in 0..old_self.len() {
                    self.set_unchecked(i, old_self.get_unchecked(i));
                }
            }
        }
    }

    /// Returns a pointer to the heap allocation, if the vector has spilled.
    #[inline]
    pub fn heap_ptr(&self) -> Option<*const usize> {
        if self.is_heap() {
            Some((self.data & !HEAP_FLAG) as *const Storage)
        } else {
            None
        }
    }

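    /// Consumes the vector and returns its underlying representation:
    /// either the inline word or the spilled heap buffer.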
    #[inline]
    pub fn into_storage(self) -> InternalStorage {
        if self.is_heap() {
            let alloc_len = header_len() + self.header().buffer_len;
            let ptr = self.header_raw() as *mut Storage;
            let slice = unsafe { Box::from_raw(slice::from_raw_parts_mut(ptr, alloc_len)) };
            forget(self);
            InternalStorage::Spilled(slice)
        } else {
            InternalStorage::Inline(self.data)
        }
    }

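    /// Rebuilds a vector from the representation returned by
    /// [`SmallBitVec::into_storage`].
    ///
    /// # Safety
    ///
    /// `storage` must be a value previously produced by `into_storage`.
    ///
    /// A round-trip sketch (crate name `smallbitvec` assumed):
    ///
    /// ```
    /// use smallbitvec::{sbvec, SmallBitVec};
    ///
    /// let v = sbvec![true, false];
    /// let storage = v.into_storage();
    /// let v = unsafe { SmallBitVec::from_storage(storage) };
    /// assert_eq!(v, sbvec![true, false]);
    /// ```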
    pub unsafe fn from_storage(storage: InternalStorage) -> SmallBitVec {
        match storage {
            InternalStorage::Inline(data) => SmallBitVec { data },
            InternalStorage::Spilled(vs) => {
                let ptr = Box::into_raw(vs);
                SmallBitVec {
                    data: (ptr as *mut usize as usize) | HEAP_FLAG,
                }
            }
        }
    }

    #[inline]
    fn is_inline(&self) -> bool {
        self.data & HEAP_FLAG == 0
    }

    #[inline]
    fn is_heap(&self) -> bool {
        !self.is_inline()
    }

    #[inline]
    fn header_raw(&self) -> *mut Header {
        assert!(self.is_heap());
        (self.data & !HEAP_FLAG) as *mut Header
    }

    #[inline]
    fn header_mut(&mut self) -> &mut Header {
        unsafe { &mut *self.header_raw() }
    }

    #[inline]
    fn header(&self) -> &Header {
        unsafe { &*self.header_raw() }
    }

    #[inline]
    fn buffer_raw(&self) -> *mut [Storage] {
        unsafe {
            let header_ptr = self.header_raw();
            let buffer_len = (*header_ptr).buffer_len;
            // The buffer starts immediately after the header.
            let buffer_ptr = (header_ptr as *mut Storage).add(header_len());
            slice::from_raw_parts_mut(buffer_ptr, buffer_len)
        }
    }

    #[inline]
    fn buffer_mut(&mut self) -> &mut [Storage] {
        unsafe { &mut *self.buffer_raw() }
    }

    #[inline]
    fn buffer(&self) -> &[Storage] {
        unsafe { &*self.buffer_raw() }
    }
}

impl fmt::Debug for SmallBitVec {
    #[inline]
    fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
        fmt.debug_list()
            .entries(self.iter().map(|b| b as u8))
            .finish()
    }
}

impl Default for SmallBitVec {
    #[inline]
    fn default() -> Self {
        Self::new()
    }
}

impl PartialEq for SmallBitVec {
    fn eq(&self, other: &Self) -> bool {
        // Inline vectors compare in one word: the sentinel encodes the
        // length, so equal words mean equal vectors.
        if self.is_inline() && other.is_inline() {
            return self.data == other.data;
        }

        let len = self.len();
        if len != other.len() {
            return false;
        }

        // Both on the heap: compare whole blocks, then mask the tail.
        if self.is_heap() && other.is_heap() {
            let buf0 = self.buffer();
            let buf1 = other.buffer();

            let full_blocks = len / bits_per_storage();
            let remainder = len % bits_per_storage();

            if buf0[..full_blocks] != buf1[..full_blocks] {
                return false;
            }

            if remainder != 0 {
                let mask = (1 << remainder) - 1;
                if buf0[full_blocks] & mask != buf1[full_blocks] & mask {
                    return false;
                }
            }
            return true;
        }

        // Mixed representations: fall back to bit-by-bit comparison.
        Iterator::eq(self.iter(), other.iter())
    }
}

impl Eq for SmallBitVec {}

impl Drop for SmallBitVec {
    fn drop(&mut self) {
        if self.is_heap() {
            unsafe {
                let header_ptr = self.header_raw();
                let alloc_ptr = header_ptr as *mut Storage;
                let alloc_len = header_len() + (*header_ptr).buffer_len;
                // Reconstruct the `Vec` that owns the allocation and let
                // it drop, freeing the buffer.
                drop(Vec::from_raw_parts(alloc_ptr, alloc_len, alloc_len));
            }
        }
    }
}

impl Clone for SmallBitVec {
    fn clone(&self) -> Self {
        if self.is_inline() {
            return SmallBitVec { data: self.data };
        }

        // Copy the entire allocation, header included, into a new buffer.
        let buffer_len = self.header().buffer_len;
        let alloc_len = header_len() + buffer_len;
        let ptr = self.header_raw() as *mut Storage;
        let raw_allocation = unsafe { slice::from_raw_parts(ptr, alloc_len) };

        let v = raw_allocation.to_vec();
        let header_ptr = v.as_ptr() as *mut Header;
        forget(v);
        SmallBitVec {
            data: (header_ptr as usize) | HEAP_FLAG,
        }
    }
}

impl Index<usize> for SmallBitVec {
    type Output = bool;

    #[inline(always)]
    fn index(&self, i: usize) -> &bool {
        assert!(i < self.len(), "index out of range");
        // `Index` must return a reference, so borrow a static `bool`.
        if self.get(i).unwrap() {
            &true
        } else {
            &false
        }
    }
}

#[cfg(feature = "malloc_size_of")]
impl malloc_size_of::MallocSizeOf for SmallBitVec {
    fn size_of(&self, ops: &mut malloc_size_of::MallocSizeOfOps) -> usize {
        if let Some(ptr) = self.heap_ptr() {
            unsafe { ops.malloc_size_of(ptr) }
        } else {
            0
        }
    }
}

impl hash::Hash for SmallBitVec {
    #[inline]
    fn hash<H: hash::Hasher>(&self, state: &mut H) {
        let len = self.len();
        len.hash(state);
        if self.is_inline() {
            // Reverse so that the inline layout (most significant bit
            // first) hashes the same as the heap layout (least significant
            // bit first).
            (self.data & inline_ones(len)).reverse_bits().hash(state);
        } else {
            let full_blocks = len / bits_per_storage();
            let remainder = len % bits_per_storage();
            let buffer = self.buffer();
            if full_blocks != 0 {
                buffer[..full_blocks].hash(state);
            }
            if remainder != 0 {
                // Mask off the unused bits of the final block.
                let mask = (1 << remainder) - 1;
                (buffer[full_blocks] & mask).hash(state);
            }
        }
    }
}

impl Extend<bool> for SmallBitVec {
    #[inline]
    fn extend<I: IntoIterator<Item = bool>>(&mut self, iter: I) {
        let iter = iter.into_iter();

        // Reserve the lower bound of the size hint up front; `reserve`
        // itself panics on capacity overflow, so no extra check is needed.
        let (min, _) = iter.size_hint();
        self.reserve(min);

        for element in iter {
            self.push(element)
        }
    }
}

impl FromIterator<bool> for SmallBitVec {
    #[inline]
    fn from_iter<I: IntoIterator<Item = bool>>(iter: I) -> Self {
        let mut v = SmallBitVec::new();
        v.extend(iter);
        v
    }
}

impl IntoIterator for SmallBitVec {
    type Item = bool;
    type IntoIter = IntoIter;

    #[inline]
    fn into_iter(self) -> IntoIter {
        IntoIter {
            range: 0..self.len(),
            vec: self,
        }
    }
}

impl<'a> IntoIterator for &'a SmallBitVec {
    type Item = bool;
    type IntoIter = Iter<'a>;

    #[inline]
    fn into_iter(self) -> Iter<'a> {
        self.iter()
    }
}

/// An iterator that owns a `SmallBitVec` and yields its bits one by one.
pub struct IntoIter {
    vec: SmallBitVec,
    range: Range<usize>,
}

impl Iterator for IntoIter {
    type Item = bool;

    #[inline]
    fn next(&mut self) -> Option<bool> {
        self.range
            .next()
            .map(|i| unsafe { self.vec.get_unchecked(i) })
    }

    #[inline]
    fn size_hint(&self) -> (usize, Option<usize>) {
        self.range.size_hint()
    }
}

impl DoubleEndedIterator for IntoIter {
    #[inline]
    fn next_back(&mut self) -> Option<bool> {
        self.range
            .next_back()
            .map(|i| unsafe { self.vec.get_unchecked(i) })
    }
}

impl ExactSizeIterator for IntoIter {}

/// An immutable iterator over the bits of a `SmallBitVec`.
pub struct Iter<'a> {
    vec: &'a SmallBitVec,
    range: Range<usize>,
}

impl<'a> Default for Iter<'a> {
    #[inline]
    fn default() -> Self {
        const EMPTY: &'static SmallBitVec = &SmallBitVec::new();
        Self {
            vec: EMPTY,
            range: 0..0,
        }
    }
}

impl<'a> Iterator for Iter<'a> {
    type Item = bool;

    #[inline]
    fn next(&mut self) -> Option<bool> {
        self.range
            .next()
            .map(|i| unsafe { self.vec.get_unchecked(i) })
    }

    #[inline]
    fn size_hint(&self) -> (usize, Option<usize>) {
        self.range.size_hint()
    }
}

impl<'a> DoubleEndedIterator for Iter<'a> {
    #[inline]
    fn next_back(&mut self) -> Option<bool> {
        self.range
            .next_back()
            .map(|i| unsafe { self.vec.get_unchecked(i) })
    }
}

impl<'a> ExactSizeIterator for Iter<'a> {}

/// An immutable view of a range of bits within a `SmallBitVec`.
#[derive(Debug, Clone)]
pub struct VecRange<'a> {
    vec: &'a SmallBitVec,
    range: Range<usize>,
}

impl<'a> VecRange<'a> {
    #[inline]
    pub fn iter(&self) -> Iter<'a> {
        Iter {
            vec: self.vec,
            range: self.range.clone(),
        }
    }
}

impl<'a> Index<usize> for VecRange<'a> {
    type Output = bool;

    #[inline]
    fn index(&self, i: usize) -> &bool {
        let vec_i = i + self.range.start;
        assert!(vec_i < self.range.end, "index out of range");
        &self.vec[vec_i]
    }
}