// Source: read_fonts/collections/int_set/bitset.rs

//! A fast, efficient, sparse, & ordered unsigned integer (`u32`) bit set.
//!
//! There are a couple of differences with [`super::IntSet`]:
//! - This set is not invertible and can only record the set of integers which are members.
//! - This set works only with `u32` values, unlike [`super::IntSet`] which supports custom integer types.
//!
//! When dealing only with `u32` values and invertibility is not needed, this set is slightly faster
//! than the more generic [`super::IntSet`].
//!
//! The bitset is implemented using fixed size pages which allows it to compactly
//! represent sparse membership. However, the set excels when set members are typically
//! clustered together. For example when representing glyph id or unicode codepoint values
//! in a font.
//!
//! When constructing a new [`U32Set`] from an existing list of integer values the most efficient
//! way to create the set is to initialize it from a sorted list of values via the extend() method.

18use super::bitpage::BitPage;
19use super::bitpage::RangeIter;
20use super::bitpage::PAGE_BITS;
21use core::sync::atomic::AtomicUsize;
22use std::cmp::Ordering;
23use std::hash::Hash;
24
25use std::ops::RangeInclusive;
26
// log_2(PAGE_BITS): the shift amount that splits a u32 value into a page "major"
// value (high bits) and a bit position within that page (low bits).
const PAGE_BITS_LOG_2: u32 = PAGE_BITS.ilog2();
29
/// A fast, efficient, sparse, & ordered `u32` set.
///
/// For a higher-level API that supports inversion and generic int types, use [`super::IntSet`].
#[derive(Debug)]
#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
pub struct U32Set {
    // TODO(garretrieger): consider a "small array" type instead of Vec.
    // Fixed-size bit pages holding the actual membership bits. Storage order is
    // arbitrary; `page_map` provides the sorted view onto this vector.
    pages: Vec<BitPage>,
    // Directory sorted by major value, mapping each page's major value to its
    // slot in `pages` (looked up by binary search).
    page_map: Vec<PageInfo>,
    // Cached total member count across all pages, kept in sync by mutating methods.
    length: u64,

    // Cache of the page_map index hit by the most recent `contains()` call.
    // Purely a lookup accelerator: it never affects externally visible state,
    // so it is skipped by serde and reset on clone.
    #[cfg_attr(feature = "serde", serde(skip))]
    #[cfg_attr(feature = "serde", serde(default = "default_last_page_map_index"))]
    last_page_map_index: AtomicUsize,
}
45
/// Initial/reset value for the `contains()` page-map index cache.
/// `usize::MAX` is guaranteed to miss (`page_map.get(usize::MAX)` is `None`),
/// forcing the first lookup to fall through to a binary search.
const fn default_last_page_map_index() -> AtomicUsize {
    AtomicUsize::new(usize::MAX)
}
49
50impl Default for U32Set {
51    fn default() -> Self {
52        Self {
53            pages: Default::default(),
54            page_map: Default::default(),
55            length: Default::default(),
56            last_page_map_index: default_last_page_map_index(),
57        }
58    }
59}
60
61impl Clone for U32Set {
62    fn clone(&self) -> Self {
63        Self {
64            pages: self.pages.clone(),
65            page_map: self.page_map.clone(),
66            length: self.length,
67            // last_page_map_index has no effect on the externally visible state of the set
68            // so it can just be reset to the default value.
69            last_page_map_index: default_last_page_map_index(),
70        }
71    }
72}
73
74impl FromIterator<u32> for U32Set {
75    fn from_iter<I: IntoIterator<Item = u32>>(iter: I) -> Self {
76        let mut s = U32Set::empty();
77        s.extend(iter);
78        s
79    }
80}
81
82impl U32Set {
83    /// Add val as a member of this set.
84    ///
85    /// If the set did not previously contain this value, returns `true`.
86    pub fn insert(&mut self, val: u32) -> bool {
87        let page = self.ensure_page_for_mut(val);
88        let ret = page.insert(val);
89        self.length += ret as u64;
90        ret
91    }
92
93    /// Add all values in range as members of this set.
94    pub fn insert_range(&mut self, range: RangeInclusive<u32>) {
95        let start = *range.start();
96        let end = *range.end();
97        if start > end {
98            return;
99        }
100
101        let major_start = Self::get_major_value(start);
102        let major_end = Self::get_major_value(end);
103
104        let mut total_added = 0;
105
106        for major in major_start..=major_end {
107            let page_start = start.max(Self::major_start(major));
108            let page_end = end.min(Self::major_start(major) + (PAGE_BITS - 1));
109            let page = self.ensure_page_for_major_mut(major);
110            let pre_len = page.len();
111            page.insert_range(page_start, page_end);
112            let delta_len = page.len() - pre_len;
113            total_added += delta_len as u64;
114        }
115        self.length += total_added;
116    }
117
118    /// An alternate version of [`extend()`] which is optimized for inserting an unsorted
119    /// iterator of values.
120    ///
121    /// [`extend()`]: Self::extend
122    pub fn extend_unsorted<U: IntoIterator<Item = u32>>(&mut self, iter: U) {
123        self.length += iter
124            .into_iter()
125            .map(|val| {
126                let major_value = Self::get_major_value(val);
127                let page = self.ensure_page_for_major_mut(major_value);
128                page.insert(val) as u64
129            })
130            .sum::<u64>();
131    }
132
133    /// Remove val from this set.
134    ///
135    /// Returns `true` if the value was present.
136    pub fn remove(&mut self, val: u32) -> bool {
137        let maybe_page = self.page_for_mut(val);
138        if let Some(page) = maybe_page {
139            let ret = page.remove(val);
140            self.length -= ret as u64;
141            ret
142        } else {
143            false
144        }
145    }
146
    // Remove all values in iter from this set.
    //
    // Caches the page index of the most recently seen major value so that runs of
    // values on the same page (e.g. sorted input) need only one page-map lookup.
    pub fn remove_all<U: IntoIterator<Item = u32>>(&mut self, iter: U) {
        let mut last_page_index: Option<usize> = None;
        let mut last_major_value = u32::MAX;
        let mut total_removed = 0;
        for val in iter {
            let major_value = Self::get_major_value(val);
            if major_value != last_major_value {
                // Different page than last time: refresh the cached index
                // (None if no page exists for this major value).
                last_page_index = self.page_index_for_major(major_value);
                last_major_value = major_value;
            };

            let Some(page_index) = last_page_index else {
                // No page covers this major value, so `val` can't be present.
                continue;
            };

            if let Some(page) = self.pages.get_mut(page_index) {
                total_removed += page.remove(val) as u64;
            }
        }
        self.length -= total_removed;
    }
169
    /// Removes all values in range as members of this set.
    pub fn remove_range(&mut self, range: RangeInclusive<u32>) {
        let start = *(range.start());
        let end = *(range.end());
        if start > end {
            // Empty/inverted range: nothing to remove.
            return;
        }

        let start_major = Self::get_major_value(start);
        let end_major = Self::get_major_value(end);
        // Locate the first page whose major value is >= start_major. On a miss the
        // binary search's insertion point is the first page past start_major, which
        // is exactly where the walk should begin.
        let mut info_index = match self
            .page_map
            .binary_search_by(|probe| probe.major_value.cmp(&start_major))
        {
            Ok(info_index) => info_index,
            Err(info_index) => info_index,
        };

        // Walk existing pages in ascending major order until we pass end_major.
        loop {
            let Some(info) = self.page_map.get(info_index) else {
                break;
            };
            let Some(page) = self.pages.get_mut(info.index as usize) else {
                break;
            };

            if info.major_value > end_major {
                break;
            } else if info.major_value == start_major {
                // First overlapping page: clamp removal to this page's span. The
                // min() also handles the single-page case (start_major == end_major).
                page.remove_range(start, Self::major_end(start_major).min(end));
            } else if info.major_value == end_major {
                // Last overlapping page: remove from its first value through `end`.
                page.remove_range(Self::major_start(end_major), end);
                break;
            } else {
                // Interior page fully covered by the range.
                page.clear();
            }
            info_index += 1;
        }

        // Pages changed in bulk above, so rebuild the cached member count.
        self.recompute_length();
    }
211
    /// Returns true if val is a member of this set.
    pub fn contains(&self, val: u32) -> bool {
        let new_major = U32Set::get_major_value(val);

        // Fast path: see whether the page_map entry cached by a previous call
        // covers `val`'s page. Relaxed ordering suffices because the cache is
        // only a hint — any value read here is validated against `new_major`
        // before being used.
        let lookup_result = self
            .page_map
            .get(
                self.last_page_map_index
                    .load(std::sync::atomic::Ordering::Relaxed),
            )
            .filter(|info| info.major_value == new_major)
            .map(|info| Some(info.index as usize))
            .unwrap_or(None);

        let page_index = match lookup_result {
            None => {
                // Cached value needs an update, lookup the actual page map index.
                let Some(page_map_index) = self.page_map_index_for_major(new_major) else {
                    // No page exists for this value so it's not present and we don't need to update cached values.
                    return false;
                };

                self.last_page_map_index
                    .store(page_map_index, std::sync::atomic::Ordering::Relaxed);
                self.page_map[page_map_index].index as usize
            }
            Some(page_index) => page_index,
        };

        self.pages
            .get(page_index)
            .map(|page| page.contains(val))
            .unwrap_or(false)
    }
246
247    pub const fn empty() -> U32Set {
248        U32Set {
249            pages: Vec::new(),
250            page_map: Vec::new(),
251            length: 0,
252            last_page_map_index: default_last_page_map_index(),
253        }
254    }
255
256    /// Remove all members from this set.
257    pub fn clear(&mut self) {
258        self.pages.clear();
259        self.page_map.clear();
260        self.length = 0;
261    }
262
263    /// Return true if there are no members in this set.
264    pub fn is_empty(&self) -> bool {
265        self.len() == 0
266    }
267
268    fn recompute_length(&mut self) {
269        self.length = self.pages.iter().map(|page| page.len() as u64).sum();
270    }
271
272    /// Returns the number of members in this set.
273    pub fn len(&self) -> u64 {
274        self.length
275    }
276
277    pub(crate) fn num_pages(&self) -> usize {
278        self.pages.len()
279    }
280
281    /// Sets the members of this set to the union of self and other.
282    pub fn union(&mut self, other: &U32Set) {
283        self.process(BitPage::union, other);
284    }
285
286    /// Sets the members of this set to the intersection of self and other.
287    pub fn intersect(&mut self, other: &U32Set) {
288        self.process(BitPage::intersect, other);
289    }
290
291    /// Sets the members of this set to self - other.
292    pub fn subtract(&mut self, other: &U32Set) {
293        self.process(BitPage::subtract, other);
294    }
295
296    /// Sets the members of this set to other - self.
297    pub fn reversed_subtract(&mut self, other: &U32Set) {
298        self.process(|a, b| BitPage::subtract(b, a), other);
299    }
300
301    /// Iterator over the members of this set. In sorted order (ascending).
302    pub fn iter(&self) -> impl DoubleEndedIterator<Item = u32> + '_ {
303        self.iter_non_empty_pages().flat_map(|(major, page)| {
304            let base = Self::major_start(major);
305            page.iter().map(move |v| base + v)
306        })
307    }
308
    /// Iterator over the members of this set starting from value.
    ///
    /// So value is included in the iterator if it's in the set.
    pub fn iter_from(&self, value: u32) -> impl Iterator<Item = u32> + '_ {
        let major_value = Self::get_major_value(value);
        let result = self
            .page_map
            .binary_search_by(|probe| probe.major_value.cmp(&major_value));

        // Exact hit: the first page must be iterated partially (from `value` on).
        // Miss: the insertion point is the first page wholly after `value`, which
        // should be iterated in full.
        let (page_map_index, partial_first_page) = match result {
            Ok(page_map_index) => (page_map_index, true),
            Err(page_map_index) => (page_map_index, false),
        };

        // Resolve the candidate first page (if any) along with its major value.
        let page = self
            .page_map
            .get(page_map_index)
            .and_then(move |page_info| {
                self.pages
                    .get(page_info.index as usize)
                    .map(|page| (page, page_info.major_value))
            });

        // Partial iteration of the first page, starting at `value`.
        let init_it =
            page.filter(|_| partial_first_page)
                .into_iter()
                .flat_map(move |(page, major)| {
                    let base = Self::major_start(major);
                    page.iter_from(value).map(move |v| base + v)
                });

        // Every page after the first is iterated in full.
        let follow_on_page_map_index = if partial_first_page {
            page_map_index + 1
        } else {
            page_map_index
        };

        let follow_on_it = self.page_map[follow_on_page_map_index..]
            .iter()
            .flat_map(|info| {
                self.pages
                    .get(info.index as usize)
                    .map(|page| (info.major_value, page))
            })
            .filter(|(_, page)| !page.is_empty())
            .flat_map(|(major, page)| {
                let base = Self::major_start(major);
                page.iter().map(move |v| base + v)
            });

        init_it.chain(follow_on_it)
    }
361
362    /// Iterate over the ranges of contiguous values in this set.
363    pub fn iter_ranges(&self) -> impl Iterator<Item = RangeInclusive<u32>> + '_ {
364        U32SetRangeIter::new(self)
365    }
366
367    fn iter_pages(&self) -> impl DoubleEndedIterator<Item = (u32, &BitPage)> + '_ {
368        self.page_map.iter().flat_map(|info| {
369            self.pages
370                .get(info.index as usize)
371                .map(|page| (info.major_value, page))
372        })
373    }
374
375    fn iter_non_empty_pages(&self) -> impl DoubleEndedIterator<Item = (u32, &BitPage)> + '_ {
376        self.iter_pages().filter(|(_, page)| !page.is_empty())
377    }
378
    /// Determine the passthrough behaviour of the operator.
    ///
    /// The passthrough behaviour is what happens to a page on one side of the operation if the other side is 0.
    /// For example union passes through both left and right sides since it preserves the left or right side when
    /// the other side is 0. Knowing this lets us optimize some cases when only one page is present on one side.
    fn passthrough_behavior<Op>(op: &Op) -> (bool, bool)
    where
        Op: Fn(&BitPage, &BitPage) -> BitPage,
    {
        // Probe the operator with a single-bit page against an all-zeroes page,
        // in both argument orders, and observe whether the set bit survives.
        let mut one: BitPage = BitPage::new_zeroes();
        one.insert(0);
        let zero: BitPage = BitPage::new_zeroes();

        let passthrough_left: bool = op(&one, &zero).contains(0);
        let passthrough_right: bool = op(&zero, &one).contains(0);

        (passthrough_left, passthrough_right)
    }
397
    /// Combine `self` and `other` page-by-page with `op`, storing the result in `self`.
    ///
    /// Runs in-place: `self` is first sized for the estimated result, then `op` is
    /// applied from the last page towards the first so that source pages are read
    /// before their slots are overwritten.
    fn process<Op>(&mut self, op: Op, other: &U32Set)
    where
        Op: Fn(&BitPage, &BitPage) -> BitPage,
    {
        let (passthrough_left, passthrough_right) = U32Set::passthrough_behavior(&op);

        let mut len_a = self.pages.len();
        let len_b = other.pages.len();
        let mut idx_a = 0;
        let mut idx_b = 0;
        let mut count = 0;
        let mut write_idx = 0;

        // Step 1: Estimate the new size of this set (in number of pages) after processing, and remove left side
        //         pages that won't be needed.
        while idx_a < len_a && idx_b < len_b {
            let a_major = self.page_map[idx_a].major_value;
            let b_major = other.page_map[idx_b].major_value;

            match a_major.cmp(&b_major) {
                Ordering::Equal => {
                    if !passthrough_left {
                        // If we don't passthrough the left side, then the only case where we
                        // keep a page from the left is when there is also a page at the same major
                        // on the right side. In this case move page_map entries that we're keeping
                        // on the left side set to the front of the page_map vector. Otherwise if
                        // we do passthrough left, then we keep all left hand side pages and this
                        // isn't necessary.
                        if write_idx < idx_a {
                            self.page_map[write_idx] = self.page_map[idx_a];
                        }
                        write_idx += 1;
                    }

                    count += 1;
                    idx_a += 1;
                    idx_b += 1;
                }
                Ordering::Less => {
                    // Page only on the left; kept only if the op passes the left through.
                    if passthrough_left {
                        count += 1;
                    }
                    idx_a += 1;
                }
                Ordering::Greater => {
                    // Page only on the right; copied only if the op passes the right through.
                    if passthrough_right {
                        count += 1;
                    }
                    idx_b += 1;
                }
            }
        }

        // Account for the tail of whichever side was not exhausted above.
        if passthrough_left {
            count += len_a - idx_a;
        }

        if passthrough_right {
            count += len_b - idx_b;
        }

        // Step 2: compact and resize for the new estimated left side size.
        let mut next_page = len_a;
        if !passthrough_left {
            len_a = write_idx;
            next_page = write_idx;
            self.compact(write_idx);
        }

        self.resize(count);
        let new_count = count;

        // Step 3: process and apply op in-place from the last to first page.
        // `count` now walks backwards, marking the slot each result is written into.
        idx_a = len_a;
        idx_b = len_b;
        while idx_a > 0 && idx_b > 0 {
            match self.page_map[idx_a - 1]
                .major_value
                .cmp(&other.page_map[idx_b - 1].major_value)
            {
                Ordering::Equal => {
                    // Page present on both sides: combine with op.
                    idx_a -= 1;
                    idx_b -= 1;
                    count -= 1;
                    self.page_map[count] = self.page_map[idx_a];
                    *self.page_for_index_mut(count).unwrap() = op(
                        self.page_for_index(idx_a).unwrap(),
                        other.page_for_index(idx_b).unwrap(),
                    );
                }
                Ordering::Greater => {
                    // Left-only page: keep as-is when the left passes through.
                    idx_a -= 1;
                    if passthrough_left {
                        count -= 1;
                        self.page_map[count] = self.page_map[idx_a];
                    }
                }
                Ordering::Less => {
                    // Right-only page: clone it into a fresh slot when the right passes through.
                    idx_b -= 1;
                    if passthrough_right {
                        count -= 1;
                        self.page_map[count].major_value = other.page_map[idx_b].major_value;
                        self.page_map[count].index = next_page as u32;
                        next_page += 1;
                        *self.page_for_index_mut(count).unwrap() =
                            other.page_for_index(idx_b).unwrap().clone();
                    }
                }
            }
        }

        // Step 4: there are only pages left on one side now, finish processing them if the appropriate passthrough is
        //         enabled.
        if passthrough_left {
            while idx_a > 0 {
                idx_a -= 1;
                count -= 1;
                self.page_map[count] = self.page_map[idx_a];
            }
        }

        if passthrough_right {
            while idx_b > 0 {
                idx_b -= 1;
                count -= 1;
                self.page_map[count].major_value = other.page_map[idx_b].major_value;
                self.page_map[count].index = next_page as u32;
                next_page += 1;
                *self.page_for_index_mut(count).unwrap() =
                    other.page_for_index(idx_b).unwrap().clone();
            }
        }

        self.resize(new_count);
        self.recompute_length();
    }
534
    /// Drop all pages not referenced by the first `new_len` page_map entries,
    /// moving the surviving pages to the front of `self.pages`.
    fn compact(&mut self, new_len: usize) {
        // Inverse mapping: for each page slot, which (kept) page_map entry points
        // at it; usize::MAX marks slots that are no longer referenced.
        let mut old_index_to_page_map_index = Vec::<usize>::with_capacity(self.pages.len());
        old_index_to_page_map_index.resize(self.pages.len(), usize::MAX);

        for i in 0usize..new_len {
            old_index_to_page_map_index[self.page_map[i].index as usize] = i;
        }

        self.compact_pages(old_index_to_page_map_index);
    }
545
    /// Slide kept pages towards the front of `self.pages` (preserving their relative
    /// order) and rewrite the corresponding page_map indices to the new slots.
    ///
    /// `old_index_to_page_map_index[i]` is the page_map entry referencing page `i`,
    /// or `usize::MAX` if page `i` is being dropped.
    fn compact_pages(&mut self, old_index_to_page_map_index: Vec<usize>) {
        let mut write_index = 0;
        for (i, page_map_index) in old_index_to_page_map_index
            .iter()
            .enumerate()
            .take(self.pages.len())
        {
            if *page_map_index == usize::MAX {
                // Page i is unreferenced; it will be overwritten or truncated later.
                continue;
            }

            if write_index < i {
                self.pages[write_index] = self.pages[i].clone();
            }

            // Point the owning page_map entry at the page's new location.
            self.page_map[*page_map_index].index = write_index as u32;
            write_index += 1;
        }
    }
565
566    fn resize(&mut self, new_len: usize) {
567        self.page_map.resize(
568            new_len,
569            PageInfo {
570                major_value: 0,
571                index: 0,
572            },
573        );
574        self.pages.resize(new_len, BitPage::new_zeroes());
575    }
576
    /// Return the major value (top 23 bits) of the page associated with value.
    const fn get_major_value(value: u32) -> u32 {
        value >> PAGE_BITS_LOG_2
    }

    /// First (smallest) value covered by the page with the given major value.
    const fn major_start(major: u32) -> u32 {
        major << PAGE_BITS_LOG_2
    }

    /// Last (largest) value covered by the page with the given major value.
    const fn major_end(major: u32) -> u32 {
        // Note: (PAGE_BITS - 1) must be grouped to prevent overflow on addition for the largest page.
        Self::major_start(major) + (PAGE_BITS - 1)
    }
590
591    /// Returns the index in `self.pages` (if it exists) for the page with the same major as `major_value`.
592    fn page_index_for_major(&self, major_value: u32) -> Option<usize> {
593        self.page_map_index_for_major(major_value)
594            .map(|info_idx| self.page_map[info_idx].index as usize)
595    }
596
597    fn page_map_index_for_major(&self, major_value: u32) -> Option<usize> {
598        self.page_map
599            .binary_search_by(|probe| probe.major_value.cmp(&major_value))
600            .ok()
601    }
602
    /// Returns the index in `self.pages` for the page with the same major as `major_value`. Will create
    /// the page if it does not yet exist.
    fn ensure_page_index_for_major(&mut self, major_value: u32) -> usize {
        match self
            .page_map
            .binary_search_by(|probe| probe.major_value.cmp(&major_value))
        {
            Ok(map_index) => self.page_map[map_index].index as usize,
            Err(map_index_to_insert) => {
                // Not found: append a fresh zeroed page at the end of `pages` and
                // insert its map entry at the position that keeps `page_map` sorted
                // by major value.
                let page_index = self.pages.len();
                self.pages.push(BitPage::new_zeroes());
                let new_info = PageInfo {
                    index: page_index as u32,
                    major_value,
                };
                self.page_map.insert(map_index_to_insert, new_info);
                page_index
            }
        }
    }
623
    /// Return a mutable reference to the page that `value` resides in, or `None`
    /// if no such page exists.
    ///
    /// NOTE: unlike [`Self::ensure_page_for_mut`], this does NOT insert a missing
    /// page (the previous doc comment claiming insertion was incorrect).
    fn page_for_mut(&mut self, value: u32) -> Option<&mut BitPage> {
        let major_value = Self::get_major_value(value);
        self.page_for_major_mut(major_value)
    }

    /// Return a mutable reference to the page with major value equal to `major_value`,
    /// or `None` if no such page exists.
    fn page_for_major_mut(&mut self, major_value: u32) -> Option<&mut BitPage> {
        let page_index = self.page_index_for_major(major_value)?;
        self.pages.get_mut(page_index)
    }
637
638    /// Return a mutable reference to the page that `value` resides in.
639    ///
640    /// Insert a new page if it doesn't exist.
641    fn ensure_page_for_mut(&mut self, value: u32) -> &mut BitPage {
642        self.ensure_page_for_major_mut(Self::get_major_value(value))
643    }
644
645    /// Return a mutable reference to the page with major value equal to `major_value`.
646    /// Inserts a new page if it doesn't exist.
647    fn ensure_page_for_major_mut(&mut self, major_value: u32) -> &mut BitPage {
648        let page_index = self.ensure_page_index_for_major(major_value);
649        self.pages.get_mut(page_index).unwrap()
650    }
651
652    /// Return the mutable page at a given index
653    fn page_for_index_mut(&mut self, index: usize) -> Option<&mut BitPage> {
654        self.page_map
655            .get(index)
656            .and_then(|info| self.pages.get_mut(info.index as usize))
657    }
658
659    fn page_for_index(&self, index: usize) -> Option<&BitPage> {
660        self.page_map
661            .get(index)
662            .and_then(|info| self.pages.get(info.index as usize))
663    }
664}
665
666impl Extend<u32> for U32Set {
667    fn extend<U: IntoIterator<Item = u32>>(&mut self, iter: U) {
668        let mut builder = U32SetBuilder::start(self);
669        for val in iter {
670            builder.insert(val);
671        }
672        builder.finish();
673    }
674}
675
/// This helper is used to construct [`U32Set`]'s from a stream of possibly sorted values.
/// It remembers the last page index to reduce the amount of page lookups needed when inserting
/// sorted data. If given unsorted values it will still work correctly, but may be slower than just
/// repeatedly calling `insert()` on the bitset.
pub(crate) struct U32SetBuilder<'a> {
    // The set under construction.
    pub(crate) set: &'a mut U32Set,
    // Index into `set.pages` of the most recently touched page.
    last_page_index: usize,
    // Major value of the most recently touched page.
    last_major_value: u32,
}
685
impl<'a> U32SetBuilder<'a> {
    /// Begin building into `set`. The cache fields start at sentinel values that
    /// should never match a real lookup, so the first insert does a full search.
    pub(crate) fn start(set: &'a mut U32Set) -> Self {
        Self {
            set,
            last_page_index: usize::MAX,
            last_major_value: u32::MAX,
        }
    }

    /// Insert `val`, reusing the cached page when `val` falls on the same page as
    /// the previously inserted value.
    pub(crate) fn insert(&mut self, val: u32) {
        // TODO(garretrieger): additional optimization ideas:
        // - Assuming data is sorted accumulate a single element mask and only commit it to the element
        //   once the next value passes the end of the element.
        let major_value = U32Set::get_major_value(val);
        if major_value != self.last_major_value {
            // Different page than last time: look it up, creating it if needed.
            self.last_page_index = self.set.ensure_page_index_for_major(major_value);
            self.last_major_value = major_value;
        };
        if let Some(page) = self.set.pages.get_mut(self.last_page_index) {
            // insert() returns true only for newly added members, keeping `length` exact.
            self.set.length += page.insert(val) as u64;
        }
    }

    /// Finalize the build; currently a no-op.
    pub(crate) fn finish(self) {
        // we used to do some finalization and bookkeeping here, and we will
        // want to again if we optimize the impl more.
    }
}
714
/// Directory entry pairing a page's major value with its storage slot in `pages`.
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
struct PageInfo {
    /// index into pages vector of this page
    index: u32,
    /// the top 23 bits of values covered by this page
    major_value: u32,
}
723
724impl Hash for U32Set {
725    fn hash<H: std::hash::Hasher>(&self, state: &mut H) {
726        self.iter_non_empty_pages().for_each(|t| t.hash(state));
727    }
728}
729
730impl std::cmp::PartialEq for U32Set {
731    fn eq(&self, other: &Self) -> bool {
732        let mut this = self.iter_non_empty_pages();
733        let mut other = other.iter_non_empty_pages();
734
735        // Note: normally we would prefer to use zip, but we also
736        //       need to check that both iters have the same length.
737        loop {
738            match (this.next(), other.next()) {
739                (Some(a), Some(b)) if a == b => continue,
740                (None, None) => return true,
741                _ => return false,
742            }
743        }
744    }
745}
746
747impl std::cmp::Eq for U32Set {}
748
749impl std::cmp::PartialOrd for U32Set {
750    fn partial_cmp(&self, other: &Self) -> Option<core::cmp::Ordering> {
751        Some(self.cmp(other))
752    }
753}
754
755impl std::cmp::Ord for U32Set {
756    fn cmp(&self, other: &Self) -> core::cmp::Ordering {
757        let this_it = self.iter();
758        let other_it = other.iter();
759
760        for (us, them) in this_it.zip(other_it) {
761            match us.cmp(&them) {
762                core::cmp::Ordering::Equal => continue,
763                other => return other,
764            }
765        }
766
767        // all items in iter are the same: is one collection longer?
768        self.len().cmp(&other.len())
769    }
770}
771
/// Iterator over the contiguous value ranges of a [`U32Set`], merging runs that
/// continue across adjacent pages into a single range.
struct U32SetRangeIter<'a> {
    set: &'a U32Set,
    // Index into `set.page_map` of the page currently being iterated.
    page_info_index: usize,
    // Range iterator for the current page; `None` once past the last page.
    page_iter: Option<RangeIter<'a>>,
}
777
impl<'a> U32SetRangeIter<'a> {
    fn new(set: &'a U32Set) -> U32SetRangeIter<'a> {
        U32SetRangeIter {
            set,
            page_info_index: 0,
            page_iter: U32SetRangeIter::<'a>::page_iter(set, 0),
        }
    }

    /// Advance to the next page; returns true if such a page exists.
    fn move_to_next_page(&mut self) -> bool {
        self.page_info_index += 1;
        self.reset_page_iter();
        self.page_iter.is_some()
    }

    /// Recreate the range iterator for the current page from scratch.
    fn reset_page_iter(&mut self) {
        self.page_iter = U32SetRangeIter::<'a>::page_iter(self.set, self.page_info_index);
    }

    /// Range iterator for the page at `page_info_index`, or `None` if that index
    /// is past the end of the page map.
    fn page_iter(set: &'a U32Set, page_info_index: usize) -> Option<RangeIter<'a>> {
        set.page_map
            .get(page_info_index)
            .map(|pi| pi.index as usize)
            .and_then(|index| set.pages.get(index))
            .map(|p| p.iter_ranges())
    }

    /// Next range within the current page, translated from page-relative bit
    /// positions to absolute `u32` values.
    fn next_range(&mut self) -> Option<RangeInclusive<u32>> {
        // TODO(garretrieger): don't recompute page start on each call.
        let page = self.set.page_map.get(self.page_info_index)?;
        let page_start = U32Set::major_start(page.major_value);
        self.page_iter
            .as_mut()?
            .next()
            .map(|r| (r.start() + page_start)..=(r.end() + page_start))
    }
}
815
impl Iterator for U32SetRangeIter<'_> {
    type Item = RangeInclusive<u32>;

    fn next(&mut self) -> Option<Self::Item> {
        // Already exhausted every page.
        self.page_iter.as_ref()?;
        let mut current_range = self.next_range();
        loop {
            let page = self.set.page_map.get(self.page_info_index)?;
            let page_end = U32Set::major_end(page.major_value);

            let Some(range) = current_range.clone() else {
                // The current page has no more ranges, but there may be more pages.
                if !self.move_to_next_page() {
                    return None;
                }
                current_range = self.next_range();
                continue;
            };

            if *range.end() != page_end {
                // Range stops before the page boundary, so it cannot continue
                // into the next page; emit it as-is.
                break;
            }

            // The range goes right to the end of the current page and may continue into it.
            self.move_to_next_page();
            let continuation = self.next_range();
            let Some(continuation) = continuation else {
                break;
            };

            // Merge when the next page's first range starts immediately after ours.
            if *continuation.start() == *range.end() + 1 {
                current_range = Some(*range.start()..=*continuation.end());
                continue;
            }

            // Continuation range does not touch the current range, ignore it and return what we have.
            // Since we consumed an item from the new page iterator, reset it.
            self.reset_page_iter();
            break;
        }

        current_range
    }
}
860
// Unit tests for `U32Set`: membership, iteration (forward / backward / from a
// start point / by ranges), bulk insert/remove, set algebra, ordering, and the
// hash/eq contract (which must ignore the internal last-page-index cache).
#[cfg(test)]
mod test {
    use super::*;
    use std::collections::HashSet;

    #[test]
    fn len() {
        let bitset = U32Set::empty();
        assert_eq!(bitset.len(), 0);
        assert!(bitset.is_empty());
    }

    #[test]
    fn from_iter() {
        let mut expected = U32Set::empty();
        expected.extend([2, 8, 13]);

        assert_eq!(U32Set::from_iter([2, 8, 13]), expected);
        assert_eq!(U32Set::from_iter([8, 2, 13]), expected);
    }

    #[test]
    fn iter() {
        let mut bitset = U32Set::empty();
        bitset.insert(3);
        bitset.insert(8);
        bitset.insert(534);
        bitset.insert(700);
        bitset.insert(10000);
        bitset.insert(10001);
        bitset.insert(10002);

        let v: Vec<u32> = bitset.iter().collect();
        assert_eq!(v, vec![3, 8, 534, 700, 10000, 10001, 10002]);
    }

    // Inserts `ranges` into a fresh set and checks that iter_ranges()
    // reproduces them exactly. Callers pass sorted, non-adjacent,
    // non-overlapping ranges so the round trip is byte-for-byte.
    fn check_iter_ranges(ranges: Vec<RangeInclusive<u32>>) {
        let mut set = U32Set::empty();
        for range in ranges.iter() {
            set.insert_range(*range.start()..=*range.end());
        }
        let items: Vec<_> = set.iter_ranges().collect();
        assert_eq!(items, ranges);
    }

    #[test]
    fn iter_ranges() {
        check_iter_ranges(vec![0..=0]);
        check_iter_ranges(vec![4578..=4578]);
        check_iter_ranges(vec![0..=10, 4578..=4583]);
        check_iter_ranges(vec![0..=700]);
        check_iter_ranges(vec![353..=737]);

        check_iter_ranges(vec![u32::MAX..=u32::MAX]);
        check_iter_ranges(vec![(u32::MAX - 10)..=u32::MAX]);
        check_iter_ranges(vec![0..=5, (u32::MAX - 5)..=u32::MAX]);

        // Ranges ending/starting exactly at a page boundary (pages are 512 bits).
        check_iter_ranges(vec![0..=511, 513..=517]);
        check_iter_ranges(vec![512..=1023, 1025..=1027]);

        // A single range spanning multiple pages.
        check_iter_ranges(vec![1792..=2650]);
    }

    #[test]
    fn iter_ranges_zero_pages() {
        let mut set = U32Set::empty();

        // Leaves behind an allocated-but-empty page which iteration must skip.
        set.insert(1000);
        set.insert_range(300..=511);
        set.remove(1000);

        let items: Vec<_> = set.iter_ranges().collect();
        assert_eq!(items, vec![300..=511]);
    }

    #[test]
    fn iter_backwards() {
        let mut bitset = U32Set::empty();

        bitset.insert_range(1..=6);
        {
            let mut it = bitset.iter();
            assert_eq!(Some(1), it.next());
            assert_eq!(Some(6), it.next_back());
            assert_eq!(Some(5), it.next_back());
            assert_eq!(Some(2), it.next());
            assert_eq!(Some(3), it.next());
            assert_eq!(Some(4), it.next());
            assert_eq!(None, it.next());
            assert_eq!(None, it.next_back());
        }

        bitset.insert_range(700..=701);
        {
            let mut it = bitset.iter();
            assert_eq!(Some(1), it.next());
            assert_eq!(Some(701), it.next_back());
            assert_eq!(Some(700), it.next_back());
            assert_eq!(Some(6), it.next_back());
            assert_eq!(Some(5), it.next_back());
            assert_eq!(Some(2), it.next());
            assert_eq!(Some(3), it.next());
            assert_eq!(Some(4), it.next());
            assert_eq!(None, it.next());
            assert_eq!(None, it.next_back());
        }

        let v: Vec<u32> = bitset.iter().rev().collect();
        assert_eq!(vec![701, 700, 6, 5, 4, 3, 2, 1], v);
    }

    #[test]
    fn iter_from() {
        let mut bitset = U32Set::empty();
        bitset.extend([5, 7, 10, 1250, 1300, 3001]);

        assert_eq!(
            bitset.iter_from(0).collect::<Vec<u32>>(),
            vec![5, 7, 10, 1250, 1300, 3001]
        );

        // iter_from(x) is inclusive of x itself.
        assert_eq!(
            bitset.iter_from(4).collect::<Vec<u32>>(),
            vec![5, 7, 10, 1250, 1300, 3001]
        );
        assert_eq!(
            bitset.iter_from(5).collect::<Vec<u32>>(),
            vec![5, 7, 10, 1250, 1300, 3001]
        );
        assert_eq!(
            bitset.iter_from(6).collect::<Vec<u32>>(),
            vec![7, 10, 1250, 1300, 3001]
        );

        assert_eq!(
            bitset.iter_from(10).collect::<Vec<u32>>(),
            vec![10, 1250, 1300, 3001]
        );

        assert_eq!(
            bitset.iter_from(700).collect::<Vec<u32>>(),
            vec![1250, 1300, 3001]
        );

        assert_eq!(
            bitset.iter_from(1250).collect::<Vec<u32>>(),
            vec![1250, 1300, 3001]
        );
        assert_eq!(
            bitset.iter_from(1251).collect::<Vec<u32>>(),
            vec![1300, 3001]
        );

        assert_eq!(bitset.iter_from(3000).collect::<Vec<u32>>(), vec![3001]);
        assert_eq!(bitset.iter_from(3001).collect::<Vec<u32>>(), vec![3001]);
        assert_eq!(bitset.iter_from(3002).count(), 0);
        assert_eq!(bitset.iter_from(5000).count(), 0);
        assert_eq!(bitset.iter_from(u32::MAX).count(), 0);

        bitset.insert(u32::MAX);
        assert_eq!(
            bitset.iter_from(u32::MAX).collect::<Vec<u32>>(),
            vec![u32::MAX]
        );
        assert_eq!(
            bitset.iter_from(u32::MAX - 1).collect::<Vec<u32>>(),
            vec![u32::MAX]
        );

        // Starting points straddling a page boundary (511/512).
        let mut bitset = U32Set::empty();
        bitset.extend([510, 511, 512]);

        assert_eq!(
            bitset.iter_from(509).collect::<Vec<u32>>(),
            vec![510, 511, 512]
        );
        assert_eq!(
            bitset.iter_from(510).collect::<Vec<u32>>(),
            vec![510, 511, 512]
        );
        assert_eq!(bitset.iter_from(511).collect::<Vec<u32>>(), vec![511, 512]);
        assert_eq!(bitset.iter_from(512).collect::<Vec<u32>>(), vec![512]);
        assert!(bitset.iter_from(513).collect::<Vec<u32>>().is_empty());
    }

    #[test]
    fn extend() {
        // extend() (sorted fast path) and extend_unsorted() must agree for
        // both pre-sorted and unsorted input.
        let values = [3, 8, 534, 700, 10000, 10001, 10002];
        let values_unsorted = [10000, 3, 534, 700, 8, 10001, 10002];

        let mut s1 = U32Set::empty();
        let mut s2 = U32Set::empty();
        let mut s3 = U32Set::empty();
        let mut s4 = U32Set::empty();
        assert_eq!(s1.len(), 0);

        s1.extend(values.iter().copied());
        s2.extend_unsorted(values.iter().copied());
        s3.extend(values_unsorted.iter().copied());
        s4.extend_unsorted(values_unsorted.iter().copied());

        assert_eq!(s1.iter().collect::<Vec<u32>>(), values);
        assert_eq!(s2.iter().collect::<Vec<u32>>(), values);
        assert_eq!(s3.iter().collect::<Vec<u32>>(), values);
        assert_eq!(s4.iter().collect::<Vec<u32>>(), values);

        assert_eq!(s1.len(), 7);
        assert_eq!(s2.len(), 7);
        assert_eq!(s3.len(), 7);
        assert_eq!(s4.len(), 7);
    }

    #[test]
    fn insert_unordered() {
        let mut bitset = U32Set::empty();

        assert!(!bitset.contains(0));
        assert!(!bitset.contains(768));
        assert!(!bitset.contains(1678));

        assert!(bitset.insert(0));
        assert!(bitset.insert(1678));
        assert!(bitset.insert(768));

        assert!(bitset.contains(0));
        assert!(bitset.contains(768));
        assert!(bitset.contains(1678));

        assert!(!bitset.contains(1));
        assert!(!bitset.contains(769));
        assert!(!bitset.contains(1679));

        assert_eq!(bitset.len(), 3);
    }

    #[test]
    fn remove() {
        let mut bitset = U32Set::empty();

        assert!(bitset.insert(0));
        assert!(bitset.insert(511));
        assert!(bitset.insert(512));
        assert!(bitset.insert(1678));
        assert!(bitset.insert(768));

        assert_eq!(bitset.len(), 5);

        // remove() reports whether the value was actually present.
        assert!(!bitset.remove(12));
        assert!(bitset.remove(511));
        assert!(bitset.remove(512));
        assert!(!bitset.remove(512));

        assert_eq!(bitset.len(), 3);
        assert!(bitset.contains(0));
        assert!(!bitset.contains(511));
        assert!(!bitset.contains(512));
    }

    #[test]
    fn remove_all() {
        let mut bitset = U32Set::empty();
        bitset.extend([5, 7, 11, 18, 620, 2000]);

        assert_eq!(bitset.len(), 6);

        // 13 is not a member; removing it must be a harmless no-op.
        bitset.remove_all([7, 11, 13, 18, 620]);
        assert_eq!(bitset.len(), 2);
        assert_eq!(bitset.iter().collect::<Vec<u32>>(), vec![5, 2000]);
    }

    #[test]
    fn remove_range() {
        let mut bitset = U32Set::empty();
        bitset.extend([5, 7, 11, 18, 511, 620, 1023, 1024, 1200]);
        assert_eq!(bitset.len(), 9);
        bitset.remove_range(7..=620);
        assert_eq!(bitset.len(), 4);
        assert_eq!(
            bitset.iter().collect::<Vec<u32>>(),
            vec![5, 1023, 1024, 1200]
        );

        let mut bitset = U32Set::empty();
        bitset.extend([5, 7, 11, 18, 511, 620, 1023, 1024, 1200]);
        bitset.remove_range(7..=1024);
        assert_eq!(bitset.len(), 2);
        assert_eq!(bitset.iter().collect::<Vec<u32>>(), vec![5, 1200]);

        // Removing a range that intersects nothing leaves the set unchanged.
        let mut bitset = U32Set::empty();
        bitset.extend([5, 7, 11, 18, 511, 620, 1023, 1024, 1200]);
        bitset.remove_range(2000..=2100);
        assert_eq!(bitset.len(), 9);
        assert_eq!(
            bitset.iter().collect::<Vec<u32>>(),
            vec![5, 7, 11, 18, 511, 620, 1023, 1024, 1200]
        );

        // Remove all from one page
        let mut bitset = U32Set::empty();
        bitset.extend([1001, 1002, 1003, 1004]);
        bitset.remove_range(1002..=1003);
        assert!(bitset.contains(1001));
        assert!(!bitset.contains(1002));
        assert!(!bitset.contains(1003));
        assert!(bitset.contains(1004));

        bitset.remove_range(100..=200);
        assert!(bitset.contains(1001));
        assert!(!bitset.contains(1002));
        assert!(!bitset.contains(1003));
        assert!(bitset.contains(1004));

        bitset.remove_range(100..=1001);
        assert!(!bitset.contains(1001));
        assert!(!bitset.contains(1002));
        assert!(!bitset.contains(1003));
        assert!(bitset.contains(1004));
    }

    #[test]
    fn remove_range_boundary() {
        // Ranges touching the extreme values (0 and u32::MAX) must not
        // overflow or underflow internal page arithmetic.
        let mut set = U32Set::empty();

        set.remove_range(u32::MAX - 10..=u32::MAX);
        assert!(!set.contains(u32::MAX));
        set.insert_range(u32::MAX - 10..=u32::MAX);
        assert!(set.contains(u32::MAX));
        set.remove_range(u32::MAX - 10..=u32::MAX);
        assert!(!set.contains(u32::MAX));

        set.remove_range(0..=10);
        assert!(!set.contains(0));
        set.insert_range(0..=10);
        assert!(set.contains(0));
        set.remove_range(0..=10);
        assert!(!set.contains(0));
    }

    #[test]
    fn remove_to_empty_page() {
        let mut bitset = U32Set::empty();

        bitset.insert(793);
        bitset.insert(43);
        bitset.remove(793);

        assert!(bitset.contains(43));
        assert!(!bitset.contains(793));
        assert_eq!(bitset.len(), 1);
    }

    #[test]
    fn insert_max_value() {
        let mut bitset = U32Set::empty();
        assert!(!bitset.contains(u32::MAX));
        assert!(bitset.insert(u32::MAX));
        assert!(bitset.contains(u32::MAX));
        assert!(!bitset.contains(u32::MAX - 1));
        assert_eq!(bitset.len(), 1);
    }

    #[test]
    fn contains_index_cache() {
        let mut bitset = U32Set::from_iter([10, 11, 12, 2023]);
        // contains() internally uses a cache of last page index
        // ensure that outward contains() returns are unaffected
        // by the ordering of calls.
        assert!(!bitset.contains(9));
        assert!(bitset.contains(10));
        assert!(bitset.contains(11));
        assert!(bitset.contains(12));

        assert!(!bitset.contains(1200));
        assert!(!bitset.contains(2022));
        assert!(bitset.contains(2023));
        assert!(!bitset.contains(2024));

        // Alternate between pages to force the cache to be re-primed.
        assert!(bitset.contains(2023));
        assert!(bitset.contains(11));

        assert!(!bitset.contains(5000));
        assert!(bitset.contains(11));
        assert!(bitset.contains(2023));
        assert!(bitset.contains(12));
        assert!(!bitset.contains(2024));
        assert!(!bitset.contains(13));

        // Caching should also work correctly if the page map is modified between lookups
        bitset.clear();
        bitset.insert(2024);
        bitset.insert(13);

        assert!(bitset.contains(13));
        assert!(!bitset.contains(12));

        assert!(bitset.contains(2024));
        assert!(!bitset.contains(2023));
    }

    // Applies binary set operation `op` to sets built from `a` and `b`, then
    // checks both the resulting membership and that the cached length stays
    // consistent with it.
    fn check_process<A, B, C, Op>(a: A, b: B, expected: C, op: Op)
    where
        A: IntoIterator<Item = u32>,
        B: IntoIterator<Item = u32>,
        C: IntoIterator<Item = u32>,
        Op: Fn(&mut U32Set, &U32Set),
    {
        let mut result = U32Set::from_iter(a);
        let b_set = U32Set::from_iter(b);
        let expected_set = U32Set::from_iter(expected);
        // NOTE(review): result deliberately discarded — presumably this primes
        // an internal length cache that `op` must then keep up to date; confirm.
        result.len();

        op(&mut result, &b_set);
        assert_eq!(result, expected_set);
        assert_eq!(result.len(), expected_set.len());
    }

    #[test]
    fn union() {
        check_process([], [5], [5], |a, b| a.union(b));
        check_process([128], [5], [128, 5], |a, b| a.union(b));
        check_process([128], [], [128], |a, b| a.union(b));
        check_process([1280], [5], [5, 1280], |a, b| a.union(b));
        check_process([5], [1280], [5, 1280], |a, b| a.union(b));
    }

    #[test]
    fn intersect() {
        check_process([], [5], [], |a, b| a.intersect(b));
        check_process([5], [], [], |a, b| a.intersect(b));
        check_process([1, 5, 9], [5, 7], [5], |a, b| a.intersect(b));
        check_process([1, 1000, 2000], [1000], [1000], |a, b| a.intersect(b));
        check_process([1000], [1, 1000, 2000], [1000], |a, b| a.intersect(b));
        check_process([1, 1000, 2000], [1000, 5000], [1000], |a, b| a.intersect(b));
    }

    #[test]
    fn subtract() {
        check_process([], [5], [], |a, b| a.subtract(b));
        check_process([5], [], [5], |a, b| a.subtract(b));
        check_process([5, 1000], [1000], [5], |a, b| a.subtract(b));
        check_process([5, 1000], [5], [1000], |a, b| a.subtract(b));
    }

    #[test]
    fn reversed_subtract() {
        // reversed_subtract: a = b - a.
        check_process([], [5], [5], |a, b| a.reversed_subtract(b));
        check_process([5], [], [], |a, b| a.reversed_subtract(b));
        check_process([1000], [5, 1000], [5], |a, b| a.reversed_subtract(b));
        check_process([5], [5, 1000], [1000], |a, b| a.reversed_subtract(b));
    }

    // Reference implementation: builds the set one insert() at a time, to
    // compare against the bulk insert_range() path.
    fn set_for_range(first: u32, last: u32) -> U32Set {
        let mut set = U32Set::empty();
        for i in first..=last {
            set.insert(i);
        }
        set
    }

    #[test]
    fn insert_range() {
        for range in [
            (0, 0),
            (0, 364),
            (0, 511),
            (512, 1023),
            (0, 1023),
            (364, 700),
            (364, 6000),
        ] {
            let mut set = U32Set::empty();
            // NOTE(review): result deliberately discarded — presumably primes
            // the internal length cache before the bulk insert; confirm.
            set.len();
            set.insert_range(range.0..=range.1);
            assert_eq!(set, set_for_range(range.0, range.1), "{range:?}");
            assert_eq!(set.len(), (range.1 - range.0 + 1) as u64, "{range:?}");
        }
    }

    #[test]
    fn insert_range_on_existing() {
        let mut set = U32Set::empty();
        set.insert(700);
        set.insert(2000);
        set.insert_range(32..=4000);
        assert_eq!(set, set_for_range(32, 4000));
        assert_eq!(set.len(), 4000 - 32 + 1);
    }

    #[test]
    fn insert_range_max() {
        let mut set = U32Set::empty();
        set.insert_range(u32::MAX..=u32::MAX);
        assert!(set.contains(u32::MAX));
        assert_eq!(set.len(), 1);
    }

    #[test]
    fn clear() {
        let mut bitset = U32Set::empty();

        bitset.insert(13);
        bitset.insert(670);
        assert!(bitset.contains(13));
        assert!(bitset.contains(670));

        bitset.clear();
        assert!(!bitset.contains(13));
        assert!(!bitset.contains(670));
        assert_eq!(bitset.len(), 0);
    }

    #[test]
    fn hash_and_eq() {
        let mut bitset1 = U32Set::empty();
        let mut bitset2 = U32Set::empty();
        let mut bitset3 = U32Set::empty();
        let mut bitset4 = U32Set::empty();

        bitset1.insert(43);
        bitset1.insert(793);

        // Same members inserted in a different order, plus a len() call to
        // perturb internal cached state: must still compare/hash equal.
        bitset2.insert(793);
        bitset2.insert(43);
        bitset2.len();

        bitset3.insert(43);
        bitset3.insert(793);
        bitset3.insert(794);

        bitset4.insert(0);

        assert_eq!(U32Set::empty(), U32Set::empty());
        assert_eq!(bitset1, bitset2);
        assert_ne!(bitset1, bitset3);
        assert_ne!(bitset2, bitset3);
        assert_eq!(bitset4, bitset4);

        let set = HashSet::from([bitset1]);
        assert!(set.contains(&bitset2));
        assert!(!set.contains(&bitset3));
    }

    #[test]
    fn hash_and_eq_with_empty_pages() {
        let mut bitset1 = U32Set::empty();
        let mut bitset2 = U32Set::empty();
        let mut bitset3 = U32Set::empty();

        bitset1.insert(43);

        // bitset2 ends up with the same members as bitset1 but an extra
        // (now empty) allocated page; eq/hash must ignore it.
        bitset2.insert(793);
        bitset2.insert(43);
        bitset2.remove(793);

        bitset3.insert(43);
        bitset3.insert(793);

        assert_eq!(bitset1, bitset2);
        assert_ne!(bitset1, bitset3);

        let set = HashSet::from([bitset1]);
        assert!(set.contains(&bitset2));
    }

    #[test]
    fn hash_and_eq_ignore_cache() {
        let bitset1 = U32Set::from_iter([5, 1027]);
        let bitset2 = U32Set::from_iter([5, 1027]);

        // Modify the internal last page index cache to point at different pages.
        assert!(bitset1.contains(1027));
        assert!(bitset2.contains(5));

        // Hash, eq, cmp should be unaffected:
        assert_eq!(bitset1, bitset2);
        assert!(matches!(bitset1.cmp(&bitset2), Ordering::Equal));
        let set = HashSet::from([bitset1]);
        assert!(set.contains(&bitset2));
    }

    #[test]
    fn ordering() {
        // cmp() compares sets lexicographically by ascending member order.
        macro_rules! assert_ord {
            ($lhs:expr, $rhs:expr, $ord:path) => {
                assert_eq!(
                    U32Set::from_iter($lhs).cmp(&U32Set::from_iter($rhs)),
                    $ord,
                    "{:?}, {:?}",
                    $lhs,
                    $rhs
                )
            };
        }

        const EMPTY: [u32; 0] = [];
        assert_ord!(EMPTY, EMPTY, Ordering::Equal);
        assert_ord!(EMPTY, [0], Ordering::Less);
        assert_ord!([0], [0], Ordering::Equal);
        assert_ord!([0, 1, 2], [1, 2, 3], Ordering::Less);
        assert_ord!([0, 1, 4], [1, 2, 3], Ordering::Less);
        assert_ord!([1, 2, 3], [0, 2, 4], Ordering::Greater);
        assert_ord!([5, 4, 0], [1, 2, 3], Ordering::Less); // out of order
        assert_ord!([1, 2, 3], [1, 2, 3, 4], Ordering::Less); // proper prefix is less
        assert_ord!([2, 3, 4], [1, 2, 3, 4, 5], Ordering::Greater); // first element decides despite shorter length

        assert_ord!([1000, 2000, 3000], [1000, 2000, 3000, 4000], Ordering::Less); // multi-page proper prefix
        assert_ord!([1000, 1001,], [1000, 2000], Ordering::Less); // first differing element decides
        assert_ord!(
            [2000, 3000, 4000],
            [1000, 2000, 3000, 4000, 5000],
            Ordering::Greater
        ); // first element decides despite shorter length
    }
}