// naga/arena.rs

use std::{cmp::Ordering, fmt, hash, marker::PhantomData, num::NonZeroU32, ops};

/// A unique index in the arena array that a handle points to.
/// The "non-zero" part ensures that an `Option<Handle<T>>` has
/// the same size and representation as `Handle<T>`.
type Index = NonZeroU32;

use crate::{FastIndexSet, Span};

#[derive(Clone, Copy, Debug, thiserror::Error, PartialEq)]
#[error("Handle {index} of {kind} is either not present, or inaccessible yet")]
pub struct BadHandle {
    pub kind: &'static str,
    pub index: usize,
}

impl BadHandle {
    fn new<T>(handle: Handle<T>) -> Self {
        Self {
            kind: std::any::type_name::<T>(),
            index: handle.index(),
        }
    }
}

/// A strongly typed reference to an arena item.
///
/// A `Handle` value can be used as an index into an [`Arena`] or [`UniqueArena`].
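///
/// # Example
///
/// A minimal usage sketch (not from the original docs), assuming `Arena`,
/// `Handle`, and `Span` are re-exported at the crate root:
///
/// ```
/// use naga::{Arena, Handle, Span};
///
/// let mut arena: Arena<u32> = Arena::new();
/// let handle: Handle<u32> = arena.append(42, Span::default());
/// // A handle is just a strongly typed index into its arena.
/// assert_eq!(arena[handle], 42);
/// // Because the index is a `NonZeroU32`, `Option<Handle<T>>` is no larger
/// // than `Handle<T>` itself.
/// assert_eq!(
///     std::mem::size_of::<Option<Handle<u32>>>(),
///     std::mem::size_of::<Handle<u32>>(),
/// );
/// ```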
#[cfg_attr(feature = "serialize", derive(serde::Serialize))]
#[cfg_attr(feature = "deserialize", derive(serde::Deserialize))]
#[cfg_attr(
    any(feature = "serialize", feature = "deserialize"),
    serde(transparent)
)]
#[cfg_attr(feature = "arbitrary", derive(arbitrary::Arbitrary))]
pub struct Handle<T> {
    index: Index,
    #[cfg_attr(any(feature = "serialize", feature = "deserialize"), serde(skip))]
    marker: PhantomData<T>,
}

impl<T> Clone for Handle<T> {
    fn clone(&self) -> Self {
        *self
    }
}

impl<T> Copy for Handle<T> {}

impl<T> PartialEq for Handle<T> {
    fn eq(&self, other: &Self) -> bool {
        self.index == other.index
    }
}

impl<T> Eq for Handle<T> {}

impl<T> PartialOrd for Handle<T> {
    fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
        Some(self.cmp(other))
    }
}

impl<T> Ord for Handle<T> {
    fn cmp(&self, other: &Self) -> Ordering {
        self.index.cmp(&other.index)
    }
}

impl<T> fmt::Debug for Handle<T> {
    fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
        write!(formatter, "[{}]", self.index)
    }
}

impl<T> hash::Hash for Handle<T> {
    fn hash<H: hash::Hasher>(&self, hasher: &mut H) {
        self.index.hash(hasher)
    }
}

impl<T> Handle<T> {
    #[cfg(test)]
    pub const DUMMY: Self = Handle {
        index: unsafe { NonZeroU32::new_unchecked(u32::MAX) },
        marker: PhantomData,
    };

    pub(crate) const fn new(index: Index) -> Self {
        Handle {
            index,
            marker: PhantomData,
        }
    }

    /// Returns the zero-based index of this handle.
    pub const fn index(self) -> usize {
        let index = self.index.get() - 1;
        index as usize
    }

    /// Convert a `usize` index into a `Handle<T>`.
    fn from_usize(index: usize) -> Self {
        let handle_index = u32::try_from(index + 1)
            .ok()
            .and_then(Index::new)
            .expect("Failed to insert into arena. Handle overflows");
        Handle::new(handle_index)
    }

    /// Convert a `usize` index into a `Handle<T>`, without range checks.
    const unsafe fn from_usize_unchecked(index: usize) -> Self {
        Handle::new(Index::new_unchecked((index + 1) as u32))
    }
}

/// A strongly typed range of handles.
#[cfg_attr(feature = "serialize", derive(serde::Serialize))]
#[cfg_attr(feature = "deserialize", derive(serde::Deserialize))]
#[cfg_attr(
    any(feature = "serialize", feature = "deserialize"),
    serde(transparent)
)]
#[cfg_attr(feature = "arbitrary", derive(arbitrary::Arbitrary))]
#[cfg_attr(test, derive(PartialEq))]
pub struct Range<T> {
    inner: ops::Range<u32>,
    #[cfg_attr(any(feature = "serialize", feature = "deserialize"), serde(skip))]
    marker: PhantomData<T>,
}

impl<T> Range<T> {
    pub(crate) const fn erase_type(self) -> Range<()> {
        let Self { inner, marker: _ } = self;
        Range {
            inner,
            marker: PhantomData,
        }
    }
}

// NOTE: Keep this diagnostic in sync with that of [`BadHandle`].
#[derive(Clone, Debug, thiserror::Error)]
#[cfg_attr(test, derive(PartialEq))]
#[error("Handle range {range:?} of {kind} is either not present, or inaccessible yet")]
pub struct BadRangeError {
    // This error is used for many `Handle` types, but there's no point in making this generic, so
    // we just flatten them all to `Handle<()>` here.
    kind: &'static str,
    range: Range<()>,
}

impl BadRangeError {
    pub fn new<T>(range: Range<T>) -> Self {
        Self {
            kind: std::any::type_name::<T>(),
            range: range.erase_type(),
        }
    }
}

impl<T> Clone for Range<T> {
    fn clone(&self) -> Self {
        Range {
            inner: self.inner.clone(),
            marker: self.marker,
        }
    }
}

impl<T> fmt::Debug for Range<T> {
    fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
        write!(formatter, "[{}..{}]", self.inner.start + 1, self.inner.end)
    }
}

impl<T> Iterator for Range<T> {
    type Item = Handle<T>;
    fn next(&mut self) -> Option<Self::Item> {
        if self.inner.start < self.inner.end {
            self.inner.start += 1;
            Some(Handle {
                index: NonZeroU32::new(self.inner.start).unwrap(),
                marker: self.marker,
            })
        } else {
            None
        }
    }
}

impl<T> Range<T> {
    /// Return a range enclosing handles `first` through `last`, inclusive.
    pub fn new_from_bounds(first: Handle<T>, last: Handle<T>) -> Self {
        Self {
            inner: (first.index() as u32)..(last.index() as u32 + 1),
            marker: Default::default(),
        }
    }

    /// Return the first and last handles included in `self`.
    ///
    /// If `self` is an empty range, there are no handles included, so
    /// return `None`.
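    ///
    /// # Example
    ///
    /// A minimal sketch (not from the original docs), assuming `Arena`,
    /// `Range`, and `Span` are re-exported at the crate root:
    ///
    /// ```
    /// use naga::{Arena, Range, Span};
    ///
    /// let mut arena: Arena<u32> = Arena::new();
    /// let first = arena.append(10, Span::default());
    /// arena.append(20, Span::default());
    /// let last = arena.append(30, Span::default());
    ///
    /// let range = Range::new_from_bounds(first, last);
    /// assert_eq!(range.first_and_last(), Some((first, last)));
    ///
    /// // An empty range contains no handles at all.
    /// assert_eq!(arena.range_from(arena.len()).first_and_last(), None);
    /// ```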
    pub fn first_and_last(&self) -> Option<(Handle<T>, Handle<T>)> {
        if self.inner.start < self.inner.end {
            Some((
                // `Handle`s are 1-based, while `self.inner` is a zero-based,
                // end-exclusive range: the start needs `+ 1`, and the end is
                // already the right 1-based index for the last handle.
                Handle::new(Index::new(self.inner.start + 1).unwrap()),
                Handle::new(Index::new(self.inner.end).unwrap()),
            ))
        } else {
            None
        }
    }

    /// Return the zero-based index range covered by `self`.
    pub fn zero_based_index_range(&self) -> ops::Range<u32> {
        self.inner.clone()
    }

    /// Construct a `Range` that covers the zero-based indices in `inner`.
    pub fn from_zero_based_index_range(inner: ops::Range<u32>, arena: &Arena<T>) -> Self {
        // Since `inner` is a `Range<u32>`, we only need to check that
        // the start and end are well-ordered, and that the end fits
        // within `arena`.
        assert!(inner.start <= inner.end);
        assert!(inner.end as usize <= arena.len());
        Self {
            inner,
            marker: Default::default(),
        }
    }
}

/// An arena holding some kind of component (e.g., type, constant,
/// instruction, etc.) that can be referenced.
///
/// Adding new items to the arena produces a strongly-typed [`Handle`].
/// The arena can be indexed using the given handle to obtain
/// a reference to the stored item.
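///
/// # Example
///
/// A minimal usage sketch (not from the original docs), assuming `Arena` and
/// `Span` are re-exported at the crate root:
///
/// ```
/// use naga::{Arena, Span};
///
/// let mut arena: Arena<&str> = Arena::new();
/// let a = arena.append("alpha", Span::default());
/// let b = arena.append("beta", Span::default());
///
/// // Handles index the arena much like vector indices would.
/// assert_eq!(arena[a], "alpha");
/// assert_eq!(arena[b], "beta");
/// assert_eq!(arena.len(), 2);
/// ```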
#[derive(Clone)]
#[cfg_attr(feature = "serialize", derive(serde::Serialize))]
#[cfg_attr(feature = "serialize", serde(transparent))]
#[cfg_attr(feature = "arbitrary", derive(arbitrary::Arbitrary))]
#[cfg_attr(test, derive(PartialEq))]
pub struct Arena<T> {
    /// Values of this arena.
    data: Vec<T>,
    #[cfg_attr(feature = "serialize", serde(skip))]
    span_info: Vec<Span>,
}

impl<T> Default for Arena<T> {
    fn default() -> Self {
        Self::new()
    }
}

impl<T: fmt::Debug> fmt::Debug for Arena<T> {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        f.debug_map().entries(self.iter()).finish()
    }
}

impl<T> Arena<T> {
    /// Create a new arena with no initial capacity allocated.
    pub const fn new() -> Self {
        Arena {
            data: Vec::new(),
            span_info: Vec::new(),
        }
    }

    /// Extracts the inner vector.
    #[allow(clippy::missing_const_for_fn)] // ignore due to requirement of #![feature(const_precise_live_drops)]
    pub fn into_inner(self) -> Vec<T> {
        self.data
    }

    /// Returns the current number of items stored in this arena.
    pub fn len(&self) -> usize {
        self.data.len()
    }

    /// Returns `true` if the arena contains no elements.
    pub fn is_empty(&self) -> bool {
        self.data.is_empty()
    }

    /// Returns an iterator over the items stored in this arena, returning both
    /// the item's handle and a reference to it.
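    ///
    /// # Example
    ///
    /// A minimal sketch (not from the original docs), assuming the crate-root
    /// re-exports shown:
    ///
    /// ```
    /// use naga::{Arena, Span};
    ///
    /// let mut arena: Arena<&str> = Arena::new();
    /// arena.append("a", Span::default());
    /// arena.append("b", Span::default());
    ///
    /// let mut seen = Vec::new();
    /// for (handle, value) in arena.iter() {
    ///     // `Handle::index` is the zero-based position of the item.
    ///     seen.push((handle.index(), *value));
    /// }
    /// assert_eq!(seen, vec![(0, "a"), (1, "b")]);
    /// ```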
    pub fn iter(&self) -> impl DoubleEndedIterator<Item = (Handle<T>, &T)> {
        self.data
            .iter()
            .enumerate()
            .map(|(i, v)| unsafe { (Handle::from_usize_unchecked(i), v) })
    }

    /// Drains the arena, returning an iterator over the items stored.
    pub fn drain(&mut self) -> impl DoubleEndedIterator<Item = (Handle<T>, T, Span)> {
        let arena = std::mem::take(self);
        arena
            .data
            .into_iter()
            .zip(arena.span_info)
            .enumerate()
            .map(|(i, (v, span))| unsafe { (Handle::from_usize_unchecked(i), v, span) })
    }

    /// Returns an iterator over the items stored in this arena,
    /// returning both the item's handle and a mutable reference to it.
    pub fn iter_mut(&mut self) -> impl DoubleEndedIterator<Item = (Handle<T>, &mut T)> {
        self.data
            .iter_mut()
            .enumerate()
            .map(|(i, v)| unsafe { (Handle::from_usize_unchecked(i), v) })
    }

    /// Adds a new value to the arena, returning a typed handle.
    pub fn append(&mut self, value: T, span: Span) -> Handle<T> {
        let index = self.data.len();
        self.data.push(value);
        self.span_info.push(span);
        Handle::from_usize(index)
    }

    /// Fetch the handle of the first existing element that satisfies `fun`, if any.
    pub fn fetch_if<F: Fn(&T) -> bool>(&self, fun: F) -> Option<Handle<T>> {
        self.data
            .iter()
            .position(fun)
            .map(|index| unsafe { Handle::from_usize_unchecked(index) })
    }

    /// Adds a value with a custom check for uniqueness: returns a handle
    /// pointing to an existing element if the check succeeds, or adds a new
    /// element otherwise.
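    ///
    /// # Example
    ///
    /// A sketch of deduplicating by a custom criterion, here only the first
    /// tuple field (not from the original docs; assumes the crate-root
    /// re-exports shown):
    ///
    /// ```
    /// use naga::{Arena, Span};
    ///
    /// let mut arena: Arena<(u32, &str)> = Arena::new();
    /// let a = arena.fetch_if_or_append((1, "first"), Span::default(), |x, y| x.0 == y.0);
    /// // Key `1` already exists, so the existing element is reused and
    /// // `(1, "second")` is dropped.
    /// let b = arena.fetch_if_or_append((1, "second"), Span::default(), |x, y| x.0 == y.0);
    /// assert_eq!(a, b);
    /// assert_eq!(arena[b], (1, "first"));
    /// assert_eq!(arena.len(), 1);
    /// ```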
    pub fn fetch_if_or_append<F: Fn(&T, &T) -> bool>(
        &mut self,
        value: T,
        span: Span,
        fun: F,
    ) -> Handle<T> {
        if let Some(index) = self.data.iter().position(|d| fun(d, &value)) {
            unsafe { Handle::from_usize_unchecked(index) }
        } else {
            self.append(value, span)
        }
    }

    /// Adds a value with a check for uniqueness, where the check is plain comparison.
    pub fn fetch_or_append(&mut self, value: T, span: Span) -> Handle<T>
    where
        T: PartialEq,
    {
        self.fetch_if_or_append(value, span, T::eq)
    }

    pub fn try_get(&self, handle: Handle<T>) -> Result<&T, BadHandle> {
        self.data
            .get(handle.index())
            .ok_or_else(|| BadHandle::new(handle))
    }

    /// Get a mutable reference to an element in the arena.
    pub fn get_mut(&mut self, handle: Handle<T>) -> &mut T {
        self.data.get_mut(handle.index()).unwrap()
    }

    /// Get the range of handles from a particular number of elements to the end.
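    ///
    /// # Example
    ///
    /// A minimal sketch (not from the original docs), assuming the crate-root
    /// re-exports shown:
    ///
    /// ```
    /// use naga::{Arena, Span};
    ///
    /// let mut arena: Arena<u32> = Arena::new();
    /// arena.append(1, Span::default());
    /// let old_len = arena.len();
    /// arena.append(2, Span::default());
    /// arena.append(3, Span::default());
    ///
    /// let range = arena.range_from(old_len);
    /// // Indexing with the range yields the newly appended elements...
    /// assert_eq!(arena[range.clone()], [2, 3]);
    /// // ...and iterating the range yields their handles.
    /// let appended: Vec<u32> = range.map(|h| arena[h]).collect();
    /// assert_eq!(appended, vec![2, 3]);
    /// ```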
    pub fn range_from(&self, old_length: usize) -> Range<T> {
        Range {
            inner: old_length as u32..self.data.len() as u32,
            marker: PhantomData,
        }
    }

    /// Clears the arena, keeping all allocations.
    pub fn clear(&mut self) {
        self.data.clear();
        // Clear the spans as well, so `span_info` stays in sync with `data`.
        self.span_info.clear();
    }

    pub fn get_span(&self, handle: Handle<T>) -> Span {
        *self
            .span_info
            .get(handle.index())
            .unwrap_or(&Span::default())
    }

    /// Check that `handle` is valid for this arena.
    pub fn check_contains_handle(&self, handle: Handle<T>) -> Result<(), BadHandle> {
        if handle.index() < self.data.len() {
            Ok(())
        } else {
            Err(BadHandle::new(handle))
        }
    }

    /// Check that `range` is valid for this arena.
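    ///
    /// # Example
    ///
    /// A minimal sketch (not from the original docs), assuming the crate-root
    /// re-exports shown:
    ///
    /// ```
    /// use naga::{Arena, Span};
    ///
    /// let mut arena: Arena<u32> = Arena::new();
    /// let before = arena.len();
    /// arena.append(10, Span::default());
    /// arena.append(20, Span::default());
    ///
    /// let range = arena.range_from(before);
    /// assert!(arena.check_contains_range(&range).is_ok());
    ///
    /// // The same range is rejected by an arena too small to contain it.
    /// let other: Arena<u32> = Arena::new();
    /// assert!(other.check_contains_range(&range).is_err());
    /// ```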
    pub fn check_contains_range(&self, range: &Range<T>) -> Result<(), BadRangeError> {
        // Since `range.inner` is a `Range<u32>`, we only need to check that the
        // start precedes the end, and that the end is in range.
        if range.inner.start > range.inner.end {
            return Err(BadRangeError::new(range.clone()));
        }

        // Empty ranges are tolerated: they can be produced by compaction.
        if range.inner.start == range.inner.end {
            return Ok(());
        }

        // `range.inner` is zero-based, but end-exclusive, so `range.inner.end`
        // is actually the right one-based index for the last handle within the
        // range.
        let last_handle = Handle::new(range.inner.end.try_into().unwrap());
        if self.check_contains_handle(last_handle).is_err() {
            return Err(BadRangeError::new(range.clone()));
        }

        Ok(())
    }

    #[cfg(feature = "compact")]
    pub(crate) fn retain_mut<P>(&mut self, mut predicate: P)
    where
        P: FnMut(Handle<T>, &mut T) -> bool,
    {
        let mut index = 0;
        let mut retained = 0;
        self.data.retain_mut(|elt| {
            let handle = Handle::new(Index::new(index as u32 + 1).unwrap());
            let keep = predicate(handle, elt);

            // Since `predicate` needs mutable access to each element,
            // we can't feasibly call it twice, so we have to compact
            // spans by hand in parallel as part of this iteration.
            if keep {
                self.span_info[retained] = self.span_info[index];
                retained += 1;
            }

            index += 1;
            keep
        });

        self.span_info.truncate(retained);
    }
}

#[cfg(feature = "deserialize")]
impl<'de, T> serde::Deserialize<'de> for Arena<T>
where
    T: serde::Deserialize<'de>,
{
    fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
    where
        D: serde::Deserializer<'de>,
    {
        let data = Vec::deserialize(deserializer)?;
        let span_info = std::iter::repeat(Span::default())
            .take(data.len())
            .collect();

        Ok(Self { data, span_info })
    }
}

impl<T> ops::Index<Handle<T>> for Arena<T> {
    type Output = T;
    fn index(&self, handle: Handle<T>) -> &T {
        &self.data[handle.index()]
    }
}

impl<T> ops::IndexMut<Handle<T>> for Arena<T> {
    fn index_mut(&mut self, handle: Handle<T>) -> &mut T {
        &mut self.data[handle.index()]
    }
}

impl<T> ops::Index<Range<T>> for Arena<T> {
    type Output = [T];
    fn index(&self, range: Range<T>) -> &[T] {
        &self.data[range.inner.start as usize..range.inner.end as usize]
    }
}

#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn append_non_unique() {
        let mut arena: Arena<u8> = Arena::new();
        let t1 = arena.append(0, Default::default());
        let t2 = arena.append(0, Default::default());
        assert!(t1 != t2);
        assert!(arena[t1] == arena[t2]);
    }

    #[test]
    fn append_unique() {
        let mut arena: Arena<u8> = Arena::new();
        let t1 = arena.append(0, Default::default());
        let t2 = arena.append(1, Default::default());
        assert!(t1 != t2);
        assert!(arena[t1] != arena[t2]);
    }

    #[test]
    fn fetch_or_append_non_unique() {
        let mut arena: Arena<u8> = Arena::new();
        let t1 = arena.fetch_or_append(0, Default::default());
        let t2 = arena.fetch_or_append(0, Default::default());
        assert!(t1 == t2);
        assert!(arena[t1] == arena[t2])
    }

    #[test]
    fn fetch_or_append_unique() {
        let mut arena: Arena<u8> = Arena::new();
        let t1 = arena.fetch_or_append(0, Default::default());
        let t2 = arena.fetch_or_append(1, Default::default());
        assert!(t1 != t2);
        assert!(arena[t1] != arena[t2]);
    }
}

/// An arena whose elements are guaranteed to be unique.
///
/// A `UniqueArena` holds a set of unique values of type `T`, each with an
/// associated [`Span`]. Inserting a value returns a `Handle<T>`, which can be
/// used to index the `UniqueArena` and obtain shared access to the `T` element.
/// Access via a `Handle` is an array lookup - no hash lookup is necessary.
///
/// The element type must implement `Eq` and `Hash`. Insertions of equivalent
/// elements, according to `Eq`, all return the same `Handle`.
///
/// Once inserted, elements may not be mutated.
///
/// `UniqueArena` is similar to [`Arena`]: If `Arena` is vector-like,
/// `UniqueArena` is `HashSet`-like.
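///
/// # Example
///
/// A minimal usage sketch (not from the original docs), assuming `UniqueArena`
/// and `Span` are re-exported at the crate root:
///
/// ```
/// use naga::{Span, UniqueArena};
///
/// let mut arena: UniqueArena<String> = UniqueArena::new();
/// let a = arena.insert("sampler".to_string(), Span::default());
/// // Inserting an equal value returns the handle of the existing element.
/// let b = arena.insert("sampler".to_string(), Span::default());
/// assert_eq!(a, b);
/// assert_eq!(arena.len(), 1);
/// assert_eq!(arena[a], "sampler");
/// ```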
#[derive(Clone)]
pub struct UniqueArena<T> {
    set: FastIndexSet<T>,

    /// Spans for the elements, indexed by handle.
    ///
    /// The length of this vector is always equal to `set.len()`. `FastIndexSet`
    /// promises that its elements "are indexed in a compact range, without
    /// holes in the range 0..set.len()", so we can always use the indices
    /// returned by insertion as indices into this vector.
    span_info: Vec<Span>,
}

impl<T> UniqueArena<T> {
    /// Create a new arena with no initial capacity allocated.
    pub fn new() -> Self {
        UniqueArena {
            set: FastIndexSet::default(),
            span_info: Vec::new(),
        }
    }

    /// Return the current number of items stored in this arena.
    pub fn len(&self) -> usize {
        self.set.len()
    }

    /// Return `true` if the arena contains no elements.
    pub fn is_empty(&self) -> bool {
        self.set.is_empty()
    }

    /// Clears the arena, keeping all allocations.
    pub fn clear(&mut self) {
        self.set.clear();
        self.span_info.clear();
    }

    /// Return the span associated with `handle`.
    ///
    /// If a value has been inserted multiple times, the span returned is the
    /// one provided with the first insertion.
    pub fn get_span(&self, handle: Handle<T>) -> Span {
        *self
            .span_info
            .get(handle.index())
            .unwrap_or(&Span::default())
    }

    #[cfg(feature = "compact")]
    pub(crate) fn drain_all(&mut self) -> UniqueArenaDrain<T> {
        UniqueArenaDrain {
            inner_elts: self.set.drain(..),
            inner_spans: self.span_info.drain(..),
            index: Index::new(1).unwrap(),
        }
    }
}

#[cfg(feature = "compact")]
pub(crate) struct UniqueArenaDrain<'a, T> {
    inner_elts: indexmap::set::Drain<'a, T>,
    inner_spans: std::vec::Drain<'a, Span>,
    index: Index,
}

#[cfg(feature = "compact")]
impl<'a, T> Iterator for UniqueArenaDrain<'a, T> {
    type Item = (Handle<T>, T, Span);

    fn next(&mut self) -> Option<Self::Item> {
        match self.inner_elts.next() {
            Some(elt) => {
                let handle = Handle::new(self.index);
                self.index = self.index.checked_add(1).unwrap();
                let span = self.inner_spans.next().unwrap();
                Some((handle, elt, span))
            }
            None => None,
        }
    }
}

impl<T: Eq + hash::Hash> UniqueArena<T> {
    /// Returns an iterator over the items stored in this arena, returning both
    /// the item's handle and a reference to it.
    pub fn iter(&self) -> impl DoubleEndedIterator<Item = (Handle<T>, &T)> {
        self.set.iter().enumerate().map(|(i, v)| {
            let position = i + 1;
            let index = unsafe { Index::new_unchecked(position as u32) };
            (Handle::new(index), v)
        })
    }

    /// Insert a new value into the arena.
    ///
    /// Return a [`Handle<T>`], which can be used to index this arena to get a
    /// shared reference to the element.
    ///
    /// If this arena already contains an element that is `Eq` to `value`,
    /// return a `Handle` to the existing element, and drop `value`.
    ///
    /// If `value` is inserted into the arena, associate `span` with
    /// it. An element's span can be retrieved with the [`get_span`]
    /// method.
    ///
    /// [`Handle<T>`]: Handle
    /// [`get_span`]: UniqueArena::get_span
    pub fn insert(&mut self, value: T, span: Span) -> Handle<T> {
        let (index, added) = self.set.insert_full(value);

        if added {
            debug_assert!(index == self.span_info.len());
            self.span_info.push(span);
        }

        debug_assert!(self.set.len() == self.span_info.len());

        Handle::from_usize(index)
    }

    /// Replace an old value with a new value.
    ///
    /// # Panics
    ///
    /// - if the old value is not in the arena
    /// - if the new value already exists in the arena
    pub fn replace(&mut self, old: Handle<T>, new: T) {
        let (index, added) = self.set.insert_full(new);
        assert!(added && index == self.set.len() - 1);

        self.set.swap_remove_index(old.index()).unwrap();
    }

    /// Return this arena's handle for `value`, if present.
    ///
    /// If this arena already contains an element equal to `value`,
    /// return its handle. Otherwise, return `None`.
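    ///
    /// # Example
    ///
    /// A minimal sketch (not from the original docs), assuming the crate-root
    /// re-exports shown:
    ///
    /// ```
    /// use naga::{Span, UniqueArena};
    ///
    /// let mut arena: UniqueArena<String> = UniqueArena::new();
    /// let handle = arena.insert("texture".to_string(), Span::default());
    ///
    /// assert_eq!(arena.get(&"texture".to_string()), Some(handle));
    /// assert_eq!(arena.get(&"sampler".to_string()), None);
    /// ```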
    pub fn get(&self, value: &T) -> Option<Handle<T>> {
        self.set
            .get_index_of(value)
            .map(|index| unsafe { Handle::from_usize_unchecked(index) })
    }

    /// Return this arena's value at `handle`, if that is a valid handle.
    pub fn get_handle(&self, handle: Handle<T>) -> Result<&T, BadHandle> {
        self.set
            .get_index(handle.index())
            .ok_or_else(|| BadHandle::new(handle))
    }

    /// Check that `handle` is valid for this arena.
    pub fn check_contains_handle(&self, handle: Handle<T>) -> Result<(), BadHandle> {
        if handle.index() < self.set.len() {
            Ok(())
        } else {
            Err(BadHandle::new(handle))
        }
    }
}

impl<T> Default for UniqueArena<T> {
    fn default() -> Self {
        Self::new()
    }
}

impl<T: fmt::Debug + Eq + hash::Hash> fmt::Debug for UniqueArena<T> {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        f.debug_map().entries(self.iter()).finish()
    }
}

impl<T> ops::Index<Handle<T>> for UniqueArena<T> {
    type Output = T;
    fn index(&self, handle: Handle<T>) -> &T {
        &self.set[handle.index()]
    }
}

#[cfg(feature = "serialize")]
impl<T> serde::Serialize for UniqueArena<T>
where
    T: Eq + hash::Hash + serde::Serialize,
{
    fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
    where
        S: serde::Serializer,
    {
        self.set.serialize(serializer)
    }
}

#[cfg(feature = "deserialize")]
impl<'de, T> serde::Deserialize<'de> for UniqueArena<T>
where
    T: Eq + hash::Hash + serde::Deserialize<'de>,
{
    fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
    where
        D: serde::Deserializer<'de>,
    {
        let set = FastIndexSet::deserialize(deserializer)?;
        let span_info = std::iter::repeat(Span::default()).take(set.len()).collect();

        Ok(Self { set, span_info })
    }
}

// Note: largely borrowed from the `HashSet` implementation.
#[cfg(feature = "arbitrary")]
impl<'a, T> arbitrary::Arbitrary<'a> for UniqueArena<T>
where
    T: Eq + hash::Hash + arbitrary::Arbitrary<'a>,
{
    fn arbitrary(u: &mut arbitrary::Unstructured<'a>) -> arbitrary::Result<Self> {
        let mut arena = Self::default();
        for elem in u.arbitrary_iter()? {
            arena.set.insert(elem?);
            arena.span_info.push(Span::UNDEFINED);
        }
        Ok(arena)
    }

    fn arbitrary_take_rest(u: arbitrary::Unstructured<'a>) -> arbitrary::Result<Self> {
        let mut arena = Self::default();
        for elem in u.arbitrary_take_rest_iter()? {
            arena.set.insert(elem?);
            arena.span_info.push(Span::UNDEFINED);
        }
        Ok(arena)
    }

    #[inline]
    fn size_hint(depth: usize) -> (usize, Option<usize>) {
        let depth_hint = <usize as arbitrary::Arbitrary>::size_hint(depth);
        arbitrary::size_hint::and(depth_hint, (0, None))
    }
}