use std::{cmp::Ordering, fmt, hash, marker::PhantomData, num::NonZeroU32, ops};

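/// An unique index in the arena array that a handle points to.
///
/// Using [`NonZeroU32`] lets the compiler apply the niche optimization, so an
/// `Option<Handle<T>>` occupies no more space than a bare `Handle<T>`.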
type Index = NonZeroU32;

use crate::{FastIndexSet, Span};

#[derive(Clone, Copy, Debug, thiserror::Error, PartialEq)]
#[error("Handle {index} of {kind} is either not present, or inaccessible yet")]
pub struct BadHandle {
    pub kind: &'static str,
    pub index: usize,
}

impl BadHandle {
    fn new<T>(handle: Handle<T>) -> Self {
        Self {
            kind: std::any::type_name::<T>(),
            index: handle.index(),
        }
    }
}

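/// A strongly typed reference to an arena item.
///
/// A `Handle` value can be used as an index into an [`Arena`] or [`UniqueArena`].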
#[cfg_attr(feature = "serialize", derive(serde::Serialize))]
#[cfg_attr(feature = "deserialize", derive(serde::Deserialize))]
#[cfg_attr(
    any(feature = "serialize", feature = "deserialize"),
    serde(transparent)
)]
#[cfg_attr(feature = "arbitrary", derive(arbitrary::Arbitrary))]
pub struct Handle<T> {
    index: Index,
    #[cfg_attr(any(feature = "serialize", feature = "deserialize"), serde(skip))]
    marker: PhantomData<T>,
}

impl<T> Clone for Handle<T> {
    fn clone(&self) -> Self {
        *self
    }
}

impl<T> Copy for Handle<T> {}

impl<T> PartialEq for Handle<T> {
    fn eq(&self, other: &Self) -> bool {
        self.index == other.index
    }
}

impl<T> Eq for Handle<T> {}

impl<T> PartialOrd for Handle<T> {
    fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
        Some(self.cmp(other))
    }
}

impl<T> Ord for Handle<T> {
    fn cmp(&self, other: &Self) -> Ordering {
        self.index.cmp(&other.index)
    }
}

impl<T> fmt::Debug for Handle<T> {
    fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
        write!(formatter, "[{}]", self.index)
    }
}

impl<T> hash::Hash for Handle<T> {
    fn hash<H: hash::Hasher>(&self, hasher: &mut H) {
        self.index.hash(hasher)
    }
}

impl<T> Handle<T> {
    #[cfg(test)]
    pub const DUMMY: Self = Handle {
        index: unsafe { NonZeroU32::new_unchecked(u32::MAX) },
        marker: PhantomData,
    };

    pub(crate) const fn new(index: Index) -> Self {
        Handle {
            index,
            marker: PhantomData,
        }
    }

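    /// Returns the zero-based index of this handle, suitable for indexing the
    /// arena's backing storage.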
    pub const fn index(self) -> usize {
        let index = self.index.get() - 1;
        index as usize
    }

    fn from_usize(index: usize) -> Self {
        let handle_index = u32::try_from(index + 1)
            .ok()
            .and_then(Index::new)
            .expect("Failed to insert into arena. Handle overflows");
        Handle::new(handle_index)
    }

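    /// Convert a zero-based `usize` index into a `Handle<T>`, without checks.
    ///
    /// # Safety
    ///
    /// The caller must ensure that `index + 1` is a valid, non-zero `u32`
    /// value (i.e. `index < u32::MAX`); otherwise `Index::new_unchecked`
    /// receives zero, which is undefined behavior.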
    const unsafe fn from_usize_unchecked(index: usize) -> Self {
        Handle::new(Index::new_unchecked((index + 1) as u32))
    }
}

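/// A strongly typed range of handles.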
#[cfg_attr(feature = "serialize", derive(serde::Serialize))]
#[cfg_attr(feature = "deserialize", derive(serde::Deserialize))]
#[cfg_attr(
    any(feature = "serialize", feature = "deserialize"),
    serde(transparent)
)]
#[cfg_attr(feature = "arbitrary", derive(arbitrary::Arbitrary))]
#[cfg_attr(test, derive(PartialEq))]
pub struct Range<T> {
    inner: ops::Range<u32>,
    #[cfg_attr(any(feature = "serialize", feature = "deserialize"), serde(skip))]
    marker: PhantomData<T>,
}

impl<T> Range<T> {
    pub(crate) const fn erase_type(self) -> Range<()> {
        let Self { inner, marker: _ } = self;
        Range {
            inner,
            marker: PhantomData,
        }
    }
}

#[derive(Clone, Debug, thiserror::Error)]
#[cfg_attr(test, derive(PartialEq))]
#[error("Handle range {range:?} of {kind} is either not present, or inaccessible yet")]
pub struct BadRangeError {
    kind: &'static str,
    range: Range<()>,
}

impl BadRangeError {
    pub fn new<T>(range: Range<T>) -> Self {
        Self {
            kind: std::any::type_name::<T>(),
            range: range.erase_type(),
        }
    }
}

impl<T> Clone for Range<T> {
    fn clone(&self) -> Self {
        Range {
            inner: self.inner.clone(),
            marker: self.marker,
        }
    }
}

impl<T> fmt::Debug for Range<T> {
    fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
        write!(formatter, "[{}..{}]", self.inner.start + 1, self.inner.end)
    }
}

impl<T> Iterator for Range<T> {
    type Item = Handle<T>;
    fn next(&mut self) -> Option<Self::Item> {
        if self.inner.start < self.inner.end {
            self.inner.start += 1;
            Some(Handle {
                index: NonZeroU32::new(self.inner.start).unwrap(),
                marker: self.marker,
            })
        } else {
            None
        }
    }
}

impl<T> Range<T> {
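    /// Return a range enclosing handles `first` through `last`, inclusive.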
    pub fn new_from_bounds(first: Handle<T>, last: Handle<T>) -> Self {
        Self {
            inner: (first.index() as u32)..(last.index() as u32 + 1),
            marker: Default::default(),
        }
    }

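    /// Return the first and last handles included in `self`.
    ///
    /// If `self` is an empty range, there are no handles included, so
    /// return `None`.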
    pub fn first_and_last(&self) -> Option<(Handle<T>, Handle<T>)> {
        if self.inner.start < self.inner.end {
            Some((
                Handle::new(Index::new(self.inner.start + 1).unwrap()),
                Handle::new(Index::new(self.inner.end).unwrap()),
            ))
        } else {
            None
        }
    }

    pub fn zero_based_index_range(&self) -> ops::Range<u32> {
        self.inner.clone()
    }

    pub fn from_zero_based_index_range(inner: ops::Range<u32>, arena: &Arena<T>) -> Self {
        assert!(inner.start <= inner.end);
        assert!(inner.end as usize <= arena.len());
        Self {
            inner,
            marker: Default::default(),
        }
    }
}

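/// An arena holding some kind of component (e.g., type, constant,
/// instruction, etc.) that can be referenced.
///
/// Adding new items to the arena produces a strongly-typed [`Handle`].
/// The arena can be indexed using the given handle to obtain
/// a reference to the stored item.
///
/// ## Example (illustrative, mirroring the tests below)
///
/// ```ignore
/// let mut arena: Arena<u32> = Arena::new();
/// let h = arena.append(42, Span::default());
/// assert_eq!(arena[h], 42);
/// ```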
#[derive(Clone)]
#[cfg_attr(feature = "serialize", derive(serde::Serialize))]
#[cfg_attr(feature = "serialize", serde(transparent))]
#[cfg_attr(feature = "arbitrary", derive(arbitrary::Arbitrary))]
#[cfg_attr(test, derive(PartialEq))]
pub struct Arena<T> {
    data: Vec<T>,
    #[cfg_attr(feature = "serialize", serde(skip))]
    span_info: Vec<Span>,
}

impl<T> Default for Arena<T> {
    fn default() -> Self {
        Self::new()
    }
}

impl<T: fmt::Debug> fmt::Debug for Arena<T> {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        f.debug_map().entries(self.iter()).finish()
    }
}

impl<T> Arena<T> {
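    /// Create a new arena with no initial capacity allocated.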
    pub const fn new() -> Self {
        Arena {
            data: Vec::new(),
            span_info: Vec::new(),
        }
    }

    #[allow(clippy::missing_const_for_fn)]
    pub fn into_inner(self) -> Vec<T> {
        self.data
    }

    pub fn len(&self) -> usize {
        self.data.len()
    }

    pub fn is_empty(&self) -> bool {
        self.data.is_empty()
    }

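    /// Returns an iterator over the items stored in this arena, returning both
    /// the item's handle and a reference to it.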
    pub fn iter(&self) -> impl DoubleEndedIterator<Item = (Handle<T>, &T)> {
        self.data
            .iter()
            .enumerate()
            .map(|(i, v)| unsafe { (Handle::from_usize_unchecked(i), v) })
    }

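    /// Drains the arena, returning an iterator over the stored items together
    /// with their handles and spans.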
    pub fn drain(&mut self) -> impl DoubleEndedIterator<Item = (Handle<T>, T, Span)> {
        let arena = std::mem::take(self);
        arena
            .data
            .into_iter()
            .zip(arena.span_info)
            .enumerate()
            .map(|(i, (v, span))| unsafe { (Handle::from_usize_unchecked(i), v, span) })
    }

    pub fn iter_mut(&mut self) -> impl DoubleEndedIterator<Item = (Handle<T>, &mut T)> {
        self.data
            .iter_mut()
            .enumerate()
            .map(|(i, v)| unsafe { (Handle::from_usize_unchecked(i), v) })
    }

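    /// Adds a new value to the arena, returning a typed handle.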
    pub fn append(&mut self, value: T, span: Span) -> Handle<T> {
        let index = self.data.len();
        self.data.push(value);
        self.span_info.push(span);
        Handle::from_usize(index)
    }

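    /// Fetch a handle to the first element for which `fun` returns `true`,
    /// if any.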
    pub fn fetch_if<F: Fn(&T) -> bool>(&self, fun: F) -> Option<Handle<T>> {
        self.data
            .iter()
            .position(fun)
            .map(|index| unsafe { Handle::from_usize_unchecked(index) })
    }

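    /// Adds a value with a custom check for uniqueness:
    /// returns a handle pointing to an existing element if the check succeeds,
    /// or adds a new element otherwise.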
    pub fn fetch_if_or_append<F: Fn(&T, &T) -> bool>(
        &mut self,
        value: T,
        span: Span,
        fun: F,
    ) -> Handle<T> {
        if let Some(index) = self.data.iter().position(|d| fun(d, &value)) {
            unsafe { Handle::from_usize_unchecked(index) }
        } else {
            self.append(value, span)
        }
    }

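    /// Adds a value with a check for uniqueness, where the check is plain
    /// comparison.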
    pub fn fetch_or_append(&mut self, value: T, span: Span) -> Handle<T>
    where
        T: PartialEq,
    {
        self.fetch_if_or_append(value, span, T::eq)
    }

    pub fn try_get(&self, handle: Handle<T>) -> Result<&T, BadHandle> {
        self.data
            .get(handle.index())
            .ok_or_else(|| BadHandle::new(handle))
    }

    pub fn get_mut(&mut self, handle: Handle<T>) -> &mut T {
        self.data.get_mut(handle.index()).unwrap()
    }

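    /// Get the range of handles from a particular number of elements to the
    /// end of the arena.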
    pub fn range_from(&self, old_length: usize) -> Range<T> {
        Range {
            inner: old_length as u32..self.data.len() as u32,
            marker: PhantomData,
        }
    }

    pub fn clear(&mut self) {
        self.data.clear();
        self.span_info.clear();
    }

    pub fn get_span(&self, handle: Handle<T>) -> Span {
        *self
            .span_info
            .get(handle.index())
            .unwrap_or(&Span::default())
    }

    pub fn check_contains_handle(&self, handle: Handle<T>) -> Result<(), BadHandle> {
        if handle.index() < self.data.len() {
            Ok(())
        } else {
            Err(BadHandle::new(handle))
        }
    }

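    /// Assert that `range` is valid for this arena.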
    pub fn check_contains_range(&self, range: &Range<T>) -> Result<(), BadRangeError> {
        if range.inner.start > range.inner.end {
            return Err(BadRangeError::new(range.clone()));
        }

        if range.inner.start == range.inner.end {
            return Ok(());
        }

        let last_handle = Handle::new(range.inner.end.try_into().unwrap());
        if self.check_contains_handle(last_handle).is_err() {
            return Err(BadRangeError::new(range.clone()));
        }

        Ok(())
    }

    #[cfg(feature = "compact")]
    pub(crate) fn retain_mut<P>(&mut self, mut predicate: P)
    where
        P: FnMut(Handle<T>, &mut T) -> bool,
    {
        let mut index = 0;
        let mut retained = 0;
        self.data.retain_mut(|elt| {
            let handle = Handle::new(Index::new(index as u32 + 1).unwrap());
            let keep = predicate(handle, elt);

            if keep {
                self.span_info[retained] = self.span_info[index];
                retained += 1;
            }

            index += 1;
            keep
        });

        self.span_info.truncate(retained);
    }
}

#[cfg(feature = "deserialize")]
impl<'de, T> serde::Deserialize<'de> for Arena<T>
where
    T: serde::Deserialize<'de>,
{
    fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
    where
        D: serde::Deserializer<'de>,
    {
        let data = Vec::deserialize(deserializer)?;
        let span_info = std::iter::repeat(Span::default())
            .take(data.len())
            .collect();

        Ok(Self { data, span_info })
    }
}

impl<T> ops::Index<Handle<T>> for Arena<T> {
    type Output = T;
    fn index(&self, handle: Handle<T>) -> &T {
        &self.data[handle.index()]
    }
}

impl<T> ops::IndexMut<Handle<T>> for Arena<T> {
    fn index_mut(&mut self, handle: Handle<T>) -> &mut T {
        &mut self.data[handle.index()]
    }
}

impl<T> ops::Index<Range<T>> for Arena<T> {
    type Output = [T];
    fn index(&self, range: Range<T>) -> &[T] {
        &self.data[range.inner.start as usize..range.inner.end as usize]
    }
}

#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn append_non_unique() {
        let mut arena: Arena<u8> = Arena::new();
        let t1 = arena.append(0, Default::default());
        let t2 = arena.append(0, Default::default());
        assert!(t1 != t2);
        assert!(arena[t1] == arena[t2]);
    }

    #[test]
    fn append_unique() {
        let mut arena: Arena<u8> = Arena::new();
        let t1 = arena.append(0, Default::default());
        let t2 = arena.append(1, Default::default());
        assert!(t1 != t2);
        assert!(arena[t1] != arena[t2]);
    }

    #[test]
    fn fetch_or_append_non_unique() {
        let mut arena: Arena<u8> = Arena::new();
        let t1 = arena.fetch_or_append(0, Default::default());
        let t2 = arena.fetch_or_append(0, Default::default());
        assert!(t1 == t2);
        assert!(arena[t1] == arena[t2]);
    }

    #[test]
    fn fetch_or_append_unique() {
        let mut arena: Arena<u8> = Arena::new();
        let t1 = arena.fetch_or_append(0, Default::default());
        let t2 = arena.fetch_or_append(1, Default::default());
        assert!(t1 != t2);
        assert!(arena[t1] != arena[t2]);
    }
}

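/// An arena whose elements are guaranteed to be unique.
///
/// A `UniqueArena` holds a set of unique values of type `T`, each with an
/// associated [`Span`]. Inserting a value returns a [`Handle`], which can be
/// used to index the arena and obtain shared access to the `T` element;
/// indexing by handle is a direct positional lookup into the underlying
/// `FastIndexSet`.
///
/// ## Example (illustrative)
///
/// ```ignore
/// let mut arena: UniqueArena<String> = UniqueArena::new();
/// let a = arena.insert("x".to_string(), Span::default());
/// let b = arena.insert("x".to_string(), Span::default());
/// assert_eq!(a, b); // inserting a duplicate returns the existing handle
/// ```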
#[derive(Clone)]
pub struct UniqueArena<T> {
    set: FastIndexSet<T>,
    span_info: Vec<Span>,
}

impl<T> UniqueArena<T> {
    pub fn new() -> Self {
        UniqueArena {
            set: FastIndexSet::default(),
            span_info: Vec::new(),
        }
    }

    pub fn len(&self) -> usize {
        self.set.len()
    }

    pub fn is_empty(&self) -> bool {
        self.set.is_empty()
    }

    pub fn clear(&mut self) {
        self.set.clear();
        self.span_info.clear();
    }

    pub fn get_span(&self, handle: Handle<T>) -> Span {
        *self
            .span_info
            .get(handle.index())
            .unwrap_or(&Span::default())
    }

    #[cfg(feature = "compact")]
    pub(crate) fn drain_all(&mut self) -> UniqueArenaDrain<T> {
        UniqueArenaDrain {
            inner_elts: self.set.drain(..),
            inner_spans: self.span_info.drain(..),
            index: Index::new(1).unwrap(),
        }
    }
}

#[cfg(feature = "compact")]
pub(crate) struct UniqueArenaDrain<'a, T> {
    inner_elts: indexmap::set::Drain<'a, T>,
    inner_spans: std::vec::Drain<'a, Span>,
    index: Index,
}

#[cfg(feature = "compact")]
impl<'a, T> Iterator for UniqueArenaDrain<'a, T> {
    type Item = (Handle<T>, T, Span);

    fn next(&mut self) -> Option<Self::Item> {
        match self.inner_elts.next() {
            Some(elt) => {
                let handle = Handle::new(self.index);
                self.index = self.index.checked_add(1).unwrap();
                let span = self.inner_spans.next().unwrap();
                Some((handle, elt, span))
            }
            None => None,
        }
    }
}

impl<T: Eq + hash::Hash> UniqueArena<T> {
    pub fn iter(&self) -> impl DoubleEndedIterator<Item = (Handle<T>, &T)> {
        self.set.iter().enumerate().map(|(i, v)| {
            let position = i + 1;
            let index = unsafe { Index::new_unchecked(position as u32) };
            (Handle::new(index), v)
        })
    }

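    /// Insert a new value into the arena.
    ///
    /// Return a [`Handle`], which can be used to index this arena to get a
    /// shared reference to the element.
    ///
    /// If this arena already contains an element that is `Eq` to `value`,
    /// return a handle to the existing element, and drop `value`.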
    pub fn insert(&mut self, value: T, span: Span) -> Handle<T> {
        let (index, added) = self.set.insert_full(value);

        if added {
            debug_assert!(index == self.span_info.len());
            self.span_info.push(span);
        }

        debug_assert!(self.set.len() == self.span_info.len());

        Handle::from_usize(index)
    }

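    /// Replace an old value with a new value.
    ///
    /// # Panics
    ///
    /// Panics if the new value already exists in the arena, or if `old` is
    /// not a valid handle.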
    pub fn replace(&mut self, old: Handle<T>, new: T) {
        let (index, added) = self.set.insert_full(new);
        assert!(added && index == self.set.len() - 1);

        self.set.swap_remove_index(old.index()).unwrap();
    }

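    /// Return this arena's handle for `value`, if present.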
    pub fn get(&self, value: &T) -> Option<Handle<T>> {
        self.set
            .get_index_of(value)
            .map(|index| unsafe { Handle::from_usize_unchecked(index) })
    }

    pub fn get_handle(&self, handle: Handle<T>) -> Result<&T, BadHandle> {
        self.set
            .get_index(handle.index())
            .ok_or_else(|| BadHandle::new(handle))
    }

    pub fn check_contains_handle(&self, handle: Handle<T>) -> Result<(), BadHandle> {
        if handle.index() < self.set.len() {
            Ok(())
        } else {
            Err(BadHandle::new(handle))
        }
    }
}

impl<T> Default for UniqueArena<T> {
    fn default() -> Self {
        Self::new()
    }
}

impl<T: fmt::Debug + Eq + hash::Hash> fmt::Debug for UniqueArena<T> {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        f.debug_map().entries(self.iter()).finish()
    }
}

impl<T> ops::Index<Handle<T>> for UniqueArena<T> {
    type Output = T;
    fn index(&self, handle: Handle<T>) -> &T {
        &self.set[handle.index()]
    }
}

#[cfg(feature = "serialize")]
impl<T> serde::Serialize for UniqueArena<T>
where
    T: Eq + hash::Hash + serde::Serialize,
{
    fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
    where
        S: serde::Serializer,
    {
        self.set.serialize(serializer)
    }
}

#[cfg(feature = "deserialize")]
impl<'de, T> serde::Deserialize<'de> for UniqueArena<T>
where
    T: Eq + hash::Hash + serde::Deserialize<'de>,
{
    fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
    where
        D: serde::Deserializer<'de>,
    {
        let set = FastIndexSet::deserialize(deserializer)?;
        let span_info = std::iter::repeat(Span::default()).take(set.len()).collect();

        Ok(Self { set, span_info })
    }
}

#[cfg(feature = "arbitrary")]
impl<'a, T> arbitrary::Arbitrary<'a> for UniqueArena<T>
where
    T: Eq + hash::Hash + arbitrary::Arbitrary<'a>,
{
    fn arbitrary(u: &mut arbitrary::Unstructured<'a>) -> arbitrary::Result<Self> {
        let mut arena = Self::default();
        for elem in u.arbitrary_iter()? {
            arena.set.insert(elem?);
            arena.span_info.push(Span::UNDEFINED);
        }
        Ok(arena)
    }

    fn arbitrary_take_rest(u: arbitrary::Unstructured<'a>) -> arbitrary::Result<Self> {
        let mut arena = Self::default();
        for elem in u.arbitrary_take_rest_iter()? {
            arena.set.insert(elem?);
            arena.span_info.push(Span::UNDEFINED);
        }
        Ok(arena)
    }

    #[inline]
    fn size_hint(depth: usize) -> (usize, Option<usize>) {
        let depth_hint = <usize as arbitrary::Arbitrary>::size_hint(depth);
        arbitrary::size_hint::and(depth_hint, (0, None))
    }
}