Thanks for using Compiler Explorer
Sponsors
Jakt
C++
Ada
Analysis
Android Java
Android Kotlin
Assembly
C
C3
Carbon
C++ (Circle)
CIRCT
Clean
CMake
CMakeScript
COBOL
C++ for OpenCL
MLIR
Cppx
Cppx-Blue
Cppx-Gold
Cpp2-cppfront
Crystal
C#
CUDA C++
D
Dart
Elixir
Erlang
Fortran
F#
Go
Haskell
HLSL
Hook
Hylo
ispc
Java
Julia
Kotlin
LLVM IR
LLVM MIR
Modula-2
Nim
Objective-C
Objective-C++
OCaml
OpenCL C
Pascal
Pony
Python
Racket
Ruby
Rust
Snowball
Scala
Solidity
Spice
Swift
LLVM TableGen
Toit
TypeScript Native
V
Vala
Visual Basic
Zig
JavaScript
GIMPLE
rust source #1
Output
Compile to binary object
Link to binary
Execute the code
Intel asm syntax
Demangle identifiers
Verbose demangling
Filters
Unused labels
Library functions
Directives
Comments
Horizontal whitespace
Debug intrinsics
Compiler
BPF gcc (trunk)
mrustc (master)
rustc 1.0.0
rustc 1.1.0
rustc 1.10.0
rustc 1.11.0
rustc 1.12.0
rustc 1.13.0
rustc 1.14.0
rustc 1.15.1
rustc 1.16.0
rustc 1.17.0
rustc 1.18.0
rustc 1.19.0
rustc 1.2.0
rustc 1.20.0
rustc 1.21.0
rustc 1.22.0
rustc 1.23.0
rustc 1.24.0
rustc 1.25.0
rustc 1.26.0
rustc 1.27.0
rustc 1.27.1
rustc 1.28.0
rustc 1.29.0
rustc 1.3.0
rustc 1.30.0
rustc 1.31.0
rustc 1.32.0
rustc 1.33.0
rustc 1.34.0
rustc 1.35.0
rustc 1.36.0
rustc 1.37.0
rustc 1.38.0
rustc 1.39.0
rustc 1.4.0
rustc 1.40.0
rustc 1.41.0
rustc 1.42.0
rustc 1.43.0
rustc 1.44.0
rustc 1.45.0
rustc 1.45.2
rustc 1.46.0
rustc 1.47.0
rustc 1.48.0
rustc 1.49.0
rustc 1.5.0
rustc 1.50.0
rustc 1.51.0
rustc 1.52.0
rustc 1.53.0
rustc 1.54.0
rustc 1.55.0
rustc 1.56.0
rustc 1.57.0
rustc 1.58.0
rustc 1.59.0
rustc 1.6.0
rustc 1.60.0
rustc 1.61.0
rustc 1.62.0
rustc 1.63.0
rustc 1.64.0
rustc 1.65.0
rustc 1.66.0
rustc 1.67.0
rustc 1.68.0
rustc 1.69.0
rustc 1.7.0
rustc 1.70.0
rustc 1.71.0
rustc 1.72.0
rustc 1.73.0
rustc 1.74.0
rustc 1.75.0
rustc 1.76.0
rustc 1.77.0
rustc 1.78.0
rustc 1.8.0
rustc 1.9.0
rustc beta
rustc nightly
rustc-cg-gcc (master)
x86-64 GCCRS (GCC master)
x86-64 GCCRS (GCCRS master)
Options
Source code
//! https://twitter.com/joseph_h_garvin/status/1597272949098438656 //! https://users.rust-lang.org/t/shrinking-bitset-with-compile-time-known-length/84244 #![allow(type_alias_bounds)] use core::{ any::type_name, fmt::{self, Debug}, iter::FusedIterator, marker::PhantomData as P, mem::size_of, ops::{Add, Range, Sub}, }; use num_traits::PrimInt; use typenum::{ consts::{U0, U128, U16, U32, U64, U8}, Diff, False, Gr, IsGreater, IsLessOrEqual, LeEq, Sum, True, Unsigned, }; #[doc(hidden)] pub trait Compute { type Result: ?Sized; } #[doc(hidden)] pub struct Ternary<Cond, IfTrue: ?Sized, Else: ?Sized>(P<(Cond, P<IfTrue>, Else)>); impl<T: ?Sized, F: ?Sized> Compute for Ternary<True, T, F> { type Result = T; } impl<T: ?Sized, F: ?Sized> Compute for Ternary<False, T, F> { type Result = F; } type Cond<C, T, F> = <Ternary<C, T, F> as Compute>::Result; trait GetTypenumType { type Result: Unsigned; } #[doc(hidden)] pub trait GetStorage { type Storage: PrimInt; } #[doc(hidden)] pub struct TypenumTyToStorage<N: Unsigned>(P<N>); type StorageForLen<N: Unsigned> = <TypenumTyToStorage<N> as GetStorage>::Storage; type CmpU64<N> = Cond<LeEq<N, U64>, u64, u128>; type CmpU32<N> = Cond<LeEq<N, U32>, u32, CmpU64<N>>; type CmpU16<N> = Cond<LeEq<N, U16>, u16, CmpU32<N>>; type CmpU8<N> = Cond<LeEq<N, U8>, u8, CmpU16<N>>; impl<N: Unsigned> GetStorage for TypenumTyToStorage<N> where // N: IsLessOrEqual<U128, Output = True>, // it's up to the user of this trait to enforce this; we return 128 if N > 64 N: IsGreater<U0, Output = True>, N: IsLessOrEqual<U64>, Ternary<LeEq<N, U64>, u64, u128>: Compute, CmpU64<N>: PrimInt, N: IsLessOrEqual<U32>, Ternary<LeEq<N, U32>, u32, CmpU64<N>>: Compute, CmpU32<N>: PrimInt, N: IsLessOrEqual<U16>, Ternary<LeEq<N, U16>, u16, CmpU32<N>>: Compute, CmpU16<N>: PrimInt, N: IsLessOrEqual<U8>, Ternary<LeEq<N, U8>, u8, CmpU16<N>>: Compute, CmpU8<N>: PrimInt, { type Storage = CmpU8<N>; } pub trait BitSetStorageAccess { fn try_get(&self, bit: usize) -> Result<bool, ()>; // 
TODO: error type fn try_set(&mut self, bit: usize, val: bool) -> Result<(), ()>; // TODO: error type #[inline(always)] fn get(&self, bit: usize) -> bool { self.try_get(bit).unwrap() } #[inline(always)] fn set(&mut self, bit: usize, val: bool) { self.try_set(bit, val).unwrap() } } impl<S: BitSetStorageAccess> BitSetStorageAccess for &'_ S { #[inline(always)] fn try_get(&self, bit: usize) -> Result<bool, ()> { S::try_get(self, bit) } #[inline(always)] fn try_set(&mut self, _: usize, _: bool) -> Result<(), ()> { Err(()) // yuck, TODO: split trait into read/write? } #[inline(always)] fn get(&self, bit: usize) -> bool { S::get(self, bit) } #[inline(always)] fn set(&mut self, _: usize, _: bool) { unimplemented!() } } #[doc(hidden)] #[derive(Debug, Default, Clone, Copy)] pub struct Empty; #[doc(hidden)] pub type Sentinel<Len: Unsigned> = BitSetStorageNode<Empty, Len, U0, ()>; impl BitSetStorageAccess for () { fn try_get(&self, _: usize) -> Result<bool, ()> { unreachable!() } fn try_set(&mut self, _: usize, _: bool) -> Result<(), ()> { unreachable!() } fn get(&self, _: usize) -> bool { unreachable!() } fn set(&mut self, _: usize, _: bool) { unreachable!() } } impl<L: Unsigned> BitSetStorageAccess for Sentinel<L> { #[inline(always)] fn try_get(&self, _: usize) -> Result<bool, ()> { Err(()) } #[inline(always)] fn try_set(&mut self, _: usize, _: bool) -> Result<(), ()> { Err(()) } #[inline(always)] fn get(&self, bit: usize) -> bool { panic!( "out of bounds: attempted to get index {bit} in a {} element bitset", L::USIZE ) } #[inline(always)] fn set(&mut self, bit: usize, _: bool) { panic!( "out of bounds: attempted to set index {bit} in a {} element bitset", L::USIZE ) } } #[doc(hidden)] pub struct BitSetStorageNode< Storage, WidthOffset: Unsigned, Len: Unsigned, Rest: BitSetStorageAccess, > { inner: Storage, rest: Rest, _width: P<(WidthOffset, Len)>, } impl<S, O: Unsigned, L: Unsigned, Rest: BitSetStorageAccess + Debug> Debug for BitSetStorageNode<S, O, L, Rest> { fn 
fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { f.debug_struct("BitSetStorageNode") .field("storage_ty", &type_name::<S>()) .field("range", &Self::range()) .field("rest", &self.rest) .finish() } } impl<S: Default, O: Unsigned, L: Unsigned, Rest: BitSetStorageAccess + Default> Default for BitSetStorageNode<S, O, L, Rest> { fn default() -> Self { Self { inner: Default::default(), rest: Default::default(), _width: P, } } } impl<S, O: Unsigned, L: Unsigned, R: BitSetStorageAccess> BitSetStorageNode<S, O, L, R> { #[inline(always)] const fn range() -> Range<usize> { debug_assert!( (size_of::<S>() * 8) >= L::USIZE, // "expected type {} (size = {} bytes) to have >= {} bits", // type_name::<S>(), // size_of::<S>(), // L::USIZE ); O::USIZE..(O::USIZE + L::USIZE) } #[inline(always)] fn relative(bit: usize) -> Option<usize> { if Self::range().contains(&bit) { Some(bit - O::USIZE) } else { None } } } impl<Storage, Offs, Len, Rest> BitSetStorageAccess for BitSetStorageNode<Storage, Offs, Len, Rest> where Storage: PrimInt, Offs: Unsigned, Len: Unsigned, Rest: BitSetStorageAccess, { #[inline(always)] fn try_get(&self, bit: usize) -> Result<bool, ()> { if let Some(bit_idx) = Self::relative(bit) { let zero = Storage::zero(); let mask = Storage::one() << bit_idx; Ok((self.inner & mask) != zero) } else { self.rest.try_get(bit) } } fn try_set(&mut self, bit: usize, val: bool) -> Result<(), ()> { if let Some(bit_idx) = Self::relative(bit) { let val = if val { Storage::one() } else { Storage::zero() } << bit_idx; self.inner = self.inner | val; Ok(()) } else { self.rest.try_set(bit, val) } } } #[doc(hidden)] pub trait GetStorageNodes { type Top: BitSetStorageAccess + Default; } // We prefer having fewer storage nodes over using as little space as possible // here; i.e. we represent 80 bit bitsets as 1 `u128` instead of as a `u64` and // a `u16`. 
#[doc(hidden)] pub struct LenToRootStorageNodeFewestNodes<Len: Unsigned, Offset: Unsigned = U0>((Len, Offset)); type FewestNodes<L: Unsigned, Offs = U0> = <LenToRootStorageNodeFewestNodes<L, Offs> as GetStorageNodes>::Top; #[doc(hidden)] pub struct LenToRootStorageNodeFewestNodesRecurse<Len: Unsigned, Offset: Unsigned, GreaterThan128>( (Len, Offset, GreaterThan128), ); impl<L: Unsigned, O: Unsigned> GetStorageNodes for LenToRootStorageNodeFewestNodesRecurse<L, O, False> { type Top = (); } impl<L: Unsigned, O: Unsigned> GetStorageNodes for LenToRootStorageNodeFewestNodesRecurse<L, O, True> where O: Add<U128>, Sum<O, U128>: Unsigned, L: IsGreater<U128, Output = True>, L: Sub<U128>, Diff<L, U128>: Unsigned, LenToRootStorageNodeFewestNodes<Diff<L, U128>, Sum<O, U128>>: GetStorageNodes, { type Top = FewestNodes<Diff<L, U128>, Sum<O, U128>>; } // if <= 128 bits, this is the final storage node: type LastNode<O, L> = BitSetStorageNode<StorageForLen<L>, O, L, Sentinel<Sum<O, L>>>; // if > 128 bits, add a 128 bit node and then recurse (with 128 subtracted from the length): type RecurseNode<O, L> = <LenToRootStorageNodeFewestNodesRecurse<L, O, Gr<L, U128>> as GetStorageNodes>::Top; type StorageTop<O, L> = Cond<Gr<L, U128>, BitSetStorageNode<u128, O, U128, RecurseNode<O, L>>, LastNode<O, L>>; impl<L: Unsigned, O: Unsigned> GetStorageNodes for LenToRootStorageNodeFewestNodes<L, O> where L: IsGreater<U128>, O: Add<L>, Sum<O, L>: Unsigned, LenToRootStorageNodeFewestNodesRecurse<L, O, Gr<L, U128>>: GetStorageNodes, TypenumTyToStorage<L>: GetStorage, LastNode<O, L>: BitSetStorageAccess, Ternary<Gr<L, U128>, BitSetStorageNode<u128, O, U128, RecurseNode<O, L>>, LastNode<O, L>>: Compute, StorageTop<O, L>: BitSetStorageAccess + Sized + Default, { type Top = StorageTop<O, L>; } pub struct BitSet<Len: Unsigned, Storage: BitSetStorageAccess = FewestNodes<Len, U0>> { inner: Storage, _len: P<Len>, } impl<L: Unsigned, S: BitSetStorageAccess + Default> Default for BitSet<L, S> { fn default() 
-> Self { Self { inner: Default::default(), _len: Default::default(), } } } impl<L: Unsigned, S: BitSetStorageAccess> BitSetStorageAccess for BitSet<L, S> { fn try_get(&self, bit: usize) -> Result<bool, ()> { self.inner.try_get(bit) } fn try_set(&mut self, bit: usize, val: bool) -> Result<(), ()> { self.inner.try_set(bit, val) } } impl BitSet<U0, ()> { pub fn new<L: Unsigned>() -> BitSet<L, FewestNodes<L>> where LenToRootStorageNodeFewestNodes<L, U0>: GetStorageNodes, { BitSet { inner: Default::default(), _len: Default::default(), } } } pub struct BitSetIterator<L: Unsigned, S: BitSetStorageAccess> { inner: S, curr_idx: usize, end: usize, _len: P<L>, } impl<L: Unsigned, S: BitSetStorageAccess> Iterator for BitSetIterator<L, S> { type Item = bool; fn next(&mut self) -> Option<Self::Item> { if self.curr_idx == self.end /* L::USIZE */ { None } else { let res = Some(self.inner.get(self.curr_idx)); self.curr_idx += 1; res } } fn size_hint(&self) -> (usize, Option<usize>) { let remaining = self.end - self.curr_idx; (remaining, Some(remaining)) } } impl<L: Unsigned, S: BitSetStorageAccess> DoubleEndedIterator for BitSetIterator<L, S> { fn next_back(&mut self) -> Option<Self::Item> { if self.curr_idx == self.end { None } else { self.end -= 1; Some(self.inner.get(self.end)) } } } // unsafe impl<L: Unsigned, S: BitSetStorageAccess> TrustedLen for BitSetIterator<L, S> { } impl<L: Unsigned, S: BitSetStorageAccess> ExactSizeIterator for BitSetIterator<L, S> {} impl<L: Unsigned, S: BitSetStorageAccess> FusedIterator for BitSetIterator<L, S> {} impl<L: Unsigned, S: BitSetStorageAccess> BitSet<L, S> { pub fn iter(&self) -> impl Iterator<Item = bool> + '_ { BitSetIterator::<L, &'_ Self> { inner: self, curr_idx: 0, end: L::USIZE, _len: P, } } } impl<L: Unsigned, S: BitSetStorageAccess> IntoIterator for BitSet<L, S> { type Item = bool; type IntoIter = BitSetIterator<L, S>; fn into_iter(self) -> Self::IntoIter { BitSetIterator { inner: self.inner, curr_idx: 0, end: L::USIZE, _len: P, 
} } } impl<L: Unsigned, S: BitSetStorageAccess> FromIterator<bool> for BitSet<L, S> where Self: Default, { fn from_iter<T: IntoIterator<Item = bool>>(iter: T) -> Self { let mut iter = iter.into_iter(); let mut out = Self::default(); for i in 0..L::USIZE { out.set(i, iter.next().unwrap()); } assert!(iter.next().is_none()); out } } pub fn roundtrip(num: u64) { let b: BitSet::<typenum::U64> = (0..64).map(|i| (num & (1 << i)) != 0).collect(); assert_eq!(b.inner.inner, num); let out = b.into_iter().rev().fold(0, |acc, b| { (acc << 1) | (b as u64) }); assert_eq!(out, num); } pub fn accessor(set: &BitSet::<U64>) -> bool { set.get(34) } pub fn accessor_panic(set: &BitSet::<typenum::U31>) -> bool { set.get(34) }
Become a Patron
Sponsor on GitHub
Donate via PayPal
Source on GitHub
Mailing list
Installed libraries
Wiki
Report an issue
How it works
Contact the author
CE on Mastodon
About the author
Statistics
Changelog
Version tree