rust source #1
#[repr(align(2))]
pub struct Zst;

pub fn stow_zst() -> *mut () {
    crate::stow(Zst)
}

pub fn box_zst() -> Box<Zst> {
    Box::new(Zst)
}

pub unsafe fn ref_stowed_zst(stowed: &Stowaway<Zst>) -> &Zst {
    stowed.as_ref()
}

pub fn ref_boxed_zst(boxed: &Box<Zst>) -> &Zst {
    &boxed
}

pub unsafe fn unstow_zst(stowed: *mut ()) -> Zst {
    crate::unstow(stowed)
}

pub fn unbox_zst(boxed: Box<Zst>) -> Zst {
    *boxed
}

// extern crate alloc;
// use alloc::boxed::Box;

use std::fmt;
use std::mem::{self, MaybeUninit};
use std::ops::{Deref, DerefMut};
use std::ptr;

#[derive(Copy, Clone, PartialEq, Eq)]
enum SizeClass {
    Zero,
    Packed,
    Boxed,
}

/// A maybe-allocated container. This struct stores a single `T` value, either
/// by boxing it or (if `T` is small enough) by packing it directly into the bytes
/// of a raw pointer.
///
/// See the [module level documentation][crate] for more information.
///
/// # Example
///
/// ```
/// use stowaway::Stowaway;
///
/// let value1: usize = 256;
/// let value1_stowed = Stowaway::new(value1);
/// let storage: *mut () = Stowaway::into_raw(value1_stowed);
/// let value2_stowed = unsafe { Stowaway::from_raw(storage) };
/// let value2: usize = Stowaway::into_inner(value2_stowed);
///
/// assert_eq!(value1, value2);
/// ```
// TODO: Find a way to test that this actually does what it claims; that is,
// that it boxes large values and copies small ones.
#[repr(transparent)]
pub struct Stowaway<T> {
    // TODO: Reimplement this as a union, once we can have non-copy fields in a
    // union.
    storage: *mut T,
}

impl<T> Stowaway<T> {
    /// Decide how a `T` is stored: packed directly into a `*mut T`, boxed, or
    /// (for zero-sized types) not stored at all.
    ///
    /// TODO: make this a const fn when && is allowed in const
    #[inline(always)]
    fn size_class() -> SizeClass {
        assert_eq!(
            mem::size_of::<*mut T>(),
            mem::size_of::<*mut ()>(),
            "Cannot currently stow pointers to DSTs"
        );

        if mem::size_of::<T>() == 0 {
            SizeClass::Zero
        } else if mem::size_of::<T>() <= mem::size_of::<*mut T>()
            // Need to check alignment, just in case. However, in order for this
            // to fail, we'd need a T value with an alignment larger than its own
            // size.
            && mem::align_of::<T>() <= mem::align_of::<*mut T>()
        {
            SizeClass::Packed
        } else {
            SizeClass::Boxed
        }
    }

    /// Create a new `Stowaway`. If `T` can fit into a pointer, it will be
    /// stored directly in the struct; otherwise, it will be boxed and the
    /// `Box` will be stored in the struct. See the
    /// [module level documentation][crate] for more information.
    #[inline]
    pub fn new(value: T) -> Self {
        let storage = match Self::size_class() {
            SizeClass::Zero => {
                mem::forget(value);
                // ptr::null_mut()
                mem::align_of::<T>() as *mut T
            }
            SizeClass::Boxed => Box::into_raw(Box::new(value)),
            SizeClass::Packed => {
                // If T is smaller than *mut T, or contains uninit bytes internally,
                // we need to initialize the extra bytes. TODO: figure out a way
                // to initialize these bytes (to the satisfaction of defined
                // behavior) without zeroing them, if possible.
                let mut blob: MaybeUninit<*mut T> = MaybeUninit::zeroed();
                let ptr = blob.as_mut_ptr();

                unsafe {
                    // Safety: We know that the underlying bytes are unused, and
                    // that there are enough of them, and that blob takes ownership
                    // of value. This write call is paired with a `read` call in
                    // `into_inner`.
                    ptr::write(ptr as *mut T, value);

                    // Safety: all the bytes of blob were initialized, either
                    // as zero or with `value`
                    blob.assume_init()
                }
            }
        };

        Self { storage }
    }

    /// Recreate a [`Stowaway`] from a raw pointer returned by a previous call
    /// to [`into_raw`][Stowaway::into_raw] or [`stow`]. The pointer **must**
    /// be discarded after the call to this function, because the returned
    /// [`Stowaway`] takes back ownership of the underlying `T` value.
    ///
    /// # Safety
    ///
    /// This function has safety requirements similar to those of
    /// [`std::ptr::read`] and [`Box::from_raw`], with the added caveat that
    /// the only valid way to create a `storage` pointer is with the [`stow`]
    /// or [`Stowaway::into_raw`] functions:
    ///
    /// - The `storage` value **must** have come from a previous `into_raw`
    ///   or `stow` call for a value of exactly the same `T`.
    /// - This particular `storage` value **must not** be used to create
    ///   any additional `Stowaway` values. Note that this applies even for
    ///   `Copy` types, because the value may have been boxed.
    /// - You must take care to not recreate a [`Stowaway`] across a thread
    ///   boundary for non-`Send` types.
    #[inline]
    pub unsafe fn from_raw(storage: *mut ()) -> Self {
        Self {
            storage: storage as *mut T,
        }
    }

    /// Unwrap this [`Stowaway`] and get the underlying value.
    #[inline]
    pub fn into_inner(stowed: Self) -> T {
        let storage = stowed.storage;
        mem::forget(stowed);

        match Self::size_class() {
            // Safety: ptr::read is guaranteed to be a no-op for a ZST
            SizeClass::Zero => unsafe { ptr::read(storage) },
            // Safety: we previously created a box in `new`
            SizeClass::Boxed => *unsafe { Box::from_raw(storage) },
            SizeClass::Packed => {
                // This can all be done with transmute_copy, but:
                // - I prefer to make sure the right casts are happening
                //   (T vs *T vs **T)
                // - transmute_copy uses an unaligned read, which we don't need
                // - I prefer to pair read/write calls, as opposed to using
                //   `write` in `new` but `transmute` here.
                let ptr_to_storage: *const *mut T = &storage;
                let ptr_to_value: *const T = ptr_to_storage as *const T;

                // Safety:
                //
                // - This value was previously placed in storage by a call to `new`
                // - It won't be double-freed, because we did a forget earlier.
                unsafe { ptr::read(ptr_to_value) }
            }
        }
    }

    /// Get the storage pointer. Note that this is NOT a valid pointer,
    /// and can never be safely dereferenced or written to. The only safe
    /// thing to do with this pointer is to convert it back into a [`Stowaway`]
    /// (for instance, on the other side of an ffi boundary) with `from_raw`,
    /// or directly back into a `T` with [`unstow`].
    ///
    /// The returned value has ownership of the underlying `T` value;
    /// therefore, it must be dropped as soon as possible after converting
    /// it back into a [`Stowaway`]. In particular, it is undefined behavior
    /// to create two [`Stowaway`] instances from the same raw pointer (even
    /// if `T` is `Copy`!).
    #[inline]
    pub fn into_raw(stowed: Self) -> *mut () {
        let storage = stowed.storage;
        mem::forget(stowed);
        storage as *mut ()
    }
}
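// A hedged sketch, not part of the original crate: one possible approach to
// the TODO above the `Stowaway` struct (verifying that small values are
// packed rather than boxed). For a pointer-sized value such as `usize`, the
// zeroed-then-written blob in `Stowaway::new` means the "pointer" returned by
// `stow` is bit-for-bit the stowed value itself, so packing can be observed
// directly.
#[cfg(test)]
mod test_packing_sketch {
    use crate::{stow, unstow};

    #[test]
    fn packed_usize_reuses_pointer_bits() {
        let value: usize = 0x1234_5678;
        let storage = stow(value);

        // If the value had been boxed, `storage` would be a heap address, not
        // the value itself.
        assert_eq!(storage as usize, value);

        // Restore the value so nothing is leaked (a no-op drop for usize).
        let restored: usize = unsafe { unstow(storage) };
        assert_eq!(restored, value);
    }
}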
// These tests should fail the Miri test
#[cfg(test)]
mod test_for_uninit_bytes {
    use crate::{stow, unstow};

    #[test]
    fn zst() {
        #[derive(Clone, Copy, Debug, Eq, PartialEq)]
        struct Zst;

        let x = Zst;
        let stowed = stow(x);
        let unstowed = unsafe { unstow(stowed) };
        assert_eq!(x, unstowed);
    }

    #[test]
    fn small_t() {
        let x: u8 = 7;
        let stowed = stow(x);
        let unstowed = unsafe { unstow(stowed) };
        assert_eq!(x, unstowed);
    }

    #[test]
    fn t_with_gaps_32() {
        #[derive(Clone, Copy, Debug, Eq, PartialEq)]
        struct Gaps32 {
            a: u8,
            b: u16,
        }

        let x = Gaps32 { a: 7, b: 42 };
        let stowed = stow(x);
        let unstowed = unsafe { unstow(stowed) };
        assert_eq!(x, unstowed);
    }

    #[test]
    fn t_with_gaps_64() {
        #[derive(Clone, Copy, Debug, Eq, PartialEq)]
        struct Gaps64 {
            a: u8,
            b: u32,
        }

        let x = Gaps64 { a: 7, b: 42 };
        let stowed = stow(x);
        let unstowed = unsafe { unstow(stowed) };
        assert_eq!(x, unstowed);
    }
}

impl<T> Drop for Stowaway<T> {
    fn drop(&mut self) {
        match Self::size_class() {
            // Safety: this value was previously owned by Self::new, and
            // ptr::read on a zero-size type is a no-op
            SizeClass::Zero => drop(unsafe { ptr::read(self.storage) }),
            // Safety: this box was previously created by Self::new
            SizeClass::Boxed => drop(unsafe { Box::from_raw(self.storage) }),
            SizeClass::Packed => {
                let storage = self.storage;
                let ptr_to_storage: *const *mut T = &storage;
                let ptr_to_value: *const T = ptr_to_storage as *const T;

                // Safety:
                //
                // - This value was previously placed in storage by a call to `new`
                drop(unsafe { ptr::read(ptr_to_value) });
            }
        }
    }
}

#[cfg(test)]
mod test_drop {
    use crate::Stowaway;
    use core::cell::Cell;
    use core::mem;
    use core::sync::atomic::{AtomicU32, Ordering};

    struct DropCounter<'a> {
        counter: &'a Cell<u32>,
    }

    impl<'a> Drop for DropCounter<'a> {
        fn drop(&mut self) {
            self.counter.set(self.counter.get() + 1);
        }
    }

    #[test]
    fn zero_size_value() {
        static COUNTER: AtomicU32 = AtomicU32::new(0);

        #[derive(Debug)]
        struct StaticDropCounter;

        impl Drop for StaticDropCounter {
            fn drop(&mut self) {
                COUNTER.fetch_add(1, Ordering::SeqCst);
            }
        }

        {
            let value = StaticDropCounter;
            assert_eq!(COUNTER.load(Ordering::SeqCst), 0);

            let stowed_value = Stowaway::new(value);
            assert_eq!(COUNTER.load(Ordering::SeqCst), 0);

            let storage = Stowaway::into_raw(stowed_value);
            assert_eq!(COUNTER.load(Ordering::SeqCst), 0);

            let stowed_value = unsafe { Stowaway::<StaticDropCounter>::from_raw(storage) };
            assert_eq!(COUNTER.load(Ordering::SeqCst), 0);

            mem::drop(stowed_value);
            assert_eq!(COUNTER.load(Ordering::SeqCst), 1);
        }
    }

    #[test]
    fn small_stowed_value() {
        let counter: Cell<u32> = Cell::new(0);

        // Create a value, cycle it through the Stowaway lifecycle, and
        // ensure it was dropped exactly once.
        let value = DropCounter { counter: &counter };
        assert_eq!(counter.get(), 0);

        let stowed_value = Stowaway::new(value);
        assert_eq!(counter.get(), 0);

        let storage = Stowaway::into_raw(stowed_value);
        assert_eq!(counter.get(), 0);

        let stowed_value = unsafe { Stowaway::<DropCounter>::from_raw(storage) };
        assert_eq!(counter.get(), 0);

        mem::drop(stowed_value);
        assert_eq!(counter.get(), 1);
    }

    #[test]
    fn small_raw_value() {
        let counter: Cell<u32> = Cell::new(0);

        // Create a value, cycle it through the Stowaway lifecycle, and
        // ensure it was dropped exactly once.
        let value = DropCounter { counter: &counter };
        assert_eq!(counter.get(), 0);

        let stowed_value = Stowaway::new(value);
        assert_eq!(counter.get(), 0);

        let storage = Stowaway::into_raw(stowed_value);
        assert_eq!(counter.get(), 0);

        let stowed_value = unsafe { Stowaway::<DropCounter>::from_raw(storage) };
        assert_eq!(counter.get(), 0);

        let raw_value: DropCounter = Stowaway::into_inner(stowed_value);
        assert_eq!(counter.get(), 0);

        mem::drop(raw_value);
        assert_eq!(counter.get(), 1);
    }

    #[test]
    fn large_stowed_value() {
        let counter: Cell<u32> = Cell::new(0);

        // Create a large array of DropCounters, cycle it through the
        // Stowaway lifecycle, and ensure it was dropped exactly once.
        let value: [DropCounter; 16] = [
            DropCounter { counter: &counter },
            DropCounter { counter: &counter },
            DropCounter { counter: &counter },
            DropCounter { counter: &counter },
            DropCounter { counter: &counter },
            DropCounter { counter: &counter },
            DropCounter { counter: &counter },
            DropCounter { counter: &counter },
            DropCounter { counter: &counter },
            DropCounter { counter: &counter },
            DropCounter { counter: &counter },
            DropCounter { counter: &counter },
            DropCounter { counter: &counter },
            DropCounter { counter: &counter },
            DropCounter { counter: &counter },
            DropCounter { counter: &counter },
        ];
        assert_eq!(counter.get(), 0);

        let stowed_value = Stowaway::new(value);
        assert_eq!(counter.get(), 0);

        let storage = Stowaway::into_raw(stowed_value);
        assert_eq!(counter.get(), 0);

        let stowed_value = unsafe { Stowaway::<[DropCounter; 16]>::from_raw(storage) };
        assert_eq!(counter.get(), 0);

        mem::drop(stowed_value);
        assert_eq!(counter.get(), 16);
    }

    #[test]
    fn large_raw_stowed_value() {
        let counter: Cell<u32> = Cell::new(0);

        // Create a large array of DropCounters, cycle it through the
        // Stowaway lifecycle, and ensure it was dropped exactly once.
        let value: [DropCounter; 16] = [
            DropCounter { counter: &counter },
            DropCounter { counter: &counter },
            DropCounter { counter: &counter },
            DropCounter { counter: &counter },
            DropCounter { counter: &counter },
            DropCounter { counter: &counter },
            DropCounter { counter: &counter },
            DropCounter { counter: &counter },
            DropCounter { counter: &counter },
            DropCounter { counter: &counter },
            DropCounter { counter: &counter },
            DropCounter { counter: &counter },
            DropCounter { counter: &counter },
            DropCounter { counter: &counter },
            DropCounter { counter: &counter },
            DropCounter { counter: &counter },
        ];
        assert_eq!(counter.get(), 0);

        let stowed_value = Stowaway::new(value);
        assert_eq!(counter.get(), 0);

        let storage = Stowaway::into_raw(stowed_value);
        assert_eq!(counter.get(), 0);

        let stowed_value = unsafe { Stowaway::<[DropCounter; 16]>::from_raw(storage) };
        assert_eq!(counter.get(), 0);

        let raw_value: [DropCounter; 16] = Stowaway::into_inner(stowed_value);
        assert_eq!(counter.get(), 0);

        mem::drop(raw_value);
        assert_eq!(counter.get(), 16);
    }
}

impl<T: Default> Default for Stowaway<T> {
    fn default() -> Self {
        Self::new(T::default())
    }
}

impl<T> From<T> for Stowaway<T> {
    fn from(value: T) -> Self {
        Self::new(value)
    }
}

impl<T> AsRef<T> for Stowaway<T> {
    #[inline]
    fn as_ref(&self) -> &T {
        let ptr_to_storage = match Self::size_class() {
            // In the ZST case, storage is a dangling but non-null, well-aligned
            // pointer, which is fine to turn into a reference to a zero-size value
            SizeClass::Zero => self.storage,
            // In the box case, storage IS a valid pointer, so simply
            // dereference it
            SizeClass::Boxed => self.storage,
            SizeClass::Packed => (&self.storage) as *const *mut T as *const T,
        };

        unsafe { &*ptr_to_storage }
    }
}

impl<T> AsMut<T> for Stowaway<T> {
    #[inline]
    fn as_mut(&mut self) -> &mut T {
        let ptr_to_storage = match Self::size_class() {
            // In the ZST case, storage is a dangling but non-null, well-aligned
            // pointer, which is fine to turn into a reference to a zero-size value
            SizeClass::Zero => self.storage,
            // In the box case, storage IS a valid pointer, so simply
            // dereference it
            SizeClass::Boxed => self.storage,
            SizeClass::Packed => (&mut self.storage) as *mut *mut T as *mut T,
        };

        unsafe { &mut *ptr_to_storage }
    }
}

impl<T> Deref for Stowaway<T> {
    type Target = T;

    #[inline]
    fn deref(&self) -> &T {
        self.as_ref()
    }
}

impl<T> DerefMut for Stowaway<T> {
    #[inline]
    fn deref_mut(&mut self) -> &mut T {
        self.as_mut()
    }
}

impl<T: Clone> Clone for Stowaway<T> {
    #[inline]
    fn clone(&self) -> Self {
        Self::new(self.as_ref().clone())
    }

    #[inline]
    fn clone_from(&mut self, other: &Self) {
        self.as_mut().clone_from(other.as_ref());
    }
}

unsafe impl<T: Send> Send for Stowaway<T> {}
unsafe impl<T: Sync> Sync for Stowaway<T> {}
/// Stow a value into a `*mut ()`. This function will store the value's bytes
/// directly into the pointer if it will fit; otherwise it will box the value
/// and return the raw pointer. The value can be unstowed with a call to
/// [`unstow`], or converted into a [`Stowaway`] with [`Stowaway::from_raw`].
///
/// This is the equivalent of `Stowaway::into_raw(Stowaway::new(value))`
#[inline]
pub fn stow<T>(value: T) -> *mut () {
    Stowaway::into_raw(Stowaway::new(value))
}

/// Restore a value that was previously stowed, either with [`stow`] or with
/// [`Stowaway::into_raw`]. The `storage` pointer **must** be discarded after
/// the call to this function, as this function takes back ownership of the
/// inner `T` value.
///
/// If you don't need a `T` value (that is, if an `&T` or `&mut T` would
/// suffice), consider using [`Stowaway::from_raw`] instead, as that will omit
/// the extra copy out of the box if the value is boxed.
///
/// This is the equivalent of `Stowaway::into_inner(Stowaway::from_raw(storage))`
///
/// # Safety
///
/// This function has safety requirements similar to those of [`std::ptr::read`]
/// and [`Box::from_raw`], with the added caveat that the only valid way to
/// create a `storage` pointer is with the [`stow`] or [`Stowaway::into_raw`]
/// functions:
///
/// - The `storage` value **must** have come from a previous [`stow`] or
///   [`Stowaway::into_raw`] call for a value of exactly the same `T`.
/// - This particular `storage` value **must not** be restored again. Note that
///   this applies even for `Copy` types, because the value may have been boxed.
/// - This function does not respect marker traits; you must take care not to
///   pass a non-`Send` type to a different thread.
#[inline]
pub unsafe fn unstow<T>(storage: *mut ()) -> T {
    Stowaway::into_inner(Stowaway::from_raw(storage))
}
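// A hedged sketch, not part of the original crate, of the FFI-style usage the
// `stow`/`unstow` docs describe: the pointer-sized token from `stow` is handed
// to a C-like API as a context/user-data pointer and recovered inside the
// callback with `unstow`. `register_callback` is a hypothetical stand-in for a
// real C API, not something this crate provides.
#[cfg(test)]
mod test_ffi_style_sketch {
    use crate::{stow, unstow};

    // Stand-in for a C API that stores a callback plus a `void *` context and
    // invokes it exactly once.
    fn register_callback(cb: unsafe extern "C" fn(*mut ()), context: *mut ()) {
        unsafe { cb(context) };
    }

    unsafe extern "C" fn on_event(context: *mut ()) {
        // Safety: `context` came from `stow::<Vec<i64>>` below and is consumed
        // exactly once.
        let data: Vec<i64> = unsafe { unstow(context) };
        assert_eq!(data, vec![1, 2, 3]);
    }

    #[test]
    fn stowed_context_through_callback() {
        let data: Vec<i64> = vec![1, 2, 3];
        // Ownership of `data` moves into the pointer-sized token.
        let context = stow(data);
        register_callback(on_event, context);
        // `context` must not be used again; the callback already unstowed it.
    }
}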
/// Get a reference to a value that was previously stowed, either with [`stow`]
/// or with [`Stowaway::into_raw`]. This function does *not* take ownership
/// of the value in `storage`, but it does create a shared reference to it, so
/// you must take care to not separately take ownership of it somewhere else,
/// or create a mutable reference to it. It is safe to create multiple shared
/// references with the same `storage`, though take care to respect `Sync` in
/// threaded applications.
///
/// # Interior mutability
///
/// Many C-like APIs will provide copies of the `void*` context pointer to
/// their functions. While constructing shared references with these copies is
/// sound, keep in mind that changes made (for example, through a Cell) may
/// not be reflected in other references unless the underlying API propagates
/// them. In general you should use a `Box` rather than a `Stowaway` if you
/// need shared mutability through a pointer like this.
///
/// # Safety
///
/// This function has safety requirements similar to those of turning a pointer
/// into a reference.
///
/// - The `storage` value **must** have come from a previous [`stow`] or
///   [`Stowaway::into_raw`] call for a value of exactly the same `T`.
/// - You **must** not unstow the value or create a mutable reference to it
///   while this or any other shared reference to it exists.
/// - This function does not respect marker traits; you must take care not
///   to create a shared reference to a non-`Sync` type across a thread boundary.
///
/// # Example
///
/// ```
/// use stowaway::{ref_from_stowed, stow, unstow};
///
/// let value: i16 = 143;
/// let mut storage = stow(value);
/// {
///     let value_ref_1: &i16 = unsafe { ref_from_stowed(&storage) };
///     let value_ref_2: &i16 = unsafe { ref_from_stowed(&storage) };
///     assert_eq!(value_ref_1, &143);
///     assert_eq!(value_ref_2, &143);
/// }
///
/// // Need to make sure we drop the value
/// let value: i16 = unsafe { unstow(storage) };
/// ```
#[inline]
pub unsafe fn ref_from_stowed<'a, T>(storage_ref: &'a *mut ()) -> &'a T {
    // Safety: because we use repr(transparent), this is a safe conversion
    let stowaway_ref: &'a Stowaway<T> =
        &*(storage_ref as *const *mut () as *const Stowaway<T>);
    stowaway_ref.as_ref()
}

#[test]
fn test_ref_from_stowed_small() {
    let value: u16 = 173;
    let storage = stow(value);

    {
        let value_ref_1: &u16 = unsafe { ref_from_stowed(&storage) };
        let value_ref_2: &u16 = unsafe { ref_from_stowed(&storage) };
        assert_eq!(*value_ref_1, 173);
        assert_eq!(*value_ref_2, 173);
    }

    // drop stowed
    let _stowed: Stowaway<u16> = unsafe { Stowaway::from_raw(storage) };
}

#[test]
fn test_ref_from_stowed_large() {
    use alloc::vec;
    use alloc::vec::Vec;

    let value: Vec<i64> = vec![3245, 5675, 4653, 1234, 7345];
    let storage = stow(value);

    {
        let value_ref_1: &Vec<i64> = unsafe { ref_from_stowed(&storage) };
        let value_ref_2: &Vec<i64> = unsafe { ref_from_stowed(&storage) };
        assert_eq!(value_ref_1[3], 1234);
        assert_eq!(value_ref_2[1], 5675);
    }

    // drop stowed
    let _stowed: Stowaway<Vec<i64>> = unsafe { Stowaway::from_raw(storage) };
}

/// Get a mutable reference to a value that was previously stowed, either with
/// [`stow`] or with [`Stowaway::into_raw`]. This function does *not* take
/// ownership of the value in `storage`, but it does create a mutable reference
/// to it, so you must take care to not separately take ownership of it
/// somewhere else, or create any other shared or mutable references to it.
///
/// # Safety
///
/// This function has safety requirements similar to those of turning a mutable
/// pointer into a mutable reference.
///
/// - The `storage` value **must** have come from a previous [`stow`] or
///   [`Stowaway::into_raw`] call for a value of exactly the same `T`.
/// - You **must** not unstow the value or create any other mutable or shared
///   references to it while this mutable reference exists.
/// - This function does not respect marker traits; you must take care not
///   to create a mutable reference across a thread boundary unless
///   `&mut T: Send`.
///
/// # Example
///
/// ```
/// use stowaway::{mut_ref_from_stowed, stow, unstow};
///
/// let value: Vec<i64> = vec![1, 2, 3, 4];
/// let mut storage = stow(value);
/// {
///     let value_ref: &mut Vec<i64> = unsafe { mut_ref_from_stowed(&mut storage) };
///     value_ref.push(5);
///     value_ref.push(6);
/// }
/// let value: Vec<i64> = unsafe { unstow(storage) };
/// assert_eq!(&value, &[1, 2, 3, 4, 5, 6]);
/// ```
#[inline]
pub unsafe fn mut_ref_from_stowed<'a, T>(storage_ref: &'a mut *mut ()) -> &'a mut T {
    // Safety: because we use repr(transparent), this is a safe conversion
    let stowaway_ref: &'a mut Stowaway<T> =
        &mut *(storage_ref as *mut *mut () as *mut Stowaway<T>);
    stowaway_ref.as_mut()
}

#[test]
fn test_mut_ref_from_stowed_small() {
    let value: u16 = 173;
    let mut storage = stow(value);

    {
        let value_ref: &mut u16 = unsafe { mut_ref_from_stowed(&mut storage) };
        *value_ref += 55;
    }

    // drop stowed
    let value: u16 = unsafe { unstow(storage) };
    assert_eq!(value, 228);
}

#[test]
fn test_mut_ref_from_stowed_large() {
    use alloc::vec;
    use alloc::vec::Vec;

    let value: Vec<i64> = vec![3245, 5675, 4653, 1234, 7345];
    let mut storage = stow(value);

    {
        let value_ref: &mut Vec<i64> = unsafe { mut_ref_from_stowed(&mut storage) };
        value_ref.push(10);
        value_ref.push(12);
    }

    // drop stowed
    let value: Vec<i64> = unsafe { unstow(storage) };
    assert_eq!(&value, &[3245, 5675, 4653, 1234, 7345, 10, 12]);
}
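// A hedged sketch, not part of the original crate, showing the smart-pointer
// style of use that the AsRef/AsMut/Deref/DerefMut/Clone impls above enable:
// a Stowaway can be handled much like a Box while always staying exactly one
// pointer in size.
#[cfg(test)]
mod test_smart_pointer_sketch {
    use crate::Stowaway;
    use core::mem;

    #[test]
    fn deref_and_clone_like_a_box() {
        // repr(transparent) over `*mut T` keeps the container pointer-sized,
        // whether the value ends up packed or boxed.
        assert_eq!(
            mem::size_of::<Stowaway<[u64; 4]>>(),
            mem::size_of::<*mut ()>()
        );

        let mut greeting: Stowaway<String> = Stowaway::new(String::from("hello"));
        greeting.push_str(", world"); // &mut String via DerefMut
        assert_eq!(greeting.len(), 12); // &String via Deref

        let copy = greeting.clone(); // Clone goes through the inner String
        assert_eq!(copy.as_str(), "hello, world");
    }
}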