rust source #1
Source code
//#![feature(arc_into_inner)]
//#![feature(get_mut_unchecked)]
//#![feature(dropck_eyepatch)]
//#![feature(allocator_api)]
//#![feature(layout_for_ptr)]
//#![feature(strict_provenance)]

//use std::alloc::Allocator;
use std::sync::Arc;
use std::cell::UnsafeCell;
use std::mem;
use std::sync::atomic::Ordering::{Acquire, Release};
//use std::sync::Weak;
use std::marker::PhantomData;
use std::ptr::{self, NonNull};
use std::sync::atomic;
//use std::alloc::Global;
//use std::alloc::Layout;

/*
pub(crate) fn is_dangling<T: ?Sized>(ptr: *mut T) -> bool {
    panic!(); //(ptr as *mut ()).addr() == usize::MAX
}
*/

macro_rules! acquire {
    ($x:expr) => {
        atomic::fence(Acquire)
    };
}

pub struct MyArc<T: ?Sized> {
    ptr: NonNull<MyArcInner<T>>,
    phantom: PhantomData<MyArcInner<T>>,
}

#[repr(C)]
struct MyArcInner<T: ?Sized> {
    strong: atomic::AtomicUsize,
    // the value usize::MAX acts as a sentinel for temporarily "locking" the
    // ability to upgrade weak pointers or downgrade strong ones; this is used
    // to avoid races in `make_mut` and `get_mut`.
    weak: atomic::AtomicUsize,
    data: T,
}

pub struct MyWeak<T: ?Sized> {
    // This is a `NonNull` to allow optimizing the size of this type in enums,
    // but it is not necessarily a valid pointer.
    // `Weak::new` sets this to `usize::MAX` so that it doesn’t need
    // to allocate space on the heap. That's not a value a real pointer
    // will ever have because RcBox has alignment at least 2.
    // This is only possible when `T: Sized`; unsized `T` never dangle.
    #[allow(dead_code)]
    ptr: NonNull<MyArcInner<T>>,
}

/*
impl<T: ?Sized> Drop for MyWeak<T> {
    /// Drops the `Weak` pointer.
    ///
    /// # Examples
    ///
    /// ```
    /// use std::sync::{Arc, Weak};
    ///
    /// struct Foo;
    ///
    /// impl Drop for Foo {
    ///     fn drop(&mut self) {
    ///         println!("dropped!");
    ///     }
    /// }
    ///
    /// let foo = Arc::new(Foo);
    /// let weak_foo = Arc::downgrade(&foo);
    /// let other_weak_foo = Weak::clone(&weak_foo);
    ///
    /// drop(weak_foo); // Doesn't print anything
    /// drop(foo);      // Prints "dropped!"
    ///
    /// assert!(other_weak_foo.upgrade().is_none());
    /// ```
    fn drop(&mut self) {
        // If we find out that we were the last weak pointer, then its time to
        // deallocate the data entirely. See the discussion in Arc::drop() about
        // the memory orderings
        //
        // It's not necessary to check for the locked state here, because the
        // weak count can only be locked if there was precisely one weak ref,
        // meaning that drop could only subsequently run ON that remaining weak
        // ref, which can only happen after the lock is released.
        let inner = if let Some(inner) = self.inner() { inner } else { return };

        if inner.weak.fetch_sub(1, Release) == 1 {
            acquire!(inner.weak);
            panic!(); //unsafe { Global.deallocate(self.ptr.cast(), Layout::for_value_raw(self.ptr.as_ptr())) }
        }
    }
}
*/

impl<T> MyArc<T> {
    #[inline]
    fn inner(&self) -> &MyArcInner<T> {
        // This unsafety is ok because while this arc is alive we're guaranteed
        // that the inner pointer is valid. Furthermore, we know that the
        // `ArcInner` structure itself is `Sync` because the inner data is
        // `Sync` as well, so we're ok loaning out an immutable pointer to these
        // contents.
        unsafe { self.ptr.as_ref() }
    }

    pub unsafe fn get_mut_unchecked(this: &mut Self) -> &mut T {
        // We are careful to *not* create a reference covering the "count" fields, as
        // this would alias with concurrent access to the reference counts (e.g. by `Weak`).
        unsafe { &mut (*this.ptr.as_ptr()).data }
    }

    pub fn into_inner(this: Self) -> Option<T> {
        // Make sure that the ordinary `Drop` implementation isn’t called as well
        let mut this = mem::ManuallyDrop::new(this);

        // Following the implementation of `drop` and `drop_slow`
        if this.inner().strong.fetch_sub(1, Release) != 1 {
            return None;
        }

        acquire!(this.inner().strong);

        // SAFETY: This mirrors the line
        //
        //     unsafe { ptr::drop_in_place(Self::get_mut_unchecked(self)) };
        //
        // in `drop_slow`. Instead of dropping the value behind the pointer,
        // it is read and eventually returned; `ptr::read` has the same
        // safety conditions as `ptr::drop_in_place`.
        let inner = unsafe { ptr::read(Self::get_mut_unchecked(&mut this)) };

        //drop(MyWeak { ptr: this.ptr });
        unsafe {
            drop(std::mem::transmute::<_, std::sync::Weak<T>>(MyWeak { ptr: this.ptr }));
        }

        Some(inner)
    }
}

/*
struct MyWeakInner<'a> {
    weak: &'a atomic::AtomicUsize,
    #[allow(dead_code)]
    strong: &'a atomic::AtomicUsize,
}

impl<T: ?Sized> MyWeak<T> {
    #[inline]
    fn inner(&self) -> Option<MyWeakInner<'_>> {
        if is_dangling(self.ptr.as_ptr()) {
            None
        } else {
            // We are careful to *not* create a reference covering the "data" field, as
            // the field may be mutated concurrently (for example, if the last `Arc`
            // is dropped, the data field will be dropped in-place).
            Some(unsafe {
                let ptr = self.ptr.as_ptr();
                MyWeakInner { strong: &(*ptr).strong, weak: &(*ptr).weak }
            })
        }
    }
}
*/

/*
#[inline(never)]
pub fn a(aaa: Arc<u64>) {
    std::hint::black_box((Arc::into_inner(aaa)).map(|i| i * 232));
}*/

#[inline(never)]
pub fn a2(aaa: Arc<u64>) {
    let aaa = unsafe { std::mem::transmute::<_, MyArc<u64>>(aaa) };
    std::hint::black_box((MyArc::into_inner(aaa)).map(|i| i * 232));
}

/*
#[inline(never)]
pub fn b(aaa: &mut Arc<u64>) {
    unsafe { std::hint::black_box(*Arc::get_mut_unchecked(aaa) += 232) };
}
*/

#[inline(never)]
pub fn c(aaa: &mut Arc<UnsafeCell<u64>>) {
    unsafe { std::hint::black_box(*aaa.get() += 232) };
}

pub fn main() {
    //a(std::sync::Arc::new(3));
    //b(&mut std::sync::Arc::new(33));
    c(&mut std::sync::Arc::new(std::cell::UnsafeCell::new(33)));
}
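For reference, the `a2`/`MyArc::into_inner` experiment above is modelled on the standard `Arc::into_inner`, whose observable behaviour is: the value comes back only from the call that drops the last strong reference. A minimal, separate snippet illustrating that semantics on a recent stable toolchain (values are illustrative only):

use std::sync::Arc;

fn main() {
    // Sole strong reference: into_inner returns the value.
    let only = Arc::new(41u64);
    assert_eq!(Arc::into_inner(only), Some(41));

    // Two strong references: the first call merely drops its reference and
    // yields None; the surviving handle can still recover the value.
    let first = Arc::new(7u64);
    let second = Arc::clone(&first);
    assert_eq!(Arc::into_inner(first), None);
    assert_eq!(Arc::into_inner(second), Some(7));
}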