| field | value | date |
|---|---|---|
| author | Laurențiu Nicola <lnicola@users.noreply.github.com> | 2025-01-20 09:29:00 +0000 |
| committer | GitHub <noreply@github.com> | 2025-01-20 09:29:00 +0000 |
| commit | 141e53b7154083afcfb2e0fb0334e554d0c146a4 (patch) | |
| tree | 1707172316fef25a86219934940bf7d3196d263f /compiler/rustc_data_structures/src | |
| parent | 61af2cc09a25a51015ed1c0e3ff9ce3dc8323987 (diff) | |
| parent | 3af5c080e61a00986852fc12ff2681238eaaa2dc (diff) | |
| download | rust-141e53b7154083afcfb2e0fb0334e554d0c146a4.tar.gz rust-141e53b7154083afcfb2e0fb0334e554d0c146a4.zip | |
Merge pull request #18980 from lnicola/sync-from-rust
minor: Sync from downstream
Diffstat (limited to 'compiler/rustc_data_structures/src')
18 files changed, 450 insertions, 1003 deletions
diff --git a/compiler/rustc_data_structures/src/flock.rs b/compiler/rustc_data_structures/src/flock.rs index e03962a54ec..292a33d5646 100644 --- a/compiler/rustc_data_structures/src/flock.rs +++ b/compiler/rustc_data_structures/src/flock.rs @@ -4,6 +4,7 @@ //! green/native threading. This is just a bare-bones enough solution for //! librustdoc, it is not production quality at all. +#[cfg(bootstrap)] cfg_match! { cfg(target_os = "linux") => { mod linux; @@ -27,4 +28,28 @@ cfg_match! { } } +#[cfg(not(bootstrap))] +cfg_match! { + target_os = "linux" => { + mod linux; + use linux as imp; + } + target_os = "redox" => { + mod linux; + use linux as imp; + } + unix => { + mod unix; + use unix as imp; + } + windows => { + mod windows; + use self::windows as imp; + } + _ => { + mod unsupported; + use unsupported as imp; + } +} + pub use imp::Lock; diff --git a/compiler/rustc_data_structures/src/graph/implementation/mod.rs b/compiler/rustc_data_structures/src/graph/implementation/mod.rs index 43fdfe6ee0d..7724e9347d8 100644 --- a/compiler/rustc_data_structures/src/graph/implementation/mod.rs +++ b/compiler/rustc_data_structures/src/graph/implementation/mod.rs @@ -22,7 +22,7 @@ use std::fmt::Debug; -use rustc_index::bit_set::BitSet; +use rustc_index::bit_set::DenseBitSet; use tracing::debug; #[cfg(test)] @@ -214,7 +214,7 @@ impl<N: Debug, E: Debug> Graph<N, E> { direction: Direction, entry_node: NodeIndex, ) -> Vec<NodeIndex> { - let mut visited = BitSet::new_empty(self.len_nodes()); + let mut visited = DenseBitSet::new_empty(self.len_nodes()); let mut stack = vec![]; let mut result = Vec::with_capacity(self.len_nodes()); let mut push_node = |stack: &mut Vec<_>, node: NodeIndex| { @@ -287,7 +287,7 @@ impl<'g, N: Debug, E: Debug> Iterator for AdjacentEdges<'g, N, E> { pub struct DepthFirstTraversal<'g, N, E> { graph: &'g Graph<N, E>, stack: Vec<NodeIndex>, - visited: BitSet<usize>, + visited: DenseBitSet<usize>, direction: Direction, } @@ -297,7 +297,7 @@ impl<'g, N: Debug, E: Debug> DepthFirstTraversal<'g, N, E> { start_node: NodeIndex, direction: Direction, ) -> Self { - let mut visited = BitSet::new_empty(graph.len_nodes()); + let mut visited = DenseBitSet::new_empty(graph.len_nodes()); visited.insert(start_node.node_id()); DepthFirstTraversal { graph, stack: vec![start_node], visited, direction } } diff --git a/compiler/rustc_data_structures/src/graph/iterate/mod.rs b/compiler/rustc_data_structures/src/graph/iterate/mod.rs index cbc6664d853..7b4573d7a84 100644 --- a/compiler/rustc_data_structures/src/graph/iterate/mod.rs +++ b/compiler/rustc_data_structures/src/graph/iterate/mod.rs @@ -1,6 +1,6 @@ use std::ops::ControlFlow; -use rustc_index::bit_set::BitSet; +use rustc_index::bit_set::DenseBitSet; use rustc_index::{IndexSlice, IndexVec}; use super::{DirectedGraph, StartNode, Successors}; @@ -78,7 +78,7 @@ where { graph: G, stack: Vec<G::Node>, - visited: BitSet<G::Node>, + visited: DenseBitSet<G::Node>, } impl<G> DepthFirstSearch<G> @@ -86,7 +86,7 @@ where G: DirectedGraph + Successors, { pub fn new(graph: G) -> Self { - Self { stack: vec![], visited: BitSet::new_empty(graph.num_nodes()), graph } + Self { stack: vec![], visited: DenseBitSet::new_empty(graph.num_nodes()), graph } } /// Version of `push_start_node` that is convenient for chained @@ -125,6 +125,16 @@ where pub fn visited(&self, node: G::Node) -> bool { self.visited.contains(node) } + + /// Returns a reference to the set of nodes that have been visited, with + /// the same caveats as [`Self::visited`]. 
+ /// + /// When incorporating the visited nodes into another bitset, using bulk + /// operations like `union` or `intersect` can be more efficient than + /// processing each node individually. + pub fn visited_set(&self) -> &DenseBitSet<G::Node> { + &self.visited + } } impl<G> std::fmt::Debug for DepthFirstSearch<G> @@ -207,8 +217,8 @@ where { graph: &'graph G, stack: Vec<Event<G::Node>>, - visited: BitSet<G::Node>, - settled: BitSet<G::Node>, + visited: DenseBitSet<G::Node>, + settled: DenseBitSet<G::Node>, } impl<'graph, G> TriColorDepthFirstSearch<'graph, G> @@ -219,8 +229,8 @@ where TriColorDepthFirstSearch { graph, stack: vec![], - visited: BitSet::new_empty(graph.num_nodes()), - settled: BitSet::new_empty(graph.num_nodes()), + visited: DenseBitSet::new_empty(graph.num_nodes()), + settled: DenseBitSet::new_empty(graph.num_nodes()), } } diff --git a/compiler/rustc_data_structures/src/graph/mod.rs b/compiler/rustc_data_structures/src/graph/mod.rs index 103ddd917bf..92035e8bc48 100644 --- a/compiler/rustc_data_structures/src/graph/mod.rs +++ b/compiler/rustc_data_structures/src/graph/mod.rs @@ -4,6 +4,7 @@ pub mod dominators; pub mod implementation; pub mod iterate; mod reference; +pub mod reversed; pub mod scc; pub mod vec_graph; diff --git a/compiler/rustc_data_structures/src/graph/reversed.rs b/compiler/rustc_data_structures/src/graph/reversed.rs new file mode 100644 index 00000000000..9b726deaa15 --- /dev/null +++ b/compiler/rustc_data_structures/src/graph/reversed.rs @@ -0,0 +1,42 @@ +use crate::graph::{DirectedGraph, Predecessors, Successors}; + +/// View that reverses the direction of edges in its underlying graph, so that +/// successors become predecessors and vice-versa. +/// +/// Because of `impl<G: Graph> Graph for &G`, the underlying graph can be +/// wrapped by-reference instead of by-value if desired. +#[derive(Clone, Copy, Debug)] +pub struct ReversedGraph<G> { + pub inner: G, +} + +impl<G> ReversedGraph<G> { + pub fn new(inner: G) -> Self { + Self { inner } + } +} + +impl<G: DirectedGraph> DirectedGraph for ReversedGraph<G> { + type Node = G::Node; + + fn num_nodes(&self) -> usize { + self.inner.num_nodes() + } +} + +// Implementing `StartNode` is not possible in general, because the start node +// of an underlying graph is instead an _end_ node in the reversed graph. +// But would be possible to define another wrapper type that adds an explicit +// start node to its underlying graph, if desired. 
+ +impl<G: Predecessors> Successors for ReversedGraph<G> { + fn successors(&self, node: Self::Node) -> impl Iterator<Item = Self::Node> { + self.inner.predecessors(node) + } +} + +impl<G: Successors> Predecessors for ReversedGraph<G> { + fn predecessors(&self, node: Self::Node) -> impl Iterator<Item = Self::Node> { + self.inner.successors(node) + } +} diff --git a/compiler/rustc_data_structures/src/marker.rs b/compiler/rustc_data_structures/src/marker.rs index 2b629024bfe..6ae97222f77 100644 --- a/compiler/rustc_data_structures/src/marker.rs +++ b/compiler/rustc_data_structures/src/marker.rs @@ -72,7 +72,7 @@ impl_dyn_send!( [Vec<T, A> where T: DynSend, A: std::alloc::Allocator + DynSend] [Box<T, A> where T: ?Sized + DynSend, A: std::alloc::Allocator + DynSend] [crate::sync::RwLock<T> where T: DynSend] - [crate::tagged_ptr::CopyTaggedPtr<P, T, CP> where P: Send + crate::tagged_ptr::Pointer, T: Send + crate::tagged_ptr::Tag, const CP: bool] + [crate::tagged_ptr::TaggedRef<'a, P, T> where 'a, P: Sync, T: Send + crate::tagged_ptr::Tag] [rustc_arena::TypedArena<T> where T: DynSend] [indexmap::IndexSet<V, S> where V: DynSend, S: DynSend] [indexmap::IndexMap<K, V, S> where K: DynSend, V: DynSend, S: DynSend] @@ -148,7 +148,7 @@ impl_dyn_sync!( [crate::sync::RwLock<T> where T: DynSend + DynSync] [crate::sync::WorkerLocal<T> where T: DynSend] [crate::intern::Interned<'a, T> where 'a, T: DynSync] - [crate::tagged_ptr::CopyTaggedPtr<P, T, CP> where P: Sync + crate::tagged_ptr::Pointer, T: Sync + crate::tagged_ptr::Tag, const CP: bool] + [crate::tagged_ptr::TaggedRef<'a, P, T> where 'a, P: Sync, T: Sync + crate::tagged_ptr::Tag] [parking_lot::lock_api::Mutex<R, T> where R: DynSync, T: ?Sized + DynSend] [parking_lot::lock_api::RwLock<R, T> where R: DynSync, T: ?Sized + DynSend + DynSync] [indexmap::IndexSet<V, S> where V: DynSync, S: DynSync] diff --git a/compiler/rustc_data_structures/src/profiling.rs b/compiler/rustc_data_structures/src/profiling.rs index 19050746c2f..18e98e6c39f 100644 --- a/compiler/rustc_data_structures/src/profiling.rs +++ b/compiler/rustc_data_structures/src/profiling.rs @@ -860,6 +860,7 @@ fn get_thread_id() -> u32 { } // Memory reporting +#[cfg(bootstrap)] cfg_match! { cfg(windows) => { pub fn get_resident_set_size() -> Option<usize> { @@ -921,5 +922,67 @@ cfg_match! { } } +#[cfg(not(bootstrap))] +cfg_match! 
{ + windows => { + pub fn get_resident_set_size() -> Option<usize> { + use std::mem; + + use windows::{ + Win32::System::ProcessStatus::{K32GetProcessMemoryInfo, PROCESS_MEMORY_COUNTERS}, + Win32::System::Threading::GetCurrentProcess, + }; + + let mut pmc = PROCESS_MEMORY_COUNTERS::default(); + let pmc_size = mem::size_of_val(&pmc); + unsafe { + K32GetProcessMemoryInfo( + GetCurrentProcess(), + &mut pmc, + pmc_size as u32, + ) + } + .ok() + .ok()?; + + Some(pmc.WorkingSetSize) + } + } + target_os = "macos" => { + pub fn get_resident_set_size() -> Option<usize> { + use libc::{c_int, c_void, getpid, proc_pidinfo, proc_taskinfo, PROC_PIDTASKINFO}; + use std::mem; + const PROC_TASKINFO_SIZE: c_int = mem::size_of::<proc_taskinfo>() as c_int; + + unsafe { + let mut info: proc_taskinfo = mem::zeroed(); + let info_ptr = &mut info as *mut proc_taskinfo as *mut c_void; + let pid = getpid() as c_int; + let ret = proc_pidinfo(pid, PROC_PIDTASKINFO, 0, info_ptr, PROC_TASKINFO_SIZE); + if ret == PROC_TASKINFO_SIZE { + Some(info.pti_resident_size as usize) + } else { + None + } + } + } + } + unix => { + pub fn get_resident_set_size() -> Option<usize> { + let field = 1; + let contents = fs::read("/proc/self/statm").ok()?; + let contents = String::from_utf8(contents).ok()?; + let s = contents.split_whitespace().nth(field)?; + let npages = s.parse::<usize>().ok()?; + Some(npages * 4096) + } + } + _ => { + pub fn get_resident_set_size() -> Option<usize> { + None + } + } +} + #[cfg(test)] mod tests; diff --git a/compiler/rustc_data_structures/src/stable_hasher.rs b/compiler/rustc_data_structures/src/stable_hasher.rs index 0872bd2c9ac..9cd0cc499ca 100644 --- a/compiler/rustc_data_structures/src/stable_hasher.rs +++ b/compiler/rustc_data_structures/src/stable_hasher.rs @@ -3,7 +3,7 @@ use std::marker::PhantomData; use std::mem; use std::num::NonZero; -use rustc_index::bit_set::{self, BitSet}; +use rustc_index::bit_set::{self, DenseBitSet}; use rustc_index::{Idx, IndexSlice, IndexVec}; use smallvec::SmallVec; @@ -544,7 +544,7 @@ where } } -impl<I: Idx, CTX> HashStable<CTX> for BitSet<I> { +impl<I: Idx, CTX> HashStable<CTX> for DenseBitSet<I> { fn hash_stable(&self, _ctx: &mut CTX, hasher: &mut StableHasher) { ::std::hash::Hash::hash(self, hasher); } diff --git a/compiler/rustc_data_structures/src/stable_hasher/tests.rs b/compiler/rustc_data_structures/src/stable_hasher/tests.rs index aab50a13af0..635f241847c 100644 --- a/compiler/rustc_data_structures/src/stable_hasher/tests.rs +++ b/compiler/rustc_data_structures/src/stable_hasher/tests.rs @@ -17,9 +17,9 @@ fn hash<T: HashStable<()>>(t: &T) -> Hash128 { // Check that bit set hash includes the domain size. #[test] fn test_hash_bit_set() { - use rustc_index::bit_set::BitSet; - let a: BitSet<usize> = BitSet::new_empty(1); - let b: BitSet<usize> = BitSet::new_empty(2); + use rustc_index::bit_set::DenseBitSet; + let a: DenseBitSet<usize> = DenseBitSet::new_empty(1); + let b: DenseBitSet<usize> = DenseBitSet::new_empty(2); assert_ne!(a, b); assert_ne!(hash(&a), hash(&b)); } diff --git a/compiler/rustc_data_structures/src/tagged_ptr.rs b/compiler/rustc_data_structures/src/tagged_ptr.rs index 2914eece679..94db421f77e 100644 --- a/compiler/rustc_data_structures/src/tagged_ptr.rs +++ b/compiler/rustc_data_structures/src/tagged_ptr.rs @@ -1,116 +1,26 @@ -//! This module implements tagged pointers. +//! This module implements tagged pointers. In order to utilize the pointer +//! packing, you must have a tag type implementing the [`Tag`] trait. //! -//! 
In order to utilize the pointer packing, you must have two types: a pointer, -//! and a tag. -//! -//! The pointer must implement the [`Pointer`] trait, with the primary -//! requirement being convertible to and from a raw pointer. Note that the -//! pointer must be dereferenceable, so raw pointers generally cannot implement -//! the [`Pointer`] trait. This implies that the pointer must also be non-null. -//! -//! Many common pointer types already implement the [`Pointer`] trait. -//! -//! The tag must implement the [`Tag`] trait. -//! -//! We assert that the tag and the [`Pointer`] types are compatible at compile +//! We assert that the tag and the reference type is compatible at compile //! time. +use std::fmt; +use std::hash::{Hash, Hasher}; +use std::marker::PhantomData; +use std::num::NonZero; use std::ops::Deref; use std::ptr::NonNull; -use std::rc::Rc; -use std::sync::Arc; use crate::aligned::Aligned; +use crate::stable_hasher::{HashStable, StableHasher}; -mod copy; -mod drop; -mod impl_tag; - -pub use copy::CopyTaggedPtr; -pub use drop::TaggedPtr; - -/// This describes the pointer type encapsulated by [`TaggedPtr`] and -/// [`CopyTaggedPtr`]. -/// -/// # Safety -/// -/// The pointer returned from [`into_ptr`] must be a [valid], pointer to -/// [`<Self as Deref>::Target`]. -/// -/// Note that if `Self` implements [`DerefMut`] the pointer returned from -/// [`into_ptr`] must be valid for writes (and thus calling [`NonNull::as_mut`] -/// on it must be safe). -/// -/// The [`BITS`] constant must be correct. [`BITS`] least-significant bits, -/// must be zero on all pointers returned from [`into_ptr`]. -/// -/// For example, if the alignment of [`Self::Target`] is 2, then `BITS` should be 1. -/// -/// [`BITS`]: Pointer::BITS -/// [`into_ptr`]: Pointer::into_ptr -/// [valid]: std::ptr#safety -/// [`<Self as Deref>::Target`]: Deref::Target -/// [`Self::Target`]: Deref::Target -/// [`DerefMut`]: std::ops::DerefMut -pub unsafe trait Pointer: Deref { - /// Number of unused (always zero) **least-significant bits** in this - /// pointer, usually related to the pointees alignment. - /// - /// For example if [`BITS`] = `2`, then given `ptr = Self::into_ptr(..)`, - /// `ptr.addr() & 0b11 == 0` must be true. - /// - /// Most likely the value you want to use here is the following, unless - /// your [`Self::Target`] type is unsized (e.g., `ty::List<T>` in rustc) - /// or your pointer is over/under aligned, in which case you'll need to - /// manually figure out what the right type to pass to [`bits_for`] is, or - /// what the value to set here. - /// - /// ```rust - /// # use std::ops::Deref; - /// # use rustc_data_structures::tagged_ptr::bits_for; - /// # struct T; - /// # impl Deref for T { type Target = u8; fn deref(&self) -> &u8 { &0 } } - /// # impl T { - /// const BITS: u32 = bits_for::<<Self as Deref>::Target>(); - /// # } - /// ``` - /// - /// [`BITS`]: Pointer::BITS - /// [`Self::Target`]: Deref::Target - const BITS: u32; - - /// Turns this pointer into a raw, non-null pointer. - /// - /// The inverse of this function is [`from_ptr`]. - /// - /// This function guarantees that the least-significant [`Self::BITS`] bits - /// are zero. - /// - /// [`from_ptr`]: Pointer::from_ptr - /// [`Self::BITS`]: Pointer::BITS - fn into_ptr(self) -> NonNull<Self::Target>; - - /// Re-creates the original pointer, from a raw pointer returned by [`into_ptr`]. - /// - /// # Safety - /// - /// The passed `ptr` must be returned from [`into_ptr`]. 
- /// - /// This acts as [`ptr::read::<Self>()`] semantically, it should not be called more than - /// once on non-[`Copy`] `Pointer`s. - /// - /// [`into_ptr`]: Pointer::into_ptr - /// [`ptr::read::<Self>()`]: std::ptr::read - unsafe fn from_ptr(ptr: NonNull<Self::Target>) -> Self; -} - -/// This describes tags that the [`TaggedPtr`] struct can hold. +/// This describes tags that the [`TaggedRef`] struct can hold. /// /// # Safety /// -/// The [`BITS`] constant must be correct. -/// -/// No more than [`BITS`] least-significant bits may be set in the returned usize. +/// - The [`BITS`] constant must be correct. +/// - No more than [`BITS`] least-significant bits may be set in the returned usize. +/// - [`Eq`] and [`Hash`] must be implementable with the returned `usize` from `into_usize`. /// /// [`BITS`]: Tag::BITS pub unsafe trait Tag: Copy { @@ -166,118 +76,217 @@ pub const fn bits_for_tags(mut tags: &[usize]) -> u32 { bits } -unsafe impl<T: ?Sized + Aligned> Pointer for Box<T> { - const BITS: u32 = bits_for::<Self::Target>(); +/// A covariant [`Copy`] tagged borrow. This is essentially `{ pointer: &'a P, tag: T }` packed +/// in a single reference. +pub struct TaggedRef<'a, Pointee: Aligned + ?Sized, T: Tag> { + /// This is semantically a pair of `pointer: &'a P` and `tag: T` fields, + /// however we pack them in a single pointer, to save space. + /// + /// We pack the tag into the **most**-significant bits of the pointer to + /// ease retrieval of the value. A left shift is a multiplication and + /// those are embeddable in instruction encoding, for example: + /// + /// ```asm + /// // (<https://godbolt.org/z/jqcYPWEr3>) + /// example::shift_read3: + /// mov eax, dword ptr [8*rdi] + /// ret + /// + /// example::mask_read3: + /// and rdi, -8 + /// mov eax, dword ptr [rdi] + /// ret + /// ``` + /// + /// This is ASM outputted by rustc for reads of values behind tagged + /// pointers for different approaches of tagging: + /// - `shift_read3` uses `<< 3` (the tag is in the most-significant bits) + /// - `mask_read3` uses `& !0b111` (the tag is in the least-significant bits) + /// + /// The shift approach thus produces less instructions and is likely faster + /// (see <https://godbolt.org/z/Y913sMdWb>). + /// + /// Encoding diagram: + /// ```text + /// [ packed.addr ] + /// [ tag ] [ pointer.addr >> T::BITS ] <-- usize::BITS - T::BITS bits + /// ^ + /// | + /// T::BITS bits + /// ``` + /// + /// The tag can be retrieved by `packed.addr() >> T::BITS` and the pointer + /// can be retrieved by `packed.map_addr(|addr| addr << T::BITS)`. + packed: NonNull<Pointee>, + tag_pointer_ghost: PhantomData<(&'a Pointee, T)>, +} +impl<'a, P, T> TaggedRef<'a, P, T> +where + P: Aligned + ?Sized, + T: Tag, +{ + /// Tags `pointer` with `tag`. + /// + /// [`TaggedRef`]: crate::tagged_ptr::TaggedRef #[inline] - fn into_ptr(self) -> NonNull<T> { - // Safety: pointers from `Box::into_raw` are valid & non-null - unsafe { NonNull::new_unchecked(Box::into_raw(self)) } + pub fn new(pointer: &'a P, tag: T) -> Self { + Self { packed: Self::pack(NonNull::from(pointer), tag), tag_pointer_ghost: PhantomData } } + /// Retrieves the pointer. 
#[inline] - unsafe fn from_ptr(ptr: NonNull<T>) -> Self { - // Safety: `ptr` comes from `into_ptr` which calls `Box::into_raw` - unsafe { Box::from_raw(ptr.as_ptr()) } + pub fn pointer(self) -> &'a P { + // SAFETY: pointer_raw returns the original pointer + unsafe { self.pointer_raw().as_ref() } } -} - -unsafe impl<T: ?Sized + Aligned> Pointer for Rc<T> { - const BITS: u32 = bits_for::<Self::Target>(); + /// Retrieves the tag. #[inline] - fn into_ptr(self) -> NonNull<T> { - // Safety: pointers from `Rc::into_raw` are valid & non-null - unsafe { NonNull::new_unchecked(Rc::into_raw(self).cast_mut()) } + pub fn tag(&self) -> T { + // Unpack the tag, according to the `self.packed` encoding scheme + let tag = self.packed.addr().get() >> Self::TAG_BIT_SHIFT; + + // Safety: + // The shift retrieves the original value from `T::into_usize`, + // satisfying `T::from_usize`'s preconditions. + unsafe { T::from_usize(tag) } } + /// Sets the tag to a new value. #[inline] - unsafe fn from_ptr(ptr: NonNull<T>) -> Self { - // Safety: `ptr` comes from `into_ptr` which calls `Rc::into_raw` - unsafe { Rc::from_raw(ptr.as_ptr()) } + pub fn set_tag(&mut self, tag: T) { + self.packed = Self::pack(self.pointer_raw(), tag); } -} -unsafe impl<T: ?Sized + Aligned> Pointer for Arc<T> { - const BITS: u32 = bits_for::<Self::Target>(); + const TAG_BIT_SHIFT: u32 = usize::BITS - T::BITS; + const ASSERTION: () = { assert!(T::BITS <= bits_for::<P>()) }; + /// Pack pointer `ptr` with a `tag`, according to `self.packed` encoding scheme. #[inline] - fn into_ptr(self) -> NonNull<T> { - // Safety: pointers from `Arc::into_raw` are valid & non-null - unsafe { NonNull::new_unchecked(Arc::into_raw(self).cast_mut()) } + fn pack(ptr: NonNull<P>, tag: T) -> NonNull<P> { + // Trigger assert! + let () = Self::ASSERTION; + + let packed_tag = tag.into_usize() << Self::TAG_BIT_SHIFT; + + ptr.map_addr(|addr| { + // Safety: + // - The pointer is `NonNull` => it's address is `NonZero<usize>` + // - `P::BITS` least significant bits are always zero (`Pointer` contract) + // - `T::BITS <= P::BITS` (from `Self::ASSERTION`) + // + // Thus `addr >> T::BITS` is guaranteed to be non-zero. + // + // `{non_zero} | packed_tag` can't make the value zero. + + let packed = (addr.get() >> T::BITS) | packed_tag; + unsafe { NonZero::new_unchecked(packed) } + }) } + /// Retrieves the original raw pointer from `self.packed`. 
#[inline] - unsafe fn from_ptr(ptr: NonNull<T>) -> Self { - // Safety: `ptr` comes from `into_ptr` which calls `Arc::into_raw` - unsafe { Arc::from_raw(ptr.as_ptr()) } + pub(super) fn pointer_raw(&self) -> NonNull<P> { + self.packed.map_addr(|addr| unsafe { NonZero::new_unchecked(addr.get() << T::BITS) }) } } -unsafe impl<'a, T: 'a + ?Sized + Aligned> Pointer for &'a T { - const BITS: u32 = bits_for::<Self::Target>(); +impl<P, T> Copy for TaggedRef<'_, P, T> +where + P: Aligned + ?Sized, + T: Tag, +{ +} +impl<P, T> Clone for TaggedRef<'_, P, T> +where + P: Aligned + ?Sized, + T: Tag, +{ #[inline] - fn into_ptr(self) -> NonNull<T> { - NonNull::from(self) + fn clone(&self) -> Self { + *self } +} + +impl<P, T> Deref for TaggedRef<'_, P, T> +where + P: Aligned + ?Sized, + T: Tag, +{ + type Target = P; #[inline] - unsafe fn from_ptr(ptr: NonNull<T>) -> Self { - // Safety: - // `ptr` comes from `into_ptr` which gets the pointer from a reference - unsafe { ptr.as_ref() } + fn deref(&self) -> &Self::Target { + self.pointer() } } -unsafe impl<'a, T: 'a + ?Sized + Aligned> Pointer for &'a mut T { - const BITS: u32 = bits_for::<Self::Target>(); +impl<P, T> fmt::Debug for TaggedRef<'_, P, T> +where + P: Aligned + fmt::Debug + ?Sized, + T: Tag + fmt::Debug, +{ + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + f.debug_struct("TaggedRef") + .field("pointer", &self.pointer()) + .field("tag", &self.tag()) + .finish() + } +} +impl<P, T> PartialEq for TaggedRef<'_, P, T> +where + P: Aligned + ?Sized, + T: Tag, +{ #[inline] - fn into_ptr(self) -> NonNull<T> { - NonNull::from(self) + #[allow(ambiguous_wide_pointer_comparisons)] + fn eq(&self, other: &Self) -> bool { + self.packed == other.packed } +} +impl<P, T: Tag> Eq for TaggedRef<'_, P, T> {} + +impl<P, T: Tag> Hash for TaggedRef<'_, P, T> { #[inline] - unsafe fn from_ptr(mut ptr: NonNull<T>) -> Self { - // Safety: - // `ptr` comes from `into_ptr` which gets the pointer from a reference - unsafe { ptr.as_mut() } + fn hash<H: Hasher>(&self, state: &mut H) { + self.packed.hash(state); } } -/// A tag type used in [`CopyTaggedPtr`] and [`TaggedPtr`] tests. 
-#[derive(Copy, Clone, Debug, PartialEq, Eq)] -#[cfg(test)] -enum Tag2 { - B00 = 0b00, - B01 = 0b01, - B10 = 0b10, - B11 = 0b11, +impl<'a, P, T, HCX> HashStable<HCX> for TaggedRef<'a, P, T> +where + P: HashStable<HCX> + Aligned + ?Sized, + T: Tag + HashStable<HCX>, +{ + fn hash_stable(&self, hcx: &mut HCX, hasher: &mut StableHasher) { + self.pointer().hash_stable(hcx, hasher); + self.tag().hash_stable(hcx, hasher); + } } -#[cfg(test)] -unsafe impl Tag for Tag2 { - const BITS: u32 = 2; - - fn into_usize(self) -> usize { - self as _ - } +// Safety: +// `TaggedRef<P, T, ..>` is semantically just `{ ptr: P, tag: T }`, as such +// it's ok to implement `Sync` as long as `P: Sync, T: Sync` +unsafe impl<P, T> Sync for TaggedRef<'_, P, T> +where + P: Sync + Aligned + ?Sized, + T: Sync + Tag, +{ +} - unsafe fn from_usize(tag: usize) -> Self { - match tag { - 0b00 => Tag2::B00, - 0b01 => Tag2::B01, - 0b10 => Tag2::B10, - 0b11 => Tag2::B11, - _ => unreachable!(), - } - } +// Safety: +// `TaggedRef<P, T, ..>` is semantically just `{ ptr: P, tag: T }`, as such +// it's ok to implement `Send` as long as `P: Send, T: Send` +unsafe impl<P, T> Send for TaggedRef<'_, P, T> +where + P: Sync + Aligned + ?Sized, + T: Send + Tag, +{ } #[cfg(test)] -impl<HCX> crate::stable_hasher::HashStable<HCX> for Tag2 { - fn hash_stable(&self, hcx: &mut HCX, hasher: &mut crate::stable_hasher::StableHasher) { - (*self as u8).hash_stable(hcx, hasher); - } -} +mod tests; diff --git a/compiler/rustc_data_structures/src/tagged_ptr/copy.rs b/compiler/rustc_data_structures/src/tagged_ptr/copy.rs deleted file mode 100644 index 25e107b0f41..00000000000 --- a/compiler/rustc_data_structures/src/tagged_ptr/copy.rs +++ /dev/null @@ -1,330 +0,0 @@ -use std::fmt; -use std::hash::{Hash, Hasher}; -use std::marker::PhantomData; -use std::mem::ManuallyDrop; -use std::num::NonZero; -use std::ops::{Deref, DerefMut}; -use std::ptr::NonNull; - -use super::{Pointer, Tag}; -use crate::stable_hasher::{HashStable, StableHasher}; - -/// A [`Copy`] tagged pointer. -/// -/// This is essentially `{ pointer: P, tag: T }` packed in a single pointer. -/// -/// You should use this instead of the [`TaggedPtr`] type in all cases where -/// `P` implements [`Copy`]. -/// -/// If `COMPARE_PACKED` is true, then the pointers will be compared and hashed without -/// unpacking. Otherwise we don't implement [`PartialEq`], [`Eq`] and [`Hash`]; -/// if you want that, wrap the [`CopyTaggedPtr`]. -/// -/// [`TaggedPtr`]: crate::tagged_ptr::TaggedPtr -pub struct CopyTaggedPtr<P, T, const COMPARE_PACKED: bool> -where - P: Pointer, - T: Tag, -{ - /// This is semantically a pair of `pointer: P` and `tag: T` fields, - /// however we pack them in a single pointer, to save space. - /// - /// We pack the tag into the **most**-significant bits of the pointer to - /// ease retrieval of the value. 
A left shift is a multiplication and - /// those are embeddable in instruction encoding, for example: - /// - /// ```asm - /// // (<https://godbolt.org/z/jqcYPWEr3>) - /// example::shift_read3: - /// mov eax, dword ptr [8*rdi] - /// ret - /// - /// example::mask_read3: - /// and rdi, -8 - /// mov eax, dword ptr [rdi] - /// ret - /// ``` - /// - /// This is ASM outputted by rustc for reads of values behind tagged - /// pointers for different approaches of tagging: - /// - `shift_read3` uses `<< 3` (the tag is in the most-significant bits) - /// - `mask_read3` uses `& !0b111` (the tag is in the least-significant bits) - /// - /// The shift approach thus produces less instructions and is likely faster - /// (see <https://godbolt.org/z/Y913sMdWb>). - /// - /// Encoding diagram: - /// ```text - /// [ packed.addr ] - /// [ tag ] [ pointer.addr >> T::BITS ] <-- usize::BITS - T::BITS bits - /// ^ - /// | - /// T::BITS bits - /// ``` - /// - /// The tag can be retrieved by `packed.addr() >> T::BITS` and the pointer - /// can be retrieved by `packed.map_addr(|addr| addr << T::BITS)`. - packed: NonNull<P::Target>, - tag_ghost: PhantomData<T>, -} - -// Note that even though `CopyTaggedPtr` is only really expected to work with -// `P: Copy`, can't add `P: Copy` bound, because `CopyTaggedPtr` is used in the -// `TaggedPtr`'s implementation. -impl<P, T, const CP: bool> CopyTaggedPtr<P, T, CP> -where - P: Pointer, - T: Tag, -{ - /// Tags `pointer` with `tag`. - /// - /// Note that this leaks `pointer`: it won't be dropped when - /// `CopyTaggedPtr` is dropped. If you have a pointer with a significant - /// drop, use [`TaggedPtr`] instead. - /// - /// [`TaggedPtr`]: crate::tagged_ptr::TaggedPtr - #[inline] - pub fn new(pointer: P, tag: T) -> Self { - Self { packed: Self::pack(P::into_ptr(pointer), tag), tag_ghost: PhantomData } - } - - /// Retrieves the pointer. - #[inline] - pub fn pointer(self) -> P - where - P: Copy, - { - // SAFETY: pointer_raw returns the original pointer - // - // Note that this isn't going to double-drop or anything because we have - // P: Copy - unsafe { P::from_ptr(self.pointer_raw()) } - } - - /// Retrieves the tag. - #[inline] - pub fn tag(&self) -> T { - // Unpack the tag, according to the `self.packed` encoding scheme - let tag = self.packed.addr().get() >> Self::TAG_BIT_SHIFT; - - // Safety: - // The shift retrieves the original value from `T::into_usize`, - // satisfying `T::from_usize`'s preconditions. - unsafe { T::from_usize(tag) } - } - - /// Sets the tag to a new value. - #[inline] - pub fn set_tag(&mut self, tag: T) { - self.packed = Self::pack(self.pointer_raw(), tag); - } - - const TAG_BIT_SHIFT: u32 = usize::BITS - T::BITS; - const ASSERTION: () = { assert!(T::BITS <= P::BITS) }; - - /// Pack pointer `ptr` that comes from [`P::into_ptr`] with a `tag`, - /// according to `self.packed` encoding scheme. - /// - /// [`P::into_ptr`]: Pointer::into_ptr - #[inline] - fn pack(ptr: NonNull<P::Target>, tag: T) -> NonNull<P::Target> { - // Trigger assert! - let () = Self::ASSERTION; - - let packed_tag = tag.into_usize() << Self::TAG_BIT_SHIFT; - - ptr.map_addr(|addr| { - // Safety: - // - The pointer is `NonNull` => it's address is `NonZero<usize>` - // - `P::BITS` least significant bits are always zero (`Pointer` contract) - // - `T::BITS <= P::BITS` (from `Self::ASSERTION`) - // - // Thus `addr >> T::BITS` is guaranteed to be non-zero. - // - // `{non_zero} | packed_tag` can't make the value zero. 
- - let packed = (addr.get() >> T::BITS) | packed_tag; - unsafe { NonZero::new_unchecked(packed) } - }) - } - - /// Retrieves the original raw pointer from `self.packed`. - #[inline] - pub(super) fn pointer_raw(&self) -> NonNull<P::Target> { - self.packed.map_addr(|addr| unsafe { NonZero::new_unchecked(addr.get() << T::BITS) }) - } - - /// This provides a reference to the `P` pointer itself, rather than the - /// `Deref::Target`. It is used for cases where we want to call methods - /// that may be implement differently for the Pointer than the Pointee - /// (e.g., `Rc::clone` vs cloning the inner value). - pub(super) fn with_pointer_ref<R>(&self, f: impl FnOnce(&P) -> R) -> R { - // Safety: - // - `self.raw.pointer_raw()` is originally returned from `P::into_ptr` - // and as such is valid for `P::from_ptr`. - // - This also allows us to not care whatever `f` panics or not. - // - Even though we create a copy of the pointer, we store it inside - // `ManuallyDrop` and only access it by-ref, so we don't double-drop. - // - // Semantically this is just `f(&self.pointer)` (where `self.pointer` - // is non-packed original pointer). - // - // Note that even though `CopyTaggedPtr` is only really expected to - // work with `P: Copy`, we have to assume `P: ?Copy`, because - // `CopyTaggedPtr` is used in the `TaggedPtr`'s implementation. - let ptr = unsafe { ManuallyDrop::new(P::from_ptr(self.pointer_raw())) }; - f(&ptr) - } -} - -impl<P, T, const CP: bool> Copy for CopyTaggedPtr<P, T, CP> -where - P: Pointer + Copy, - T: Tag, -{ -} - -impl<P, T, const CP: bool> Clone for CopyTaggedPtr<P, T, CP> -where - P: Pointer + Copy, - T: Tag, -{ - #[inline] - fn clone(&self) -> Self { - *self - } -} - -impl<P, T, const CP: bool> Deref for CopyTaggedPtr<P, T, CP> -where - P: Pointer, - T: Tag, -{ - type Target = P::Target; - - #[inline] - fn deref(&self) -> &Self::Target { - // Safety: - // `pointer_raw` returns the original pointer from `P::into_ptr` which, - // by the `Pointer`'s contract, must be valid. - unsafe { self.pointer_raw().as_ref() } - } -} - -impl<P, T, const CP: bool> DerefMut for CopyTaggedPtr<P, T, CP> -where - P: Pointer + DerefMut, - T: Tag, -{ - #[inline] - fn deref_mut(&mut self) -> &mut Self::Target { - // Safety: - // `pointer_raw` returns the original pointer from `P::into_ptr` which, - // by the `Pointer`'s contract, must be valid for writes if - // `P: DerefMut`. 
- unsafe { self.pointer_raw().as_mut() } - } -} - -impl<P, T, const CP: bool> fmt::Debug for CopyTaggedPtr<P, T, CP> -where - P: Pointer + fmt::Debug, - T: Tag + fmt::Debug, -{ - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - self.with_pointer_ref(|ptr| { - f.debug_struct("CopyTaggedPtr").field("pointer", ptr).field("tag", &self.tag()).finish() - }) - } -} - -impl<P, T> PartialEq for CopyTaggedPtr<P, T, true> -where - P: Pointer, - T: Tag, -{ - #[inline] - #[allow(ambiguous_wide_pointer_comparisons)] - fn eq(&self, other: &Self) -> bool { - self.packed == other.packed - } -} - -impl<P, T> Eq for CopyTaggedPtr<P, T, true> -where - P: Pointer, - T: Tag, -{ -} - -impl<P, T> Hash for CopyTaggedPtr<P, T, true> -where - P: Pointer, - T: Tag, -{ - #[inline] - fn hash<H: Hasher>(&self, state: &mut H) { - self.packed.hash(state); - } -} - -impl<P, T, HCX, const CP: bool> HashStable<HCX> for CopyTaggedPtr<P, T, CP> -where - P: Pointer + HashStable<HCX>, - T: Tag + HashStable<HCX>, -{ - fn hash_stable(&self, hcx: &mut HCX, hasher: &mut StableHasher) { - self.with_pointer_ref(|ptr| ptr.hash_stable(hcx, hasher)); - self.tag().hash_stable(hcx, hasher); - } -} - -// Safety: -// `CopyTaggedPtr<P, T, ..>` is semantically just `{ ptr: P, tag: T }`, as such -// it's ok to implement `Sync` as long as `P: Sync, T: Sync` -unsafe impl<P, T, const CP: bool> Sync for CopyTaggedPtr<P, T, CP> -where - P: Sync + Pointer, - T: Sync + Tag, -{ -} - -// Safety: -// `CopyTaggedPtr<P, T, ..>` is semantically just `{ ptr: P, tag: T }`, as such -// it's ok to implement `Send` as long as `P: Send, T: Send` -unsafe impl<P, T, const CP: bool> Send for CopyTaggedPtr<P, T, CP> -where - P: Send + Pointer, - T: Send + Tag, -{ -} - -/// Test that `new` does not compile if there is not enough alignment for the -/// tag in the pointer. -/// -/// ```compile_fail,E0080 -/// use rustc_data_structures::tagged_ptr::{CopyTaggedPtr, Tag}; -/// -/// #[derive(Copy, Clone, Debug, PartialEq, Eq)] -/// enum Tag2 { B00 = 0b00, B01 = 0b01, B10 = 0b10, B11 = 0b11 }; -/// -/// unsafe impl Tag for Tag2 { -/// const BITS: u32 = 2; -/// -/// fn into_usize(self) -> usize { todo!() } -/// unsafe fn from_usize(tag: usize) -> Self { todo!() } -/// } -/// -/// let value = 12u16; -/// let reference = &value; -/// let tag = Tag2::B01; -/// -/// let _ptr = CopyTaggedPtr::<_, _, true>::new(reference, tag); -/// ``` -// For some reason miri does not get the compile error -// probably it `check`s instead of `build`ing? 
-#[cfg(not(miri))] -const _: () = (); - -#[cfg(test)] -mod tests; diff --git a/compiler/rustc_data_structures/src/tagged_ptr/copy/tests.rs b/compiler/rustc_data_structures/src/tagged_ptr/copy/tests.rs deleted file mode 100644 index 160af8a65d9..00000000000 --- a/compiler/rustc_data_structures/src/tagged_ptr/copy/tests.rs +++ /dev/null @@ -1,50 +0,0 @@ -use std::ptr; - -use crate::hashes::Hash128; -use crate::stable_hasher::{HashStable, StableHasher}; -use crate::tagged_ptr::{CopyTaggedPtr, Pointer, Tag, Tag2}; - -#[test] -fn smoke() { - let value = 12u32; - let reference = &value; - let tag = Tag2::B01; - - let ptr = tag_ptr(reference, tag); - - assert_eq!(ptr.tag(), tag); - assert_eq!(*ptr, 12); - assert!(ptr::eq(ptr.pointer(), reference)); - - let copy = ptr; - - let mut ptr = ptr; - ptr.set_tag(Tag2::B00); - assert_eq!(ptr.tag(), Tag2::B00); - - assert_eq!(copy.tag(), tag); - assert_eq!(*copy, 12); - assert!(ptr::eq(copy.pointer(), reference)); -} - -#[test] -fn stable_hash_hashes_as_tuple() { - let hash_packed = { - let mut hasher = StableHasher::new(); - tag_ptr(&12, Tag2::B11).hash_stable(&mut (), &mut hasher); - hasher.finish::<Hash128>() - }; - - let hash_tupled = { - let mut hasher = StableHasher::new(); - (&12, Tag2::B11).hash_stable(&mut (), &mut hasher); - hasher.finish::<Hash128>() - }; - - assert_eq!(hash_packed, hash_tupled); -} - -/// Helper to create tagged pointers without specifying `COMPARE_PACKED` if it does not matter. -fn tag_ptr<P: Pointer, T: Tag>(ptr: P, tag: T) -> CopyTaggedPtr<P, T, true> { - CopyTaggedPtr::new(ptr, tag) -} diff --git a/compiler/rustc_data_structures/src/tagged_ptr/drop.rs b/compiler/rustc_data_structures/src/tagged_ptr/drop.rs deleted file mode 100644 index 319a8cdd399..00000000000 --- a/compiler/rustc_data_structures/src/tagged_ptr/drop.rs +++ /dev/null @@ -1,178 +0,0 @@ -use std::fmt; -use std::hash::{Hash, Hasher}; -use std::ops::{Deref, DerefMut}; - -use super::{CopyTaggedPtr, Pointer, Tag}; -use crate::stable_hasher::{HashStable, StableHasher}; - -/// A tagged pointer that supports pointers that implement [`Drop`]. -/// -/// This is essentially `{ pointer: P, tag: T }` packed in a single pointer. -/// -/// You should use [`CopyTaggedPtr`] instead of the this type in all cases -/// where `P` implements [`Copy`]. -/// -/// If `COMPARE_PACKED` is true, then the pointers will be compared and hashed without -/// unpacking. Otherwise we don't implement [`PartialEq`], [`Eq`] and [`Hash`]; -/// if you want that, wrap the [`TaggedPtr`]. -pub struct TaggedPtr<P, T, const COMPARE_PACKED: bool> -where - P: Pointer, - T: Tag, -{ - raw: CopyTaggedPtr<P, T, COMPARE_PACKED>, -} - -impl<P, T, const CP: bool> TaggedPtr<P, T, CP> -where - P: Pointer, - T: Tag, -{ - /// Tags `pointer` with `tag`. - #[inline] - pub fn new(pointer: P, tag: T) -> Self { - TaggedPtr { raw: CopyTaggedPtr::new(pointer, tag) } - } - - /// Retrieves the tag. - #[inline] - pub fn tag(&self) -> T { - self.raw.tag() - } - - /// Sets the tag to a new value. 
- #[inline] - pub fn set_tag(&mut self, tag: T) { - self.raw.set_tag(tag) - } -} - -impl<P, T, const CP: bool> Clone for TaggedPtr<P, T, CP> -where - P: Pointer + Clone, - T: Tag, -{ - fn clone(&self) -> Self { - let ptr = self.raw.with_pointer_ref(P::clone); - - Self::new(ptr, self.tag()) - } -} - -impl<P, T, const CP: bool> Deref for TaggedPtr<P, T, CP> -where - P: Pointer, - T: Tag, -{ - type Target = P::Target; - - #[inline] - fn deref(&self) -> &Self::Target { - self.raw.deref() - } -} - -impl<P, T, const CP: bool> DerefMut for TaggedPtr<P, T, CP> -where - P: Pointer + DerefMut, - T: Tag, -{ - #[inline] - fn deref_mut(&mut self) -> &mut Self::Target { - self.raw.deref_mut() - } -} - -impl<P, T, const CP: bool> Drop for TaggedPtr<P, T, CP> -where - P: Pointer, - T: Tag, -{ - fn drop(&mut self) { - // No need to drop the tag, as it's Copy - unsafe { - drop(P::from_ptr(self.raw.pointer_raw())); - } - } -} - -impl<P, T, const CP: bool> fmt::Debug for TaggedPtr<P, T, CP> -where - P: Pointer + fmt::Debug, - T: Tag + fmt::Debug, -{ - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - self.raw.with_pointer_ref(|ptr| { - f.debug_struct("TaggedPtr").field("pointer", ptr).field("tag", &self.tag()).finish() - }) - } -} - -impl<P, T> PartialEq for TaggedPtr<P, T, true> -where - P: Pointer, - T: Tag, -{ - #[inline] - fn eq(&self, other: &Self) -> bool { - self.raw.eq(&other.raw) - } -} - -impl<P, T> Eq for TaggedPtr<P, T, true> -where - P: Pointer, - T: Tag, -{ -} - -impl<P, T> Hash for TaggedPtr<P, T, true> -where - P: Pointer, - T: Tag, -{ - #[inline] - fn hash<H: Hasher>(&self, state: &mut H) { - self.raw.hash(state); - } -} - -impl<P, T, HCX, const CP: bool> HashStable<HCX> for TaggedPtr<P, T, CP> -where - P: Pointer + HashStable<HCX>, - T: Tag + HashStable<HCX>, -{ - fn hash_stable(&self, hcx: &mut HCX, hasher: &mut StableHasher) { - self.raw.hash_stable(hcx, hasher); - } -} - -/// Test that `new` does not compile if there is not enough alignment for the -/// tag in the pointer. -/// -/// ```compile_fail,E0080 -/// use rustc_data_structures::tagged_ptr::{TaggedPtr, Tag}; -/// -/// #[derive(Copy, Clone, Debug, PartialEq, Eq)] -/// enum Tag2 { B00 = 0b00, B01 = 0b01, B10 = 0b10, B11 = 0b11 }; -/// -/// unsafe impl Tag for Tag2 { -/// const BITS: u32 = 2; -/// -/// fn into_usize(self) -> usize { todo!() } -/// unsafe fn from_usize(tag: usize) -> Self { todo!() } -/// } -/// -/// let value = 12u16; -/// let reference = &value; -/// let tag = Tag2::B01; -/// -/// let _ptr = TaggedPtr::<_, _, true>::new(reference, tag); -/// ``` -// For some reason miri does not get the compile error -// probably it `check`s instead of `build`ing? 
-#[cfg(not(miri))] -const _: () = (); - -#[cfg(test)] -mod tests; diff --git a/compiler/rustc_data_structures/src/tagged_ptr/drop/tests.rs b/compiler/rustc_data_structures/src/tagged_ptr/drop/tests.rs deleted file mode 100644 index 4d342c72cc5..00000000000 --- a/compiler/rustc_data_structures/src/tagged_ptr/drop/tests.rs +++ /dev/null @@ -1,72 +0,0 @@ -use std::ptr; -use std::sync::Arc; - -use crate::tagged_ptr::{Pointer, Tag, Tag2, TaggedPtr}; - -#[test] -fn smoke() { - let value = 12u32; - let reference = &value; - let tag = Tag2::B01; - - let ptr = tag_ptr(reference, tag); - - assert_eq!(ptr.tag(), tag); - assert_eq!(*ptr, 12); - - let clone = ptr.clone(); - assert_eq!(clone.tag(), tag); - assert_eq!(*clone, 12); - - let mut ptr = ptr; - ptr.set_tag(Tag2::B00); - assert_eq!(ptr.tag(), Tag2::B00); - - assert_eq!(clone.tag(), tag); - assert_eq!(*clone, 12); - assert!(ptr::eq(&*ptr, &*clone)) -} - -#[test] -fn boxed() { - let value = 12u32; - let boxed = Box::new(value); - let tag = Tag2::B01; - - let ptr = tag_ptr(boxed, tag); - - assert_eq!(ptr.tag(), tag); - assert_eq!(*ptr, 12); - - let clone = ptr.clone(); - assert_eq!(clone.tag(), tag); - assert_eq!(*clone, 12); - - let mut ptr = ptr; - ptr.set_tag(Tag2::B00); - assert_eq!(ptr.tag(), Tag2::B00); - - assert_eq!(clone.tag(), tag); - assert_eq!(*clone, 12); - assert!(!ptr::eq(&*ptr, &*clone)) -} - -#[test] -fn arclones() { - let value = 12u32; - let arc = Arc::new(value); - let tag = Tag2::B01; - - let ptr = tag_ptr(arc, tag); - - assert_eq!(ptr.tag(), tag); - assert_eq!(*ptr, 12); - - let clone = ptr.clone(); - assert!(ptr::eq(&*ptr, &*clone)) -} - -/// Helper to create tagged pointers without specifying `COMPARE_PACKED` if it does not matter. -fn tag_ptr<P: Pointer, T: Tag>(ptr: P, tag: T) -> TaggedPtr<P, T, true> { - TaggedPtr::new(ptr, tag) -} diff --git a/compiler/rustc_data_structures/src/tagged_ptr/impl_tag.rs b/compiler/rustc_data_structures/src/tagged_ptr/impl_tag.rs deleted file mode 100644 index f17a0bf26d7..00000000000 --- a/compiler/rustc_data_structures/src/tagged_ptr/impl_tag.rs +++ /dev/null @@ -1,144 +0,0 @@ -/// Implements [`Tag`] for a given type. -/// -/// You can use `impl_tag` on structs and enums. -/// You need to specify the type and all its possible values, -/// which can only be paths with optional fields. -/// -/// [`Tag`]: crate::tagged_ptr::Tag -/// -/// # Examples -/// -/// Basic usage: -/// -/// ``` -/// #![feature(macro_metavar_expr)] -/// use rustc_data_structures::{impl_tag, tagged_ptr::Tag}; -/// -/// #[derive(Copy, Clone, PartialEq, Debug)] -/// enum SomeTag { -/// A, -/// B, -/// X { v: bool }, -/// Y(bool, bool), -/// } -/// -/// impl_tag! 
{ -/// // The type for which the `Tag` will be implemented -/// impl Tag for SomeTag; -/// // You need to specify all possible tag values: -/// SomeTag::A, // 0 -/// SomeTag::B, // 1 -/// // For variants with fields, you need to specify the fields: -/// SomeTag::X { v: true }, // 2 -/// SomeTag::X { v: false }, // 3 -/// // For tuple variants use named syntax: -/// SomeTag::Y { 0: true, 1: true }, // 4 -/// SomeTag::Y { 0: false, 1: true }, // 5 -/// SomeTag::Y { 0: true, 1: false }, // 6 -/// SomeTag::Y { 0: false, 1: false }, // 7 -/// } -/// -/// // Tag values are assigned in order: -/// assert_eq!(SomeTag::A.into_usize(), 0); -/// assert_eq!(SomeTag::X { v: false }.into_usize(), 3); -/// assert_eq!(SomeTag::Y(false, true).into_usize(), 5); -/// -/// assert_eq!(unsafe { SomeTag::from_usize(1) }, SomeTag::B); -/// assert_eq!(unsafe { SomeTag::from_usize(2) }, SomeTag::X { v: true }); -/// assert_eq!(unsafe { SomeTag::from_usize(7) }, SomeTag::Y(false, false)); -/// ``` -/// -/// Structs are supported: -/// -/// ``` -/// #![feature(macro_metavar_expr)] -/// # use rustc_data_structures::impl_tag; -/// #[derive(Copy, Clone)] -/// struct Flags { a: bool, b: bool } -/// -/// impl_tag! { -/// impl Tag for Flags; -/// Flags { a: true, b: true }, -/// Flags { a: false, b: true }, -/// Flags { a: true, b: false }, -/// Flags { a: false, b: false }, -/// } -/// ``` -/// -/// Not specifying all values results in a compile error: -/// -/// ```compile_fail,E0004 -/// #![feature(macro_metavar_expr)] -/// # use rustc_data_structures::impl_tag; -/// #[derive(Copy, Clone)] -/// enum E { -/// A, -/// B, -/// } -/// -/// impl_tag! { -/// impl Tag for E; -/// E::A, -/// } -/// ``` -#[macro_export] -macro_rules! impl_tag { - ( - impl Tag for $Self:ty; - $( - $($path:ident)::* $( { $( $fields:tt )* })?, - )* - ) => { - // Safety: - // `bits_for_tags` is called on the same `${index()}`-es as - // `into_usize` returns, thus `BITS` constant is correct. - unsafe impl $crate::tagged_ptr::Tag for $Self { - const BITS: u32 = $crate::tagged_ptr::bits_for_tags(&[ - $( - ${index()}, - $( ${ignore($path)} )* - )* - ]); - - #[inline] - fn into_usize(self) -> usize { - // This forbids use of repeating patterns (`Enum::V`&`Enum::V`, etc) - // (or at least it should, see <https://github.com/rust-lang/rust/issues/110613>) - #[forbid(unreachable_patterns)] - match self { - // `match` is doing heavy lifting here, by requiring exhaustiveness - $( - $($path)::* $( { $( $fields )* } )? => ${index()}, - )* - } - } - - #[inline] - unsafe fn from_usize(tag: usize) -> Self { - match tag { - $( - ${index()} => $($path)::* $( { $( $fields )* } )?, - )* - - // Safety: - // `into_usize` only returns `${index()}` of the same - // repetition as we are filtering above, thus if this is - // reached, the safety contract of this function was - // already breached. - _ => unsafe { - debug_assert!( - false, - "invalid tag: {tag}\ - (this is a bug in the caller of `from_usize`)" - ); - std::hint::unreachable_unchecked() - }, - } - } - - } - }; -} - -#[cfg(test)] -mod tests; diff --git a/compiler/rustc_data_structures/src/tagged_ptr/impl_tag/tests.rs b/compiler/rustc_data_structures/src/tagged_ptr/impl_tag/tests.rs deleted file mode 100644 index 62c926153e1..00000000000 --- a/compiler/rustc_data_structures/src/tagged_ptr/impl_tag/tests.rs +++ /dev/null @@ -1,34 +0,0 @@ -#[test] -fn bits_constant() { - use crate::tagged_ptr::Tag; - - #[derive(Copy, Clone)] - struct Unit; - impl_tag! 
{ impl Tag for Unit; Unit, } - assert_eq!(Unit::BITS, 0); - - #[derive(Copy, Clone)] - enum Enum3 { - A, - B, - C, - } - impl_tag! { impl Tag for Enum3; Enum3::A, Enum3::B, Enum3::C, } - assert_eq!(Enum3::BITS, 2); - - #[derive(Copy, Clone)] - struct Eight(bool, bool, bool); - impl_tag! { - impl Tag for Eight; - Eight { 0: true, 1: true, 2: true }, - Eight { 0: true, 1: true, 2: false }, - Eight { 0: true, 1: false, 2: true }, - Eight { 0: true, 1: false, 2: false }, - Eight { 0: false, 1: true, 2: true }, - Eight { 0: false, 1: true, 2: false }, - Eight { 0: false, 1: false, 2: true }, - Eight { 0: false, 1: false, 2: false }, - } - - assert_eq!(Eight::BITS, 3); -} diff --git a/compiler/rustc_data_structures/src/tagged_ptr/tests.rs b/compiler/rustc_data_structures/src/tagged_ptr/tests.rs new file mode 100644 index 00000000000..b1bdee18d6d --- /dev/null +++ b/compiler/rustc_data_structures/src/tagged_ptr/tests.rs @@ -0,0 +1,105 @@ +use std::ptr; + +use super::*; +use crate::hashes::Hash128; +use crate::stable_hasher::{HashStable, StableHasher}; + +/// A tag type used in [`TaggedRef`] tests. +#[derive(Copy, Clone, Debug, PartialEq, Eq)] +pub enum Tag2 { + B00 = 0b00, + B01 = 0b01, + B10 = 0b10, + B11 = 0b11, +} + +unsafe impl Tag for Tag2 { + const BITS: u32 = 2; + + fn into_usize(self) -> usize { + self as _ + } + + unsafe fn from_usize(tag: usize) -> Self { + match tag { + 0b00 => Tag2::B00, + 0b01 => Tag2::B01, + 0b10 => Tag2::B10, + 0b11 => Tag2::B11, + _ => unreachable!(), + } + } +} + +impl<HCX> crate::stable_hasher::HashStable<HCX> for Tag2 { + fn hash_stable(&self, hcx: &mut HCX, hasher: &mut crate::stable_hasher::StableHasher) { + (*self as u8).hash_stable(hcx, hasher); + } +} + +#[test] +fn smoke() { + let value = 12u32; + let reference = &value; + let tag = Tag2::B01; + + let ptr = TaggedRef::new(reference, tag); + + assert_eq!(ptr.tag(), tag); + assert_eq!(*ptr, 12); + assert!(ptr::eq(ptr.pointer(), reference)); + + let copy = ptr; + + let mut ptr = ptr; + ptr.set_tag(Tag2::B00); + assert_eq!(ptr.tag(), Tag2::B00); + + assert_eq!(copy.tag(), tag); + assert_eq!(*copy, 12); + assert!(ptr::eq(copy.pointer(), reference)); +} + +#[test] +fn stable_hash_hashes_as_tuple() { + let hash_packed = { + let mut hasher = StableHasher::new(); + TaggedRef::new(&12, Tag2::B11).hash_stable(&mut (), &mut hasher); + hasher.finish::<Hash128>() + }; + + let hash_tupled = { + let mut hasher = StableHasher::new(); + (&12, Tag2::B11).hash_stable(&mut (), &mut hasher); + hasher.finish::<Hash128>() + }; + + assert_eq!(hash_packed, hash_tupled); +} + +/// Test that `new` does not compile if there is not enough alignment for the +/// tag in the pointer. +/// +/// ```compile_fail,E0080 +/// use rustc_data_structures::tagged_ptr::{TaggedRef, Tag}; +/// +/// #[derive(Copy, Clone, Debug, PartialEq, Eq)] +/// enum Tag2 { B00 = 0b00, B01 = 0b01, B10 = 0b10, B11 = 0b11 }; +/// +/// unsafe impl Tag for Tag2 { +/// const BITS: u32 = 2; +/// +/// fn into_usize(self) -> usize { todo!() } +/// unsafe fn from_usize(tag: usize) -> Self { todo!() } +/// } +/// +/// let value = 12u16; +/// let reference = &value; +/// let tag = Tag2::B01; +/// +/// let _ptr = TaggedRef::<_, _, true>::new(reference, tag); +/// ``` +// For some reason miri does not get the compile error +// probably it `check`s instead of `build`ing? 
+#[cfg(not(miri))] +const _: () = (); diff --git a/compiler/rustc_data_structures/src/work_queue.rs b/compiler/rustc_data_structures/src/work_queue.rs index ca052e2eac6..815756edfeb 100644 --- a/compiler/rustc_data_structures/src/work_queue.rs +++ b/compiler/rustc_data_structures/src/work_queue.rs @@ -1,7 +1,7 @@ use std::collections::VecDeque; use rustc_index::Idx; -use rustc_index::bit_set::BitSet; +use rustc_index::bit_set::DenseBitSet; /// A work queue is a handy data structure for tracking work left to /// do. (For example, basic blocks left to process.) It is basically a @@ -11,14 +11,14 @@ use rustc_index::bit_set::BitSet; /// and also use a bit set to track occupancy. pub struct WorkQueue<T: Idx> { deque: VecDeque<T>, - set: BitSet<T>, + set: DenseBitSet<T>, } impl<T: Idx> WorkQueue<T> { /// Creates a new work queue that starts empty, where elements range from (0..len). #[inline] pub fn with_none(len: usize) -> Self { - WorkQueue { deque: VecDeque::with_capacity(len), set: BitSet::new_empty(len) } + WorkQueue { deque: VecDeque::with_capacity(len), set: DenseBitSet::new_empty(len) } } /// Attempt to enqueue `element` in the work queue. Returns false if it was already present. |
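The largest change above replaces the `Pointer` trait and the `CopyTaggedPtr`/`TaggedPtr` wrappers with a single borrow-based `TaggedRef<'a, P, T>`. A minimal usage sketch, mirroring the `smoke` test added in `tagged_ptr/tests.rs`; the `Tag2` type is the two-bit test tag from that file, and the crate is assumed to be importable as `rustc_data_structures`:

```rust
use rustc_data_structures::tagged_ptr::{Tag, TaggedRef};

/// Two-bit tag, copied from the `tagged_ptr/tests.rs` added in this commit.
#[derive(Copy, Clone, Debug, PartialEq, Eq)]
enum Tag2 {
    B00 = 0b00,
    B01 = 0b01,
    B10 = 0b10,
    B11 = 0b11,
}

unsafe impl Tag for Tag2 {
    const BITS: u32 = 2;

    fn into_usize(self) -> usize {
        self as _
    }

    unsafe fn from_usize(tag: usize) -> Self {
        match tag {
            0b00 => Tag2::B00,
            0b01 => Tag2::B01,
            0b10 => Tag2::B10,
            0b11 => Tag2::B11,
            _ => unreachable!(),
        }
    }
}

fn main() {
    let value = 12u32;

    // Pack a `&u32` and a two-bit tag into one pointer-sized value.
    // `u32` has 4-byte alignment, so two low bits are available for the tag.
    let tagged = TaggedRef::new(&value, Tag2::B01);

    assert_eq!(tagged.tag(), Tag2::B01);
    assert_eq!(*tagged, 12); // `TaggedRef` derefs to the pointee
    assert!(std::ptr::eq(tagged.pointer(), &value));

    // The tag can be changed in place without touching the pointee.
    let mut tagged = tagged;
    tagged.set_tag(Tag2::B10);
    assert_eq!(tagged.tag(), Tag2::B10);
}
```

Unlike the removed `TaggedPtr`, a `TaggedRef` never owns its pointee, so owning pointers such as `Box`, `Rc`, or `Arc` are no longer tagged directly by this module.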
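The doc comment on `TaggedRef::packed` (carried over verbatim from `CopyTaggedPtr`) keeps the tag in the most-significant bits so that recovering the address is a single left shift. A small arithmetic sketch of that encoding, with a made-up aligned address and the same two-bit tag width; the real type does this through `NonNull::map_addr` on a `NonZero` address:

```rust
fn main() {
    const TAG_BITS: u32 = 2; // T::BITS for a two-bit tag
    let addr: usize = 0x7f00_0000_1234_5670; // hypothetical 4-byte-aligned address
    let tag: usize = 0b01;

    // Pack: the low TAG_BITS bits of `addr` are zero by alignment, so shift
    // them out and store the tag in the now-free most-significant bits.
    let packed = (addr >> TAG_BITS) | (tag << (usize::BITS - TAG_BITS));

    // Unpack the tag: `packed.addr() >> T::BITS` in the real code.
    assert_eq!(packed >> (usize::BITS - TAG_BITS), tag);

    // Unpack the address: `packed.map_addr(|a| a << T::BITS)` in the real code.
    assert_eq!(packed << TAG_BITS, addr);
}
```

Because the address is recovered by a shift rather than a mask, a read through the unpacked pointer compiles to the shorter `shift_read3` sequence shown in the comment's Godbolt comparison.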