author     bors <bors@rust-lang.org>  2020-08-19 20:20:31 +0000
committer  bors <bors@rust-lang.org>  2020-08-19 20:20:31 +0000
commit     32c654a9795b0d88541e56ba9da4150e34f1d5f9 (patch)
tree       93d81cd6fa34aaacdbde42299a4cdfeaeac7bb82 /src
parent     9900178cba95369cd5822c8ce579edcc89ffeb76 (diff)
parent     107e2904bffed4c2fb2af1ce92dadd1ad3752049 (diff)
Auto merge of #75590 - Mark-Simulacrum:tagged-ptr, r=ecstatic-morse
Add a packed/tagged pointer abstraction and utilize it for ParamEnv

The intent here is mostly just to add the abstraction; I suspect there are more use cases for it, and we can explore those over time now that there's a (mostly) safe abstraction that can be used in rustc.
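For context on the trick being abstracted here: a tagged pointer stashes a few bits of extra data in pointer bits that alignment guarantees are unused. The snippet below is a self-contained sketch of the low-bit variant that `ParamEnv` used before this PR (see the removed code further down); it is an illustration only, not the new abstraction, which packs the tag into the *upper* bits instead.

```rust
// Minimal sketch of tag packing via alignment: a u64 is 8-byte aligned, so
// the low bits of any &u64 are zero and can carry a small tag.
fn main() {
    let value: u64 = 42;
    let ptr = &value as *const u64 as usize;
    assert_eq!(ptr & 1, 0); // alignment guarantees the low bit is free

    let tag = 1usize; // e.g. Reveal::All in the old ParamEnv encoding
    let packed = ptr | tag;

    // Unpack: mask off the tag to recover the pointer, keep the low bit as the tag.
    let unpacked_ptr = packed & !1;
    let unpacked_tag = packed & 1;
    assert_eq!(unsafe { *(unpacked_ptr as *const u64) }, 42);
    assert_eq!(unpacked_tag, 1);
}
```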
Diffstat (limited to 'src')
-rw-r--r--  src/librustc_data_structures/lib.rs               4
-rw-r--r--  src/librustc_data_structures/tagged_ptr.rs      157
-rw-r--r--  src/librustc_data_structures/tagged_ptr/copy.rs 183
-rw-r--r--  src/librustc_data_structures/tagged_ptr/drop.rs 142
-rw-r--r--  src/librustc_middle/ty/list.rs                   15
-rw-r--r--  src/librustc_middle/ty/mod.rs                    92
6 files changed, 533 insertions, 60 deletions
diff --git a/src/librustc_data_structures/lib.rs b/src/librustc_data_structures/lib.rs
index 1937f615e70..af4a7bd1881 100644
--- a/src/librustc_data_structures/lib.rs
+++ b/src/librustc_data_structures/lib.rs
@@ -7,6 +7,7 @@
 //! This API is completely unstable and subject to change.
 
 #![doc(html_root_url = "https://doc.rust-lang.org/nightly/")]
+#![allow(incomplete_features)]
 #![feature(in_band_lifetimes)]
 #![feature(unboxed_closures)]
 #![feature(generators)]
@@ -23,6 +24,8 @@
 #![feature(associated_type_bounds)]
 #![feature(thread_id_value)]
 #![feature(extend_one)]
+#![feature(const_panic)]
+#![feature(const_generics)]
 #![allow(rustc::default_hash_types)]
 
 #[macro_use]
@@ -97,6 +100,7 @@ pub mod vec_linked_list;
 pub mod work_queue;
 pub use atomic_ref::AtomicRef;
 pub mod frozen;
+pub mod tagged_ptr;
 pub mod temp_dir;
 
 pub struct OnDrop<F: Fn()>(pub F);
diff --git a/src/librustc_data_structures/tagged_ptr.rs b/src/librustc_data_structures/tagged_ptr.rs
new file mode 100644
index 00000000000..e3839d19365
--- /dev/null
+++ b/src/librustc_data_structures/tagged_ptr.rs
@@ -0,0 +1,157 @@
+//! This module implements tagged pointers.
+//!
+//! In order to utilize the pointer packing, you must have two types: a pointer,
+//! and a tag.
+//!
+//! The pointer must implement the `Pointer` trait, with the primary requirement
+//! being conversion to and from a usize. Note that the pointer must be
+//! dereferenceable, so raw pointers generally cannot implement the `Pointer`
+//! trait. This implies that the pointer must also be nonzero.
+//!
+//! Many common pointer types already implement the `Pointer` trait.
+//!
+//! The tag must implement the `Tag` trait. We assert that the tag and `Pointer`
+//! are compatible at compile time.
+
+use std::mem::ManuallyDrop;
+use std::ops::Deref;
+use std::rc::Rc;
+use std::sync::Arc;
+
+mod copy;
+mod drop;
+
+pub use copy::CopyTaggedPtr;
+pub use drop::TaggedPtr;
+
+/// This describes the pointer type encapsulated by TaggedPtr.
+///
+/// # Safety
+///
+/// The usize returned from `into_usize` must be a valid, dereferenceable
+/// pointer to `<Self as Deref>::Target`. Note that pointers to `Pointee` must
+/// be thin, even though `Pointee` may not be sized.
+///
+/// Note that the returned pointer from `into_usize` should be castable to `&mut
+/// <Self as Deref>::Target` if `Pointer: DerefMut`.
+///
+/// The BITS constant must be correct. At least the `BITS` least-significant
+/// bits must be zero on all pointers returned from `into_usize`.
+///
+/// For example, if the alignment of `Pointee` is 2, then `BITS` should be 1.
+pub unsafe trait Pointer: Deref {
+    /// Most likely the value you want to use here is the following, unless
+    /// your Pointee type is unsized (e.g., `ty::List<T>` in rustc), in which
+    /// case you'll need to figure out the right type to pass to `align_of`
+    /// manually.
+    ///
+    /// ```rust
+    /// std::mem::align_of::<<Self as Deref>::Target>().trailing_zeros() as usize;
+    /// ```
+    const BITS: usize;
+    fn into_usize(self) -> usize;
+
+    /// # Safety
+    ///
+    /// The passed `ptr` must be returned from `into_usize`.
+    ///
+    /// This acts as `ptr::read` semantically; it should not be called more than
+    /// once on non-`Copy` `Pointer`s.
+    unsafe fn from_usize(ptr: usize) -> Self;
+
+    /// This provides a reference to the `Pointer` itself, rather than the
+    /// `Deref::Target`. It is used for cases where we want to call methods that
+    /// may be implemented differently for the Pointer than for the Pointee (e.g.,
+    /// `Rc::clone` vs cloning the inner value).
+    ///
+    /// # Safety
+    ///
+    /// The passed `ptr` must be returned from `into_usize`.
+    unsafe fn with_ref<R, F: FnOnce(&Self) -> R>(ptr: usize, f: F) -> R;
+}
+
+/// This describes tags that the `TaggedPtr` struct can hold.
+///
+/// # Safety
+///
+/// The BITS constant must be correct.
+///
+/// No more than `BITS` least significant bits may be set in the returned usize.
+pub unsafe trait Tag: Copy {
+    const BITS: usize;
+
+    fn into_usize(self) -> usize;
+
+    /// # Safety
+    ///
+    /// The passed `tag` must be returned from `into_usize`.
+    unsafe fn from_usize(tag: usize) -> Self;
+}
+
+unsafe impl<T> Pointer for Box<T> {
+    const BITS: usize = std::mem::align_of::<T>().trailing_zeros() as usize;
+    fn into_usize(self) -> usize {
+        Box::into_raw(self) as usize
+    }
+    unsafe fn from_usize(ptr: usize) -> Self {
+        Box::from_raw(ptr as *mut T)
+    }
+    unsafe fn with_ref<R, F: FnOnce(&Self) -> R>(ptr: usize, f: F) -> R {
+        let raw = ManuallyDrop::new(Self::from_usize(ptr));
+        f(&raw)
+    }
+}
+
+unsafe impl<T> Pointer for Rc<T> {
+    const BITS: usize = std::mem::align_of::<T>().trailing_zeros() as usize;
+    fn into_usize(self) -> usize {
+        Rc::into_raw(self) as usize
+    }
+    unsafe fn from_usize(ptr: usize) -> Self {
+        Rc::from_raw(ptr as *const T)
+    }
+    unsafe fn with_ref<R, F: FnOnce(&Self) -> R>(ptr: usize, f: F) -> R {
+        let raw = ManuallyDrop::new(Self::from_usize(ptr));
+        f(&raw)
+    }
+}
+
+unsafe impl<T> Pointer for Arc<T> {
+    const BITS: usize = std::mem::align_of::<T>().trailing_zeros() as usize;
+    fn into_usize(self) -> usize {
+        Arc::into_raw(self) as usize
+    }
+    unsafe fn from_usize(ptr: usize) -> Self {
+        Arc::from_raw(ptr as *const T)
+    }
+    unsafe fn with_ref<R, F: FnOnce(&Self) -> R>(ptr: usize, f: F) -> R {
+        let raw = ManuallyDrop::new(Self::from_usize(ptr));
+        f(&raw)
+    }
+}
+
+unsafe impl<'a, T: 'a> Pointer for &'a T {
+    const BITS: usize = std::mem::align_of::<T>().trailing_zeros() as usize;
+    fn into_usize(self) -> usize {
+        self as *const T as usize
+    }
+    unsafe fn from_usize(ptr: usize) -> Self {
+        &*(ptr as *const T)
+    }
+    unsafe fn with_ref<R, F: FnOnce(&Self) -> R>(ptr: usize, f: F) -> R {
+        f(&*(&ptr as *const usize as *const Self))
+    }
+}
+
+unsafe impl<'a, T: 'a> Pointer for &'a mut T {
+    const BITS: usize = std::mem::align_of::<T>().trailing_zeros() as usize;
+    fn into_usize(self) -> usize {
+        self as *mut T as usize
+    }
+    unsafe fn from_usize(ptr: usize) -> Self {
+        &mut *(ptr as *mut T)
+    }
+    unsafe fn with_ref<R, F: FnOnce(&Self) -> R>(ptr: usize, f: F) -> R {
+        f(&*(&ptr as *const usize as *const Self))
+    }
+}
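A sketch of how an in-tree user might plug into this API, based only on the traits shown above. `Flag` is a hypothetical tag type, and the example assumes `rustc_data_structures` is available as a dependency (it is a rustc-internal crate, not published on crates.io):

```rust
use rustc_data_structures::tagged_ptr::{Tag, TaggedPtr};

#[derive(Copy, Clone, Debug, PartialEq)]
enum Flag {
    Off,
    On,
}

// SAFETY: only the single least-significant bit is ever set.
unsafe impl Tag for Flag {
    const BITS: usize = 1;
    fn into_usize(self) -> usize {
        match self {
            Flag::Off => 0,
            Flag::On => 1,
        }
    }
    unsafe fn from_usize(tag: usize) -> Self {
        match tag {
            0 => Flag::Off,
            1 => Flag::On,
            _ => unreachable!(),
        }
    }
}

fn main() {
    // Box<u64> already implements `Pointer`; u64 has alignment 8, so
    // P::BITS = 3 >= T::BITS = 1 and the compile-time assertion holds.
    let p: TaggedPtr<Box<u64>, Flag, true> = TaggedPtr::new(Box::new(7), Flag::On);
    assert_eq!(*p, 7); // Deref goes straight to the pointee
    assert_eq!(p.tag(), Flag::On);
}
```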
diff --git a/src/librustc_data_structures/tagged_ptr/copy.rs b/src/librustc_data_structures/tagged_ptr/copy.rs
new file mode 100644
index 00000000000..d39d146db31
--- /dev/null
+++ b/src/librustc_data_structures/tagged_ptr/copy.rs
@@ -0,0 +1,183 @@
+use super::{Pointer, Tag};
+use crate::stable_hasher::{HashStable, StableHasher};
+use std::fmt;
+use std::marker::PhantomData;
+use std::num::NonZeroUsize;
+
+/// A `Copy` TaggedPtr.
+///
+/// You should use this instead of the `TaggedPtr` type in all cases where
+/// `P: Copy`.
+///
+/// If `COMPARE_PACKED` is true, then the pointers will be compared and hashed without
+/// unpacking. Otherwise we don't implement PartialEq/Eq/Hash; if you want that,
+/// wrap the TaggedPtr.
+pub struct CopyTaggedPtr<P, T, const COMPARE_PACKED: bool>
+where
+    P: Pointer,
+    T: Tag,
+{
+    packed: NonZeroUsize,
+    data: PhantomData<(P, T)>,
+}
+
+impl<P, T, const COMPARE_PACKED: bool> Copy for CopyTaggedPtr<P, T, COMPARE_PACKED>
+where
+    P: Pointer,
+    T: Tag,
+    P: Copy,
+{
+}
+
+impl<P, T, const COMPARE_PACKED: bool> Clone for CopyTaggedPtr<P, T, COMPARE_PACKED>
+where
+    P: Pointer,
+    T: Tag,
+    P: Copy,
+{
+    fn clone(&self) -> Self {
+        *self
+    }
+}
+
+// We pack the tag into the *upper* bits of the pointer to ease retrieval of the
+// value; a left shift is a multiplication and those are embeddable in
+// instruction encoding.
+impl<P, T, const COMPARE_PACKED: bool> CopyTaggedPtr<P, T, COMPARE_PACKED>
+where
+    P: Pointer,
+    T: Tag,
+{
+    const TAG_BIT_SHIFT: usize = (8 * std::mem::size_of::<usize>()) - T::BITS;
+    const ASSERTION: () = {
+        assert!(T::BITS <= P::BITS);
+        // Used for the `transmute_copy` calls below
+        assert!(std::mem::size_of::<&P::Target>() == std::mem::size_of::<usize>());
+    };
+
+    pub fn new(pointer: P, tag: T) -> Self {
+        // Trigger assert!
+        let () = Self::ASSERTION;
+        let packed_tag = tag.into_usize() << Self::TAG_BIT_SHIFT;
+
+        Self {
+            // SAFETY: We know that the pointer is non-null, as it must be
+            // dereferenceable per `Pointer` safety contract.
+            packed: unsafe {
+                NonZeroUsize::new_unchecked((P::into_usize(pointer) >> T::BITS) | packed_tag)
+            },
+            data: PhantomData,
+        }
+    }
+
+    pub(super) fn pointer_raw(&self) -> usize {
+        self.packed.get() << T::BITS
+    }
+    pub fn pointer(self) -> P
+    where
+        P: Copy,
+    {
+        // SAFETY: pointer_raw returns the original pointer
+        //
+        // Note that this isn't going to double-drop or anything because we have
+        // P: Copy
+        unsafe { P::from_usize(self.pointer_raw()) }
+    }
+    pub fn pointer_ref(&self) -> &P::Target {
+        // SAFETY: pointer_raw returns the original pointer
+        unsafe { std::mem::transmute_copy(&self.pointer_raw()) }
+    }
+    pub fn pointer_mut(&mut self) -> &mut P::Target
+    where
+        P: std::ops::DerefMut,
+    {
+        // SAFETY: pointer_raw returns the original pointer
+        unsafe { std::mem::transmute_copy(&self.pointer_raw()) }
+    }
+    pub fn tag(&self) -> T {
+        unsafe { T::from_usize(self.packed.get() >> Self::TAG_BIT_SHIFT) }
+    }
+    pub fn set_tag(&mut self, tag: T) {
+        let mut packed = self.packed.get();
+        let new_tag = T::into_usize(tag) << Self::TAG_BIT_SHIFT;
+        let tag_mask = (1 << T::BITS) - 1;
+        packed &= !(tag_mask << Self::TAG_BIT_SHIFT);
+        packed |= new_tag;
+        self.packed = unsafe { NonZeroUsize::new_unchecked(packed) };
+    }
+}
+
+impl<P, T, const COMPARE_PACKED: bool> std::ops::Deref for CopyTaggedPtr<P, T, COMPARE_PACKED>
+where
+    P: Pointer,
+    T: Tag,
+{
+    type Target = P::Target;
+    fn deref(&self) -> &Self::Target {
+        self.pointer_ref()
+    }
+}
+
+impl<P, T, const COMPARE_PACKED: bool> std::ops::DerefMut for CopyTaggedPtr<P, T, COMPARE_PACKED>
+where
+    P: Pointer + std::ops::DerefMut,
+    T: Tag,
+{
+    fn deref_mut(&mut self) -> &mut Self::Target {
+        self.pointer_mut()
+    }
+}
+
+impl<P, T, const COMPARE_PACKED: bool> fmt::Debug for CopyTaggedPtr<P, T, COMPARE_PACKED>
+where
+    P: Pointer,
+    P::Target: fmt::Debug,
+    T: Tag + fmt::Debug,
+{
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        f.debug_struct("CopyTaggedPtr")
+            .field("pointer", &self.pointer_ref())
+            .field("tag", &self.tag())
+            .finish()
+    }
+}
+
+impl<P, T> PartialEq for CopyTaggedPtr<P, T, true>
+where
+    P: Pointer,
+    T: Tag,
+{
+    fn eq(&self, other: &Self) -> bool {
+        self.packed == other.packed
+    }
+}
+
+impl<P, T> Eq for CopyTaggedPtr<P, T, true>
+where
+    P: Pointer,
+    T: Tag,
+{
+}
+
+impl<P, T> std::hash::Hash for CopyTaggedPtr<P, T, true>
+where
+    P: Pointer,
+    T: Tag,
+{
+    fn hash<H: std::hash::Hasher>(&self, state: &mut H) {
+        self.packed.hash(state);
+    }
+}
+
+impl<P, T, HCX, const COMPARE_PACKED: bool> HashStable<HCX> for CopyTaggedPtr<P, T, COMPARE_PACKED>
+where
+    P: Pointer + HashStable<HCX>,
+    T: Tag + HashStable<HCX>,
+{
+    fn hash_stable(&self, hcx: &mut HCX, hasher: &mut StableHasher) {
+        unsafe {
+            Pointer::with_ref(self.pointer_raw(), |p: &P| p.hash_stable(hcx, hasher));
+        }
+        self.tag().hash_stable(hcx, hasher);
+    }
+}
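To make the layout concrete, here is a standalone arithmetic sketch of what `new`, `pointer_raw`, and `tag` compute, assuming a 64-bit `usize` and a 1-bit tag (as for `ParamEnv`'s `Reveal`); it mirrors the shifts above without depending on the crate:

```rust
// Standalone check of the packing scheme used by CopyTaggedPtr.
fn main() {
    const T_BITS: usize = 1;
    const TAG_BIT_SHIFT: usize = 8 * std::mem::size_of::<usize>() - T_BITS; // 63 on 64-bit

    let value: u64 = 0xdead_beef;
    let ptr = &value as *const u64 as usize;
    let tag = 1usize;

    // new(): shift the pointer right to make room, put the tag in the top bits.
    let packed = (ptr >> T_BITS) | (tag << TAG_BIT_SHIFT);

    // pointer_raw(): a single left shift restores the pointer and discards the tag
    // (the low T_BITS bits of the original pointer are zero by alignment).
    assert_eq!(packed << T_BITS, ptr);
    // tag(): a single right shift extracts the tag from the top bits.
    assert_eq!(packed >> TAG_BIT_SHIFT, tag);

    assert_eq!(unsafe { *((packed << T_BITS) as *const u64) }, 0xdead_beef);
}
```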
diff --git a/src/librustc_data_structures/tagged_ptr/drop.rs b/src/librustc_data_structures/tagged_ptr/drop.rs
new file mode 100644
index 00000000000..63f64beae5a
--- /dev/null
+++ b/src/librustc_data_structures/tagged_ptr/drop.rs
@@ -0,0 +1,142 @@
+use super::{Pointer, Tag};
+use crate::stable_hasher::{HashStable, StableHasher};
+use std::fmt;
+
+use super::CopyTaggedPtr;
+
+/// A TaggedPtr implementing `Drop`.
+///
+/// If `COMPARE_PACKED` is true, then the pointers will be compared and hashed without
+/// unpacking. Otherwise we don't implement PartialEq/Eq/Hash; if you want that,
+/// wrap the TaggedPtr.
+pub struct TaggedPtr<P, T, const COMPARE_PACKED: bool>
+where
+    P: Pointer,
+    T: Tag,
+{
+    raw: CopyTaggedPtr<P, T, COMPARE_PACKED>,
+}
+
+impl<P, T, const COMPARE_PACKED: bool> Clone for TaggedPtr<P, T, COMPARE_PACKED>
+where
+    P: Pointer + Clone,
+    T: Tag,
+{
+    fn clone(&self) -> Self {
+        unsafe { Self::new(P::with_ref(self.raw.pointer_raw(), |p| p.clone()), self.raw.tag()) }
+    }
+}
+
+// We pack the tag into the *upper* bits of the pointer to ease retrieval of the
+// value; a left shift is a multiplication and those are embeddable in
+// instruction encoding.
+impl<P, T, const COMPARE_PACKED: bool> TaggedPtr<P, T, COMPARE_PACKED>
+where
+    P: Pointer,
+    T: Tag,
+{
+    pub fn new(pointer: P, tag: T) -> Self {
+        TaggedPtr { raw: CopyTaggedPtr::new(pointer, tag) }
+    }
+
+    pub fn pointer_ref(&self) -> &P::Target {
+        self.raw.pointer_ref()
+    }
+    pub fn pointer_mut(&mut self) -> &mut P::Target
+    where
+        P: std::ops::DerefMut,
+    {
+        self.raw.pointer_mut()
+    }
+    pub fn tag(&self) -> T {
+        self.raw.tag()
+    }
+    pub fn set_tag(&mut self, tag: T) {
+        self.raw.set_tag(tag);
+    }
+}
+
+impl<P, T, const COMPARE_PACKED: bool> std::ops::Deref for TaggedPtr<P, T, COMPARE_PACKED>
+where
+    P: Pointer,
+    T: Tag,
+{
+    type Target = P::Target;
+    fn deref(&self) -> &Self::Target {
+        self.raw.pointer_ref()
+    }
+}
+
+impl<P, T, const COMPARE_PACKED: bool> std::ops::DerefMut for TaggedPtr<P, T, COMPARE_PACKED>
+where
+    P: Pointer + std::ops::DerefMut,
+    T: Tag,
+{
+    fn deref_mut(&mut self) -> &mut Self::Target {
+        self.raw.pointer_mut()
+    }
+}
+
+impl<P, T, const COMPARE_PACKED: bool> Drop for TaggedPtr<P, T, COMPARE_PACKED>
+where
+    P: Pointer,
+    T: Tag,
+{
+    fn drop(&mut self) {
+        // No need to drop the tag, as it's Copy
+        unsafe {
+            std::mem::drop(P::from_usize(self.raw.pointer_raw()));
+        }
+    }
+}
+
+impl<P, T, const COMPARE_PACKED: bool> fmt::Debug for TaggedPtr<P, T, COMPARE_PACKED>
+where
+    P: Pointer,
+    P::Target: fmt::Debug,
+    T: Tag + fmt::Debug,
+{
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        f.debug_struct("TaggedPtr")
+            .field("pointer", &self.pointer_ref())
+            .field("tag", &self.tag())
+            .finish()
+    }
+}
+
+impl<P, T> PartialEq for TaggedPtr<P, T, true>
+where
+    P: Pointer,
+    T: Tag,
+{
+    fn eq(&self, other: &Self) -> bool {
+        self.raw.eq(&other.raw)
+    }
+}
+
+impl<P, T> Eq for TaggedPtr<P, T, true>
+where
+    P: Pointer,
+    T: Tag,
+{
+}
+
+impl<P, T> std::hash::Hash for TaggedPtr<P, T, true>
+where
+    P: Pointer,
+    T: Tag,
+{
+    fn hash<H: std::hash::Hasher>(&self, state: &mut H) {
+        self.raw.hash(state);
+    }
+}
+
+impl<P, T, HCX, const COMPARE_PACKED: bool> HashStable<HCX> for TaggedPtr<P, T, COMPARE_PACKED>
+where
+    P: Pointer + HashStable<HCX>,
+    T: Tag + HashStable<HCX>,
+{
+    fn hash_stable(&self, hcx: &mut HCX, hasher: &mut StableHasher) {
+        self.raw.hash_stable(hcx, hasher);
+    }
+}
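A sketch of the ownership behaviour this type adds over `CopyTaggedPtr`, again assuming `rustc_data_structures` as a dependency and a hypothetical 1-bit `Flag` tag: `Clone` goes through `Pointer::with_ref` (so `Rc::clone` runs on the smart pointer rather than cloning the pointee), and `Drop` reconstructs the pointer with `from_usize` and drops it:

```rust
use std::rc::Rc;
use rustc_data_structures::tagged_ptr::{Tag, TaggedPtr};

// Hypothetical 1-bit tag, as in the earlier sketch.
#[derive(Copy, Clone)]
enum Flag {
    Off,
    On,
}

// SAFETY: only the single least-significant bit is ever set.
unsafe impl Tag for Flag {
    const BITS: usize = 1;
    fn into_usize(self) -> usize {
        self as usize
    }
    unsafe fn from_usize(tag: usize) -> Self {
        if tag == 0 { Flag::Off } else { Flag::On }
    }
}

fn main() {
    let rc = Rc::new(0u64);
    // Rc<u64> implements `Pointer`; the tagged pointer owns one strong reference.
    let tagged: TaggedPtr<Rc<u64>, Flag, true> = TaggedPtr::new(Rc::clone(&rc), Flag::Off);
    assert_eq!(Rc::strong_count(&rc), 2);

    // Clone calls `Rc::clone` on the smart pointer via `Pointer::with_ref`.
    let cloned = tagged.clone();
    assert_eq!(Rc::strong_count(&rc), 3);

    // Drop rebuilds each Rc with `from_usize` and drops it, releasing the references.
    drop(cloned);
    drop(tagged);
    assert_eq!(Rc::strong_count(&rc), 1);
}
```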
diff --git a/src/librustc_middle/ty/list.rs b/src/librustc_middle/ty/list.rs
index fe390adf89f..83a2bdf90f9 100644
--- a/src/librustc_middle/ty/list.rs
+++ b/src/librustc_middle/ty/list.rs
@@ -35,6 +35,21 @@ pub struct List<T> {
     opaque: OpaqueListContents,
 }
 
+unsafe impl<'a, T: 'a> rustc_data_structures::tagged_ptr::Pointer for &'a List<T> {
+    const BITS: usize = std::mem::align_of::<usize>().trailing_zeros() as usize;
+    fn into_usize(self) -> usize {
+        self as *const List<T> as usize
+    }
+    unsafe fn from_usize(ptr: usize) -> Self {
+        &*(ptr as *const List<T>)
+    }
+    unsafe fn with_ref<R, F: FnOnce(&Self) -> R>(ptr: usize, f: F) -> R {
+        // Self: Copy so this is fine
+        let ptr = Self::from_usize(ptr);
+        f(&ptr)
+    }
+}
+
 unsafe impl<T: Sync> Sync for List<T> {}
 
 impl<T: Copy> List<T> {
diff --git a/src/librustc_middle/ty/mod.rs b/src/librustc_middle/ty/mod.rs
index 02fd18ef968..4fa86a91254 100644
--- a/src/librustc_middle/ty/mod.rs
+++ b/src/librustc_middle/ty/mod.rs
@@ -27,6 +27,7 @@ use rustc_data_structures::fx::FxIndexMap;
 use rustc_data_structures::sorted_map::SortedIndexMultiMap;
 use rustc_data_structures::stable_hasher::{HashStable, StableHasher};
 use rustc_data_structures::sync::{self, par_iter, ParallelIterator};
+use rustc_data_structures::tagged_ptr::CopyTaggedPtr;
 use rustc_errors::ErrorReported;
 use rustc_hir as hir;
 use rustc_hir::def::{CtorKind, CtorOf, DefKind, Namespace, Res};
@@ -46,7 +47,6 @@ use std::cell::RefCell;
 use std::cmp::Ordering;
 use std::fmt;
 use std::hash::{Hash, Hasher};
-use std::marker::PhantomData;
 use std::ops::Range;
 use std::ptr;
 use std::str;
@@ -1713,34 +1713,21 @@ impl WithOptConstParam<DefId> {
 /// When type checking, we use the `ParamEnv` to track
 /// details about the set of where-clauses that are in scope at this
 /// particular point.
-#[derive(Copy, Clone)]
+#[derive(Copy, Clone, Hash, PartialEq, Eq)]
 pub struct ParamEnv<'tcx> {
-    // We pack the caller_bounds List pointer and a Reveal enum into this usize.
-    // Specifically, the low bit represents Reveal, with 0 meaning `UserFacing`
-    // and 1 meaning `All`. The rest is the pointer.
-    //
-    // This relies on the List<Predicate<'tcx>> type having at least 2-byte
-    // alignment. Lists start with a usize and are repr(C) so this should be
-    // fine; there is a debug_assert in the constructor as well.
-    //
-    // Note that the choice of 0 for UserFacing is intentional -- since it is the
-    // first variant in Reveal this means that joining the pointer is a simple `or`.
-    packed_data: usize,
-
-    /// `Obligation`s that the caller must satisfy. This is basically
-    /// the set of bounds on the in-scope type parameters, translated
+    /// This packs both caller bounds and the reveal enum into one pointer.
+    ///
+    /// Caller bounds are `Obligation`s that the caller must satisfy. This is
+    /// basically the set of bounds on the in-scope type parameters, translated
     /// into `Obligation`s, and elaborated and normalized.
     ///
-    /// Note: This is packed into the `packed_data` usize above, use the
-    /// `caller_bounds()` method to access it.
-    caller_bounds: PhantomData<&'tcx List<Predicate<'tcx>>>,
-
+    /// Use the `caller_bounds()` method to access.
+    ///
     /// Typically, this is `Reveal::UserFacing`, but during codegen we
     /// want `Reveal::All`.
     ///
-    /// Note: This is packed into the caller_bounds usize above, use the reveal()
-    /// method to access it.
-    reveal: PhantomData<traits::Reveal>,
+    /// Note: This is packed, use the reveal() method to access it.
+    packed: CopyTaggedPtr<&'tcx List<Predicate<'tcx>>, traits::Reveal, true>,
 
     /// If this `ParamEnv` comes from a call to `tcx.param_env(def_id)`,
     /// register that `def_id` (useful for transitioning to the chalk trait
@@ -1748,6 +1735,23 @@ pub struct ParamEnv<'tcx> {
     pub def_id: Option<DefId>,
 }
 
+unsafe impl rustc_data_structures::tagged_ptr::Tag for traits::Reveal {
+    const BITS: usize = 1;
+    fn into_usize(self) -> usize {
+        match self {
+            traits::Reveal::UserFacing => 0,
+            traits::Reveal::All => 1,
+        }
+    }
+    unsafe fn from_usize(ptr: usize) -> Self {
+        match ptr {
+            0 => traits::Reveal::UserFacing,
+            1 => traits::Reveal::All,
+            _ => std::hint::unreachable_unchecked(),
+        }
+    }
+}
+
 impl<'tcx> fmt::Debug for ParamEnv<'tcx> {
     fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
         f.debug_struct("ParamEnv")
@@ -1758,24 +1762,6 @@ impl<'tcx> fmt::Debug for ParamEnv<'tcx> {
     }
 }
 
-impl<'tcx> Hash for ParamEnv<'tcx> {
-    fn hash<H: Hasher>(&self, state: &mut H) {
-        // List hashes as the raw pointer, so we can skip splitting into the
-        // pointer and the enum.
-        self.packed_data.hash(state);
-        self.def_id.hash(state);
-    }
-}
-
-impl<'tcx> PartialEq for ParamEnv<'tcx> {
-    fn eq(&self, other: &Self) -> bool {
-        self.caller_bounds() == other.caller_bounds()
-            && self.reveal() == other.reveal()
-            && self.def_id == other.def_id
-    }
-}
-impl<'tcx> Eq for ParamEnv<'tcx> {}
-
 impl<'a, 'tcx> HashStable<StableHashingContext<'a>> for ParamEnv<'tcx> {
     fn hash_stable(&self, hcx: &mut StableHashingContext<'a>, hasher: &mut StableHasher) {
         self.caller_bounds().hash_stable(hcx, hasher);
@@ -1812,13 +1798,12 @@ impl<'tcx> ParamEnv<'tcx> {
 
     #[inline]
     pub fn caller_bounds(self) -> &'tcx List<Predicate<'tcx>> {
-        // mask out bottom bit
-        unsafe { &*((self.packed_data & (!1)) as *const _) }
+        self.packed.pointer()
     }
 
     #[inline]
     pub fn reveal(self) -> traits::Reveal {
-        if self.packed_data & 1 == 0 { traits::Reveal::UserFacing } else { traits::Reveal::All }
+        self.packed.tag()
     }
 
     /// Construct a trait environment with no where-clauses in scope
@@ -1840,24 +1825,11 @@ impl<'tcx> ParamEnv<'tcx> {
         reveal: Reveal,
         def_id: Option<DefId>,
     ) -> Self {
-        let packed_data = caller_bounds as *const _ as usize;
-        // Check that we can pack the reveal data into the pointer.
-        debug_assert!(packed_data & 1 == 0);
-        ty::ParamEnv {
-            packed_data: packed_data
-                | match reveal {
-                    Reveal::UserFacing => 0,
-                    Reveal::All => 1,
-                },
-            caller_bounds: PhantomData,
-            reveal: PhantomData,
-            def_id,
-        }
+        ty::ParamEnv { packed: CopyTaggedPtr::new(caller_bounds, reveal), def_id }
     }
 
     pub fn with_user_facing(mut self) -> Self {
-        // clear bottom bit
-        self.packed_data &= !1;
+        self.packed.set_tag(Reveal::UserFacing);
         self
     }
 
@@ -1871,7 +1843,7 @@ impl<'tcx> ParamEnv<'tcx> {
     /// will be normalized to their underlying types.
     /// See PR #65989 and issue #65918 for more details
     pub fn with_reveal_all_normalized(self, tcx: TyCtxt<'tcx>) -> Self {
-        if self.packed_data & 1 == 1 {
+        if self.packed.tag() == traits::Reveal::All {
             return self;
         }
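As a worked illustration of what `with_user_facing` now does through `set_tag`, here is a standalone sketch of the same bit manipulation, assuming a 64-bit `usize` and the encoding from the `Tag` impl above (`Reveal::UserFacing` = 0, `Reveal::All` = 1):

```rust
// Standalone sketch of set_tag: clear the old tag in the upper bits, OR in the new one.
fn main() {
    const T_BITS: usize = 1;
    const TAG_BIT_SHIFT: usize = 8 * std::mem::size_of::<usize>() - T_BITS;
    let (user_facing, all) = (0usize, 1usize); // Reveal::UserFacing, Reveal::All

    let bounds: u64 = 0; // stand-in for the &List<Predicate<'_>> allocation
    let ptr = &bounds as *const u64 as usize;

    // ParamEnv::new(caller_bounds, Reveal::All, ..) packs pointer and tag together.
    let mut packed = (ptr >> T_BITS) | (all << TAG_BIT_SHIFT);

    // with_user_facing() calls set_tag(Reveal::UserFacing): clear the old tag
    // bits at the top, then write the new tag.
    let tag_mask = (1usize << T_BITS) - 1;
    packed &= !(tag_mask << TAG_BIT_SHIFT);
    packed |= user_facing << TAG_BIT_SHIFT;

    assert_eq!(packed << T_BITS, ptr);                // caller_bounds() still recovers the pointer
    assert_eq!(packed >> TAG_BIT_SHIFT, user_facing); // reveal() is now UserFacing
}
```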