about summary refs log tree commit diff
path: root/src
diff options
context:
space:
mode:
author: bors <bors@rust-lang.org> 2020-07-26 09:09:43 +0000
committer: bors <bors@rust-lang.org> 2020-07-26 09:09:43 +0000
commit: 13f9aa190957b993a268fd4a046fce76ca8814ee (patch)
tree: e3afb4f7850e5aba24cdfe044303d17a97c04daf /src
parent: 461707c5a119cc33c5d7df585ddb6cbec4a081bf (diff)
parent: ef9c4f5cef53c547b6f0af7c3049abb319368419 (diff)
download: rust-13f9aa190957b993a268fd4a046fce76ca8814ee.tar.gz
download: rust-13f9aa190957b993a268fd4a046fce76ca8814ee.zip
Auto merge of #74664 - pnadon:Miri-rename-undef-uninit, r=RalfJung
Miri rename undef uninit

Renamed parts of code within the `librustc_middle/mir/interpret/` directory.

Related issue [#71193](https://github.com/rust-lang/rust/issues/71193)
Diffstat (limited to 'src')
-rw-r--r--  src/librustc_codegen_ssa/mir/block.rs           |  2
-rw-r--r--  src/librustc_middle/mir/interpret/allocation.rs | 94
-rw-r--r--  src/librustc_middle/mir/interpret/value.rs      | 30
-rw-r--r--  src/librustc_mir/const_eval/eval_queries.rs     |  2
-rw-r--r--  src/librustc_mir/interpret/intrinsics.rs        | 10
-rw-r--r--  src/librustc_mir/interpret/memory.rs            | 18
-rw-r--r--  src/librustc_mir/interpret/operand.rs           |  8
-rw-r--r--  src/librustc_mir/interpret/place.rs             |  6
-rw-r--r--  src/librustc_mir/interpret/terminator.rs        |  2
-rw-r--r--  src/librustc_mir/interpret/traits.rs            |  8
-rw-r--r--  src/librustc_mir/interpret/validity.rs          |  6
-rw-r--r--  src/librustc_mir_build/hair/pattern/_match.rs   |  2
12 files changed, 94 insertions, 94 deletions
diff --git a/src/librustc_codegen_ssa/mir/block.rs b/src/librustc_codegen_ssa/mir/block.rs
index f9e1094ff73..e8270b4fa0a 100644
--- a/src/librustc_codegen_ssa/mir/block.rs
+++ b/src/librustc_codegen_ssa/mir/block.rs
@@ -883,7 +883,7 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> {
                                 let ptr = Pointer::new(AllocId(0), offset);
                                 alloc
                                     .read_scalar(&bx, ptr, size)
-                                    .and_then(|s| s.not_undef())
+                                    .and_then(|s| s.check_init())
                                     .unwrap_or_else(|e| {
                                         bx.tcx().sess.span_err(
                                             span,
diff --git a/src/librustc_middle/mir/interpret/allocation.rs b/src/librustc_middle/mir/interpret/allocation.rs
index 96195db0bac..dd4fc7adff1 100644
--- a/src/librustc_middle/mir/interpret/allocation.rs
+++ b/src/librustc_middle/mir/interpret/allocation.rs
@@ -105,7 +105,7 @@ impl<Tag> Allocation<Tag> {
         Allocation::from_bytes(slice, Align::from_bytes(1).unwrap())
     }
 
-    pub fn undef(size: Size, align: Align) -> Self {
+    pub fn uninit(size: Size, align: Align) -> Self {
         Allocation {
             bytes: vec![0; size.bytes_usize()],
             relocations: Relocations::new(),
@@ -153,7 +153,7 @@ impl<Tag, Extra> Allocation<Tag, Extra> {
         self.size.bytes_usize()
     }
 
-    /// Looks at a slice which may describe undefined bytes or describe a relocation. This differs
+    /// Looks at a slice which may describe uninitialized bytes or describe a relocation. This differs
     /// from `get_bytes_with_undef_and_ptr` in that it does no relocation checks (even on the
     /// edges) at all. It further ignores `AllocationExtra` callbacks.
     /// This must not be used for reads affecting the interpreter execution.
@@ -192,7 +192,7 @@ impl<'tcx, Tag: Copy, Extra: AllocationExtra<Tag>> Allocation<Tag, Extra> {
         offset.bytes_usize()..end
     }
 
-    /// The last argument controls whether we error out when there are undefined
+    /// The last argument controls whether we error out when there are uninitialized
     /// or pointer bytes. You should never call this, call `get_bytes` or
     /// `get_bytes_with_undef_and_ptr` instead,
     ///
@@ -206,12 +206,12 @@ impl<'tcx, Tag: Copy, Extra: AllocationExtra<Tag>> Allocation<Tag, Extra> {
         cx: &impl HasDataLayout,
         ptr: Pointer<Tag>,
         size: Size,
-        check_defined_and_ptr: bool,
+        check_init_and_ptr: bool,
     ) -> InterpResult<'tcx, &[u8]> {
         let range = self.check_bounds(ptr.offset, size);
 
-        if check_defined_and_ptr {
-            self.check_defined(ptr, size)?;
+        if check_init_and_ptr {
+            self.check_init(ptr, size)?;
             self.check_relocations(cx, ptr, size)?;
         } else {
             // We still don't want relocations on the *edges*.
@@ -239,7 +239,7 @@ impl<'tcx, Tag: Copy, Extra: AllocationExtra<Tag>> Allocation<Tag, Extra> {
         self.get_bytes_internal(cx, ptr, size, true)
     }
 
-    /// It is the caller's responsibility to handle undefined and pointer bytes.
+    /// It is the caller's responsibility to handle uninitialized and pointer bytes.
     /// However, this still checks that there are no relocations on the *edges*.
     ///
     /// It is the caller's responsibility to check bounds and alignment beforehand.
@@ -267,7 +267,7 @@ impl<'tcx, Tag: Copy, Extra: AllocationExtra<Tag>> Allocation<Tag, Extra> {
     ) -> InterpResult<'tcx, &mut [u8]> {
         let range = self.check_bounds(ptr.offset, size);
 
-        self.mark_definedness(ptr, size, true);
+        self.mark_init(ptr, size, true);
         self.clear_relocations(cx, ptr, size)?;
 
         AllocationExtra::memory_written(self, ptr, size)?;
@@ -303,7 +303,7 @@ impl<'tcx, Tag: Copy, Extra: AllocationExtra<Tag>> Allocation<Tag, Extra> {
 
     /// Validates that `ptr.offset` and `ptr.offset + size` do not point to the middle of a
     /// relocation. If `allow_ptr_and_undef` is `false`, also enforces that the memory in the
-    /// given range contains neither relocations nor undef bytes.
+    /// given range contains neither relocations nor uninitialized bytes.
     pub fn check_bytes(
         &self,
         cx: &impl HasDataLayout,
@@ -313,9 +313,9 @@ impl<'tcx, Tag: Copy, Extra: AllocationExtra<Tag>> Allocation<Tag, Extra> {
     ) -> InterpResult<'tcx> {
         // Check bounds and relocations on the edges.
         self.get_bytes_with_undef_and_ptr(cx, ptr, size)?;
-        // Check undef and ptr.
+        // Check uninit and ptr.
         if !allow_ptr_and_undef {
-            self.check_defined(ptr, size)?;
+            self.check_init(ptr, size)?;
             self.check_relocations(cx, ptr, size)?;
         }
         Ok(())
@@ -364,7 +364,7 @@ impl<'tcx, Tag: Copy, Extra: AllocationExtra<Tag>> Allocation<Tag, Extra> {
         let bytes = self.get_bytes_with_undef_and_ptr(cx, ptr, size)?;
         // Uninit check happens *after* we established that the alignment is correct.
         // We must not return `Ok()` for unaligned pointers!
-        if self.is_defined(ptr, size).is_err() {
+        if self.is_init(ptr, size).is_err() {
             // This inflates uninitialized bytes to the entire scalar, even if only a few
             // bytes are uninitialized.
             return Ok(ScalarMaybeUninit::Uninit);
@@ -416,7 +416,7 @@ impl<'tcx, Tag: Copy, Extra: AllocationExtra<Tag>> Allocation<Tag, Extra> {
         let val = match val {
             ScalarMaybeUninit::Scalar(scalar) => scalar,
             ScalarMaybeUninit::Uninit => {
-                self.mark_definedness(ptr, type_size, false);
+                self.mark_init(ptr, type_size, false);
                 return Ok(());
             }
         };
@@ -512,7 +512,7 @@ impl<'tcx, Tag: Copy, Extra> Allocation<Tag, Extra> {
         let start = ptr.offset;
         let end = start + size; // `Size` addition
 
-        // Mark parts of the outermost relocations as undefined if they partially fall outside the
+        // Mark parts of the outermost relocations as uninitialized if they partially fall outside the
         // given range.
         if first < start {
             self.init_mask.set_range(first, start, false);
@@ -542,20 +542,20 @@ impl<'tcx, Tag: Copy, Extra> Allocation<Tag, Extra> {
     }
 }
 
-/// Undefined bytes.
+/// Uninitialized bytes.
 impl<'tcx, Tag: Copy, Extra> Allocation<Tag, Extra> {
-    /// Checks whether the given range  is entirely defined.
+    /// Checks whether the given range  is entirely initialized.
     ///
-    /// Returns `Ok(())` if it's defined. Otherwise returns the range of byte
-    /// indexes of the first contiguous undefined access.
-    fn is_defined(&self, ptr: Pointer<Tag>, size: Size) -> Result<(), Range<Size>> {
+    /// Returns `Ok(())` if it's initialized. Otherwise returns the range of byte
+    /// indexes of the first contiguous uninitialized access.
+    fn is_init(&self, ptr: Pointer<Tag>, size: Size) -> Result<(), Range<Size>> {
         self.init_mask.is_range_initialized(ptr.offset, ptr.offset + size) // `Size` addition
     }
 
-    /// Checks that a range of bytes is defined. If not, returns the `InvalidUndefBytes`
-    /// error which will report the first range of bytes which is undefined.
-    fn check_defined(&self, ptr: Pointer<Tag>, size: Size) -> InterpResult<'tcx> {
-        self.is_defined(ptr, size).or_else(|idx_range| {
+    /// Checks that a range of bytes is initialized. If not, returns the `InvalidUninitBytes`
+    /// error which will report the first range of bytes which is uninitialized.
+    fn check_init(&self, ptr: Pointer<Tag>, size: Size) -> InterpResult<'tcx> {
+        self.is_init(ptr, size).or_else(|idx_range| {
             throw_ub!(InvalidUninitBytes(Some(Box::new(UninitBytesAccess {
                 access_ptr: ptr.erase_tag(),
                 access_size: size,
@@ -565,44 +565,44 @@ impl<'tcx, Tag: Copy, Extra> Allocation<Tag, Extra> {
         })
     }
 
-    pub fn mark_definedness(&mut self, ptr: Pointer<Tag>, size: Size, new_state: bool) {
+    pub fn mark_init(&mut self, ptr: Pointer<Tag>, size: Size, is_init: bool) {
         if size.bytes() == 0 {
             return;
         }
-        self.init_mask.set_range(ptr.offset, ptr.offset + size, new_state);
+        self.init_mask.set_range(ptr.offset, ptr.offset + size, is_init);
     }
 }
 
-/// Run-length encoding of the undef mask.
+/// Run-length encoding of the uninit mask.
 /// Used to copy parts of a mask multiple times to another allocation.
-pub struct AllocationDefinedness {
-    /// The definedness of the first range.
+pub struct InitMaskCompressed {
+    /// Whether the first range is initialized.
     initial: bool,
     /// The lengths of ranges that are run-length encoded.
-    /// The definedness of the ranges alternate starting with `initial`.
+    /// The initialization state of the ranges alternate starting with `initial`.
     ranges: smallvec::SmallVec<[u64; 1]>,
 }
 
-impl AllocationDefinedness {
-    pub fn all_bytes_undef(&self) -> bool {
-        // The `ranges` are run-length encoded and of alternating definedness.
-        // So if `ranges.len() > 1` then the second block is a range of defined.
+impl InitMaskCompressed {
+    pub fn no_bytes_init(&self) -> bool {
+        // The `ranges` are run-length encoded and of alternating initialization state.
+        // So if `ranges.len() > 1` then the second block is an initialized range.
         !self.initial && self.ranges.len() == 1
     }
 }
 
-/// Transferring the definedness mask to other allocations.
+/// Transferring the initialization mask to other allocations.
 impl<Tag, Extra> Allocation<Tag, Extra> {
-    /// Creates a run-length encoding of the undef mask.
-    pub fn compress_undef_range(&self, src: Pointer<Tag>, size: Size) -> AllocationDefinedness {
+    /// Creates a run-length encoding of the initialization mask.
+    pub fn compress_undef_range(&self, src: Pointer<Tag>, size: Size) -> InitMaskCompressed {
         // Since we are copying `size` bytes from `src` to `dest + i * size` (`for i in 0..repeat`),
-        // a naive undef mask copying algorithm would repeatedly have to read the undef mask from
+        // a naive initialization mask copying algorithm would repeatedly have to read the initialization mask from
         // the source and write it to the destination. Even if we optimized the memory accesses,
         // we'd be doing all of this `repeat` times.
-        // Therefore we precompute a compressed version of the undef mask of the source value and
+        // Therefore we precompute a compressed version of the initialization mask of the source value and
         // then write it back `repeat` times without computing any more information from the source.
 
-        // A precomputed cache for ranges of defined/undefined bits
+        // A precomputed cache for ranges of initialized / uninitialized bits
         // 0000010010001110 will become
         // `[5, 1, 2, 1, 3, 3, 1]`,
         // where each element toggles the state.
@@ -613,7 +613,7 @@ impl<Tag, Extra> Allocation<Tag, Extra> {
         let mut cur = initial;
 
         for i in 1..size.bytes() {
-            // FIXME: optimize to bitshift the current undef block's bits and read the top bit.
+            // FIXME: optimize to bitshift the current uninitialized block's bits and read the top bit.
             if self.init_mask.get(src.offset + Size::from_bytes(i)) == cur {
                 cur_len += 1;
             } else {
@@ -625,13 +625,13 @@ impl<Tag, Extra> Allocation<Tag, Extra> {
 
         ranges.push(cur_len);
 
-        AllocationDefinedness { ranges, initial }
+        InitMaskCompressed { ranges, initial }
     }
 
-    /// Applies multiple instances of the run-length encoding to the undef mask.
-    pub fn mark_compressed_undef_range(
+    /// Applies multiple instances of the run-length encoding to the initialization mask.
+    pub fn mark_compressed_init_range(
         &mut self,
-        defined: &AllocationDefinedness,
+        defined: &InitMaskCompressed,
         dest: Pointer<Tag>,
         size: Size,
         repeat: u64,
@@ -740,7 +740,7 @@ impl<Tag: Copy, Extra> Allocation<Tag, Extra> {
 }
 
 ////////////////////////////////////////////////////////////////////////////////
-// Undefined byte tracking
+// Uninitialized byte tracking
 ////////////////////////////////////////////////////////////////////////////////
 
 type Block = u64;
@@ -778,11 +778,11 @@ impl InitMask {
 
         match idx {
             Some(idx) => {
-                let undef_end = (idx.bytes()..end.bytes())
+                let uninit_end = (idx.bytes()..end.bytes())
                     .map(Size::from_bytes)
                     .find(|&i| self.get(i))
                     .unwrap_or(end);
-                Err(idx..undef_end)
+                Err(idx..uninit_end)
             }
             None => Ok(()),
         }
diff --git a/src/librustc_middle/mir/interpret/value.rs b/src/librustc_middle/mir/interpret/value.rs
index ba2a2bd8a02..9de2d17457a 100644
--- a/src/librustc_middle/mir/interpret/value.rs
+++ b/src/librustc_middle/mir/interpret/value.rs
@@ -606,7 +606,7 @@ impl<'tcx, Tag> ScalarMaybeUninit<Tag> {
     }
 
     #[inline]
-    pub fn not_undef(self) -> InterpResult<'static, Scalar<Tag>> {
+    pub fn check_init(self) -> InterpResult<'static, Scalar<Tag>> {
         match self {
             ScalarMaybeUninit::Scalar(scalar) => Ok(scalar),
             ScalarMaybeUninit::Uninit => throw_ub!(InvalidUninitBytes(None)),
@@ -615,72 +615,72 @@ impl<'tcx, Tag> ScalarMaybeUninit<Tag> {
 
     #[inline(always)]
     pub fn to_bool(self) -> InterpResult<'tcx, bool> {
-        self.not_undef()?.to_bool()
+        self.check_init()?.to_bool()
     }
 
     #[inline(always)]
     pub fn to_char(self) -> InterpResult<'tcx, char> {
-        self.not_undef()?.to_char()
+        self.check_init()?.to_char()
     }
 
     #[inline(always)]
     pub fn to_f32(self) -> InterpResult<'tcx, Single> {
-        self.not_undef()?.to_f32()
+        self.check_init()?.to_f32()
     }
 
     #[inline(always)]
     pub fn to_f64(self) -> InterpResult<'tcx, Double> {
-        self.not_undef()?.to_f64()
+        self.check_init()?.to_f64()
     }
 
     #[inline(always)]
     pub fn to_u8(self) -> InterpResult<'tcx, u8> {
-        self.not_undef()?.to_u8()
+        self.check_init()?.to_u8()
     }
 
     #[inline(always)]
     pub fn to_u16(self) -> InterpResult<'tcx, u16> {
-        self.not_undef()?.to_u16()
+        self.check_init()?.to_u16()
     }
 
     #[inline(always)]
     pub fn to_u32(self) -> InterpResult<'tcx, u32> {
-        self.not_undef()?.to_u32()
+        self.check_init()?.to_u32()
     }
 
     #[inline(always)]
     pub fn to_u64(self) -> InterpResult<'tcx, u64> {
-        self.not_undef()?.to_u64()
+        self.check_init()?.to_u64()
     }
 
     #[inline(always)]
     pub fn to_machine_usize(self, cx: &impl HasDataLayout) -> InterpResult<'tcx, u64> {
-        self.not_undef()?.to_machine_usize(cx)
+        self.check_init()?.to_machine_usize(cx)
     }
 
     #[inline(always)]
     pub fn to_i8(self) -> InterpResult<'tcx, i8> {
-        self.not_undef()?.to_i8()
+        self.check_init()?.to_i8()
     }
 
     #[inline(always)]
     pub fn to_i16(self) -> InterpResult<'tcx, i16> {
-        self.not_undef()?.to_i16()
+        self.check_init()?.to_i16()
     }
 
     #[inline(always)]
     pub fn to_i32(self) -> InterpResult<'tcx, i32> {
-        self.not_undef()?.to_i32()
+        self.check_init()?.to_i32()
     }
 
     #[inline(always)]
     pub fn to_i64(self) -> InterpResult<'tcx, i64> {
-        self.not_undef()?.to_i64()
+        self.check_init()?.to_i64()
     }
 
     #[inline(always)]
     pub fn to_machine_isize(self, cx: &impl HasDataLayout) -> InterpResult<'tcx, i64> {
-        self.not_undef()?.to_machine_isize(cx)
+        self.check_init()?.to_machine_isize(cx)
     }
 }
 
diff --git a/src/librustc_mir/const_eval/eval_queries.rs b/src/librustc_mir/const_eval/eval_queries.rs
index 705a1b2ae79..42fba8982d2 100644
--- a/src/librustc_mir/const_eval/eval_queries.rs
+++ b/src/librustc_mir/const_eval/eval_queries.rs
@@ -154,7 +154,7 @@ pub(super) fn op_to_const<'tcx>(
                 ScalarMaybeUninit::Uninit => to_const_value(op.assert_mem_place(ecx)),
             },
             Immediate::ScalarPair(a, b) => {
-                let (data, start) = match a.not_undef().unwrap() {
+                let (data, start) = match a.check_init().unwrap() {
                     Scalar::Ptr(ptr) => {
                         (ecx.tcx.global_alloc(ptr.alloc_id).unwrap_memory(), ptr.offset.bytes())
                     }
diff --git a/src/librustc_mir/interpret/intrinsics.rs b/src/librustc_mir/interpret/intrinsics.rs
index 29549041d25..39ed3b60793 100644
--- a/src/librustc_mir/interpret/intrinsics.rs
+++ b/src/librustc_mir/interpret/intrinsics.rs
@@ -150,7 +150,7 @@ impl<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
             | sym::bitreverse => {
                 let ty = substs.type_at(0);
                 let layout_of = self.layout_of(ty)?;
-                let val = self.read_scalar(args[0])?.not_undef()?;
+                let val = self.read_scalar(args[0])?.check_init()?;
                 let bits = self.force_bits(val, layout_of.size)?;
                 let kind = match layout_of.abi {
                     Abi::Scalar(ref scalar) => scalar.value,
@@ -281,9 +281,9 @@ impl<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
                 // rotate_left: (X << (S % BW)) | (X >> ((BW - S) % BW))
                 // rotate_right: (X << ((BW - S) % BW)) | (X >> (S % BW))
                 let layout = self.layout_of(substs.type_at(0))?;
-                let val = self.read_scalar(args[0])?.not_undef()?;
+                let val = self.read_scalar(args[0])?.check_init()?;
                 let val_bits = self.force_bits(val, layout.size)?;
-                let raw_shift = self.read_scalar(args[1])?.not_undef()?;
+                let raw_shift = self.read_scalar(args[1])?.check_init()?;
                 let raw_shift_bits = self.force_bits(raw_shift, layout.size)?;
                 let width_bits = u128::from(layout.size.bits());
                 let shift_bits = raw_shift_bits % width_bits;
@@ -298,7 +298,7 @@ impl<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
                 self.write_scalar(result, dest)?;
             }
             sym::offset => {
-                let ptr = self.read_scalar(args[0])?.not_undef()?;
+                let ptr = self.read_scalar(args[0])?.check_init()?;
                 let offset_count = self.read_scalar(args[1])?.to_machine_isize(self)?;
                 let pointee_ty = substs.type_at(0);
 
@@ -306,7 +306,7 @@ impl<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
                 self.write_scalar(offset_ptr, dest)?;
             }
             sym::arith_offset => {
-                let ptr = self.read_scalar(args[0])?.not_undef()?;
+                let ptr = self.read_scalar(args[0])?.check_init()?;
                 let offset_count = self.read_scalar(args[1])?.to_machine_isize(self)?;
                 let pointee_ty = substs.type_at(0);
 
diff --git a/src/librustc_mir/interpret/memory.rs b/src/librustc_mir/interpret/memory.rs
index ae837f8e165..39d2df96834 100644
--- a/src/librustc_mir/interpret/memory.rs
+++ b/src/librustc_mir/interpret/memory.rs
@@ -171,7 +171,7 @@ impl<'mir, 'tcx, M: Machine<'mir, 'tcx>> Memory<'mir, 'tcx, M> {
         align: Align,
         kind: MemoryKind<M::MemoryKind>,
     ) -> Pointer<M::PointerTag> {
-        let alloc = Allocation::undef(size, align);
+        let alloc = Allocation::uninit(size, align);
         self.allocate_with(alloc, kind)
     }
 
@@ -907,18 +907,18 @@ impl<'mir, 'tcx, M: Machine<'mir, 'tcx>> Memory<'mir, 'tcx, M> {
 
         let dest_bytes = dest_bytes.as_mut_ptr();
 
-        // Prepare a copy of the undef mask.
+        // Prepare a copy of the initialization mask.
         let compressed = self.get_raw(src.alloc_id)?.compress_undef_range(src, size);
 
-        if compressed.all_bytes_undef() {
-            // Fast path: If all bytes are `undef` then there is nothing to copy. The target range
-            // is marked as undef but we otherwise omit changing the byte representation which may
-            // be arbitrary for undef bytes.
+        if compressed.no_bytes_init() {
+            // Fast path: If all bytes are `uninit` then there is nothing to copy. The target range
+            // is marked as uninitialized but we otherwise omit changing the byte representation which may
+            // be arbitrary for uninitialized bytes.
             // This also avoids writing to the target bytes so that the backing allocation is never
-            // touched if the bytes stay undef for the whole interpreter execution. On contemporary
+            // touched if the bytes stay uninitialized for the whole interpreter execution. On contemporary
             // operating system this can avoid physically allocating the page.
             let dest_alloc = self.get_raw_mut(dest.alloc_id)?;
-            dest_alloc.mark_definedness(dest, size * length, false); // `Size` multiplication
+            dest_alloc.mark_init(dest, size * length, false); // `Size` multiplication
             dest_alloc.mark_relocation_range(relocations);
             return Ok(());
         }
@@ -958,7 +958,7 @@ impl<'mir, 'tcx, M: Machine<'mir, 'tcx>> Memory<'mir, 'tcx, M> {
         }
 
         // now fill in all the data
-        self.get_raw_mut(dest.alloc_id)?.mark_compressed_undef_range(
+        self.get_raw_mut(dest.alloc_id)?.mark_compressed_init_range(
             &compressed,
             dest,
             size,
diff --git a/src/librustc_mir/interpret/operand.rs b/src/librustc_mir/interpret/operand.rs
index face72d70ce..9f86f2bc588 100644
--- a/src/librustc_mir/interpret/operand.rs
+++ b/src/librustc_mir/interpret/operand.rs
@@ -63,7 +63,7 @@ impl<'tcx, Tag> Immediate<Tag> {
     }
 
     #[inline]
-    pub fn to_scalar_or_undef(self) -> ScalarMaybeUninit<Tag> {
+    pub fn to_scalar_or_uninit(self) -> ScalarMaybeUninit<Tag> {
         match self {
             Immediate::Scalar(val) => val,
             Immediate::ScalarPair(..) => bug!("Got a wide pointer where a scalar was expected"),
@@ -72,14 +72,14 @@ impl<'tcx, Tag> Immediate<Tag> {
 
     #[inline]
     pub fn to_scalar(self) -> InterpResult<'tcx, Scalar<Tag>> {
-        self.to_scalar_or_undef().not_undef()
+        self.to_scalar_or_uninit().check_init()
     }
 
     #[inline]
     pub fn to_scalar_pair(self) -> InterpResult<'tcx, (Scalar<Tag>, Scalar<Tag>)> {
         match self {
             Immediate::Scalar(..) => bug!("Got a thin pointer where a scalar pair was expected"),
-            Immediate::ScalarPair(a, b) => Ok((a.not_undef()?, b.not_undef()?)),
+            Immediate::ScalarPair(a, b) => Ok((a.check_init()?, b.check_init()?)),
         }
     }
 }
@@ -333,7 +333,7 @@ impl<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
         &self,
         op: OpTy<'tcx, M::PointerTag>,
     ) -> InterpResult<'tcx, ScalarMaybeUninit<M::PointerTag>> {
-        Ok(self.read_immediate(op)?.to_scalar_or_undef())
+        Ok(self.read_immediate(op)?.to_scalar_or_uninit())
     }
 
     // Turn the wide MPlace into a string (must already be dereferenced!)
diff --git a/src/librustc_mir/interpret/place.rs b/src/librustc_mir/interpret/place.rs
index 270be986064..80a5e56059d 100644
--- a/src/librustc_mir/interpret/place.rs
+++ b/src/librustc_mir/interpret/place.rs
@@ -292,9 +292,9 @@ where
             val.layout.ty.builtin_deref(true).expect("`ref_to_mplace` called on non-ptr type").ty;
         let layout = self.layout_of(pointee_type)?;
         let (ptr, meta) = match *val {
-            Immediate::Scalar(ptr) => (ptr.not_undef()?, MemPlaceMeta::None),
+            Immediate::Scalar(ptr) => (ptr.check_init()?, MemPlaceMeta::None),
             Immediate::ScalarPair(ptr, meta) => {
-                (ptr.not_undef()?, MemPlaceMeta::Meta(meta.not_undef()?))
+                (ptr.check_init()?, MemPlaceMeta::Meta(meta.check_init()?))
             }
         };
 
@@ -541,7 +541,7 @@ where
                 let n = self.access_local(self.frame(), local, Some(layout))?;
                 let n = self.read_scalar(n)?;
                 let n = u64::try_from(
-                    self.force_bits(n.not_undef()?, self.tcx.data_layout.pointer_size)?,
+                    self.force_bits(n.check_init()?, self.tcx.data_layout.pointer_size)?,
                 )
                 .unwrap();
                 self.mplace_index(base, n)?
diff --git a/src/librustc_mir/interpret/terminator.rs b/src/librustc_mir/interpret/terminator.rs
index 663f61b1155..9a036a0f299 100644
--- a/src/librustc_mir/interpret/terminator.rs
+++ b/src/librustc_mir/interpret/terminator.rs
@@ -58,7 +58,7 @@ impl<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
                 let (fn_val, abi) = match func.layout.ty.kind {
                     ty::FnPtr(sig) => {
                         let caller_abi = sig.abi();
-                        let fn_ptr = self.read_scalar(func)?.not_undef()?;
+                        let fn_ptr = self.read_scalar(func)?.check_init()?;
                         let fn_val = self.memory.get_fn(fn_ptr)?;
                         (fn_val, caller_abi)
                     }
diff --git a/src/librustc_mir/interpret/traits.rs b/src/librustc_mir/interpret/traits.rs
index 49a80ca1345..589da04d6a3 100644
--- a/src/librustc_mir/interpret/traits.rs
+++ b/src/librustc_mir/interpret/traits.rs
@@ -118,7 +118,7 @@ impl<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
             .memory
             .get_raw(vtable_slot.alloc_id)?
             .read_ptr_sized(self, vtable_slot)?
-            .not_undef()?;
+            .check_init()?;
         Ok(self.memory.get_fn(fn_ptr)?)
     }
 
@@ -137,7 +137,7 @@ impl<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
             )?
             .expect("cannot be a ZST");
         let drop_fn =
-            self.memory.get_raw(vtable.alloc_id)?.read_ptr_sized(self, vtable)?.not_undef()?;
+            self.memory.get_raw(vtable.alloc_id)?.read_ptr_sized(self, vtable)?.check_init()?;
         // We *need* an instance here, no other kind of function value, to be able
         // to determine the type.
         let drop_instance = self.memory.get_fn(drop_fn)?.as_instance()?;
@@ -165,10 +165,10 @@ impl<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
             .check_ptr_access(vtable, 3 * pointer_size, self.tcx.data_layout.pointer_align.abi)?
             .expect("cannot be a ZST");
         let alloc = self.memory.get_raw(vtable.alloc_id)?;
-        let size = alloc.read_ptr_sized(self, vtable.offset(pointer_size, self)?)?.not_undef()?;
+        let size = alloc.read_ptr_sized(self, vtable.offset(pointer_size, self)?)?.check_init()?;
         let size = u64::try_from(self.force_bits(size, pointer_size)?).unwrap();
         let align =
-            alloc.read_ptr_sized(self, vtable.offset(pointer_size * 2, self)?)?.not_undef()?;
+            alloc.read_ptr_sized(self, vtable.offset(pointer_size * 2, self)?)?.check_init()?;
         let align = u64::try_from(self.force_bits(align, pointer_size)?).unwrap();
 
         if size >= self.tcx.data_layout.obj_size_bound() {
diff --git a/src/librustc_mir/interpret/validity.rs b/src/librustc_mir/interpret/validity.rs
index 84f39ac8955..f1c5a67ed33 100644
--- a/src/librustc_mir/interpret/validity.rs
+++ b/src/librustc_mir/interpret/validity.rs
@@ -500,7 +500,7 @@ impl<'rt, 'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> ValidityVisitor<'rt, 'mir, '
                 // types below!
                 if self.ref_tracking_for_consts.is_some() {
                     // Integers/floats in CTFE: Must be scalar bits, pointers are dangerous
-                    let is_bits = value.not_undef().map_or(false, |v| v.is_bits());
+                    let is_bits = value.check_init().map_or(false, |v| v.is_bits());
                     if !is_bits {
                         throw_validation_failure!(self.path,
                             { "{}", value } expected { "initialized plain (non-pointer) bytes" }
@@ -537,7 +537,7 @@ impl<'rt, 'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> ValidityVisitor<'rt, 'mir, '
             ty::FnPtr(_sig) => {
                 let value = self.ecx.read_scalar(value)?;
                 let _fn = try_validation!(
-                    value.not_undef().and_then(|ptr| self.ecx.memory.get_fn(ptr)),
+                    value.check_init().and_then(|ptr| self.ecx.memory.get_fn(ptr)),
                     self.path,
                     err_ub!(DanglingIntPointer(..)) |
                     err_ub!(InvalidFunctionPointer(..)) |
@@ -596,7 +596,7 @@ impl<'rt, 'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> ValidityVisitor<'rt, 'mir, '
         }
         // At least one value is excluded. Get the bits.
         let value = try_validation!(
-            value.not_undef(),
+            value.check_init(),
             self.path,
             err_ub!(InvalidUninitBytes(None)) => { "{}", value }
                 expected { "something {}", wrapping_range_format(valid_range, max_hi) },
diff --git a/src/librustc_mir_build/hair/pattern/_match.rs b/src/librustc_mir_build/hair/pattern/_match.rs
index 372cb783f50..3202f7d1b1b 100644
--- a/src/librustc_mir_build/hair/pattern/_match.rs
+++ b/src/librustc_mir_build/hair/pattern/_match.rs
@@ -2614,7 +2614,7 @@ fn specialize_one_pattern<'p, 'tcx>(
             let pats = cx.pattern_arena.alloc_from_iter((0..n).filter_map(|i| {
                 let ptr = ptr.offset(layout.size * i, &cx.tcx).ok()?;
                 let scalar = alloc.read_scalar(&cx.tcx, ptr, layout.size).ok()?;
-                let scalar = scalar.not_undef().ok()?;
+                let scalar = scalar.check_init().ok()?;
                 let value = ty::Const::from_scalar(cx.tcx, scalar, ty);
                 let pattern = Pat { ty, span: pat.span, kind: box PatKind::Constant { value } };
                 Some(pattern)