| author | Ralf Jung <post@ralfj.de> | 2021-07-12 18:45:26 +0200 |
|---|---|---|
| committer | Ralf Jung <post@ralfj.de> | 2021-07-12 18:45:26 +0200 |
| commit | c8baac5776141d9e844b05fef8d144e3664e7a75 | |
| tree | 1b58494224aeb77569c9f99dd9fc2d66d47fc7f5 | |
| parent | 3a24abd22fd25c836d8b4d75ff46c833f9c3934c | |
remove remaining use of Pointer in Allocation API
| mode | file | lines changed |
|---|---|---|
| -rw-r--r-- | compiler/rustc_middle/src/mir/interpret/allocation.rs | 19 |
| -rw-r--r-- | compiler/rustc_mir/src/interpret/memory.rs | 4 |

2 files changed, 11 insertions, 12 deletions
```diff
diff --git a/compiler/rustc_middle/src/mir/interpret/allocation.rs b/compiler/rustc_middle/src/mir/interpret/allocation.rs
index c2645a09140..75cbb55239c 100644
--- a/compiler/rustc_middle/src/mir/interpret/allocation.rs
+++ b/compiler/rustc_middle/src/mir/interpret/allocation.rs
@@ -512,7 +512,7 @@ impl InitMaskCompressed {
 /// Transferring the initialization mask to other allocations.
 impl<Tag, Extra> Allocation<Tag, Extra> {
     /// Creates a run-length encoding of the initialization mask.
-    pub fn compress_uninit_range(&self, src: Pointer<Tag>, size: Size) -> InitMaskCompressed {
+    pub fn compress_uninit_range(&self, range: AllocRange) -> InitMaskCompressed {
         // Since we are copying `size` bytes from `src` to `dest + i * size` (`for i in 0..repeat`),
         // a naive initialization mask copying algorithm would repeatedly have to read the initialization mask from
         // the source and write it to the destination. Even if we optimized the memory accesses,
@@ -526,13 +526,13 @@ impl<Tag, Extra> Allocation<Tag, Extra> {
         // where each element toggles the state.
         let mut ranges = smallvec::SmallVec::<[u64; 1]>::new();

-        let initial = self.init_mask.get(src.offset);
+        let initial = self.init_mask.get(range.start);
         let mut cur_len = 1;
         let mut cur = initial;

-        for i in 1..size.bytes() {
+        for i in 1..range.size.bytes() {
             // FIXME: optimize to bitshift the current uninitialized block's bits and read the top bit.
-            if self.init_mask.get(src.offset + Size::from_bytes(i)) == cur {
+            if self.init_mask.get(range.start + Size::from_bytes(i)) == cur {
                 cur_len += 1;
             } else {
                 ranges.push(cur_len);
@@ -550,24 +550,23 @@ impl<Tag, Extra> Allocation<Tag, Extra> {
     pub fn mark_compressed_init_range(
         &mut self,
         defined: &InitMaskCompressed,
-        dest: Pointer<Tag>,
-        size: Size,
+        range: AllocRange,
         repeat: u64,
     ) {
         // An optimization where we can just overwrite an entire range of initialization
         // bits if they are going to be uniformly `1` or `0`.
         if defined.ranges.len() <= 1 {
             self.init_mask.set_range_inbounds(
-                dest.offset,
-                dest.offset + size * repeat, // `Size` operations
+                range.start,
+                range.start + range.size * repeat, // `Size` operations
                 defined.initial,
             );
             return;
         }

         for mut j in 0..repeat {
-            j *= size.bytes();
-            j += dest.offset.bytes();
+            j *= range.size.bytes();
+            j += range.start.bytes();
             let mut cur = defined.initial;
             for range in &defined.ranges {
                 let old_j = j;
diff --git a/compiler/rustc_mir/src/interpret/memory.rs b/compiler/rustc_mir/src/interpret/memory.rs
index cb929c21850..990dbbcd250 100644
--- a/compiler/rustc_mir/src/interpret/memory.rs
+++ b/compiler/rustc_mir/src/interpret/memory.rs
@@ -1049,7 +1049,7 @@ impl<'mir, 'tcx, M: Machine<'mir, 'tcx>> Memory<'mir, 'tcx, M> {
             num_copies,
         );
         // Prepare a copy of the initialization mask.
-        let compressed = src_alloc.compress_uninit_range(src, size);
+        let compressed = src_alloc.compress_uninit_range(alloc_range(src.offset, size));
         // This checks relocation edges on the src.
         let src_bytes = src_alloc
             .get_bytes_with_uninit_and_ptr(&tcx, alloc_range(src.offset, size))
@@ -1110,7 +1110,7 @@ impl<'mir, 'tcx, M: Machine<'mir, 'tcx>> Memory<'mir, 'tcx, M> {
         }

         // now fill in all the "init" data
-        dest_alloc.mark_compressed_init_range(&compressed, dest, size, num_copies);
+        dest_alloc.mark_compressed_init_range(&compressed, alloc_range(dest.offset, size), num_copies);

         // copy the relocations to the destination
         dest_alloc.mark_relocation_range(relocations);
```
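The change replaces the `(Pointer<Tag>, Size)` parameter pair with a single `AllocRange`, built via the `alloc_range` helper that the call sites in `memory.rs` now use. As rough orientation only, the sketch below reconstructs that type and helper in a stand-alone form: the field names `start` and `size` come straight from the diff, while the derives, the `u64` stand-in for `Size`, and everything else are assumptions rather than the actual rustc definitions.

```rust
// Minimal sketch, not the actual rustc definitions: `Size` here is a plain
// byte count standing in for `rustc_target::abi::Size` so the example compiles
// on its own.
type Size = u64;

/// A contiguous byte range within a single allocation: offset plus length.
/// Field names `start` and `size` match the diff; the rest is assumed.
#[derive(Copy, Clone, Debug, PartialEq, Eq)]
pub struct AllocRange {
    pub start: Size, // offset of the first byte, relative to the allocation base
    pub size: Size,  // number of bytes covered by the range
}

/// Shorthand constructor mirroring the `alloc_range(...)` calls in `memory.rs`.
#[inline]
pub fn alloc_range(start: Size, size: Size) -> AllocRange {
    AllocRange { start, size }
}

fn main() {
    // Before this commit, callers passed a pointer and a size separately:
    //     src_alloc.compress_uninit_range(src, size);
    // After it, they pass one allocation-relative range:
    //     src_alloc.compress_uninit_range(alloc_range(src.offset, size));
    let range = alloc_range(16, 8);
    assert_eq!(range.start + range.size, 24);
}
```

Bundling the offset and length into one allocation-relative value is what lets the `Allocation` API drop its last dependence on `Pointer`, which is the point of this commit.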
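The comments in the diff describe `compress_uninit_range` as building a run-length encoding of the initialization mask in which each run toggles the state. The self-contained sketch below mirrors that loop with illustrative names and a plain `bool` slice instead of the real `InitMask`, to show what the `initial`/`ranges` pair in `InitMaskCompressed` amounts to.

```rust
// Illustrative sketch of the run-length encoding built by `compress_uninit_range`
// (plain `bool` slice instead of the real `InitMask`; names are not rustc's).
// The result is an initial state plus run lengths, where each run toggles the state.
fn compress(mask: &[bool]) -> (bool, Vec<u64>) {
    assert!(!mask.is_empty(), "the real code reads at least one bit at `range.start`");
    let initial = mask[0];
    let mut ranges = Vec::new();
    let (mut cur, mut cur_len) = (initial, 1u64);
    for &bit in &mask[1..] {
        if bit == cur {
            cur_len += 1; // still inside the current run
        } else {
            ranges.push(cur_len); // run ended: record its length and toggle the state
            cur = bit;
            cur_len = 1;
        }
    }
    ranges.push(cur_len); // record the final run
    (initial, ranges)
}

fn main() {
    // init, init, uninit, uninit, uninit, init  ->  starts initialized,
    // then alternating runs of length 2, 3, 1.
    assert_eq!(compress(&[true, true, false, false, false, true]), (true, vec![2, 3, 1]));
}
```

With the mask compressed this way, `mark_compressed_init_range` can replay the runs `repeat` times at the destination instead of copying the mask bit by bit for every copy of the source range.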
