| author | Mark Rousskov <mark.simulacrum@gmail.com> | 2024-04-14 13:52:56 -0400 |
|---|---|---|
| committer | Mark Rousskov <mark.simulacrum@gmail.com> | 2024-04-16 21:13:21 -0400 |
| commit | 649e80184bf238760a2162c6f93090c4ed6abae8 | |
| tree | 7558b7efe14ff2e12b0feddb4732bee9edd577fb /compiler/rustc_codegen_llvm/src/common.rs | |
| parent | ad18fe08de03fbb459c05475bddee22707b4f0ec | |
Codegen ZSTs without an allocation
This makes sure that `&[]` is just as efficient as indirecting through unsafe code (`from_raw_parts`). No new stable guarantee is intended about whether or not we do this; it is just an optimization.

Co-authored-by: Ralf Jung <post@ralfj.de>
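For context, a minimal sketch (not part of the commit; names are illustrative) of the two spellings the message compares. After this change, both should lower to the same dangling, well-aligned pointer, rather than the literal referencing a zero-sized global allocation:

```rust
use std::ptr::NonNull;

// The safe spelling: previously this could emit a reference into a
// zero-sized constant allocation in the object file.
fn empty_via_literal() -> &'static [u32] {
    &[]
}

// The unsafe spelling the commit message alludes to: a dangling but
// well-aligned pointer paired with length zero, touching no memory.
fn empty_via_raw_parts() -> &'static [u32] {
    unsafe { std::slice::from_raw_parts(NonNull::<u32>::dangling().as_ptr(), 0) }
}
```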
Diffstat (limited to 'compiler/rustc_codegen_llvm/src/common.rs')
| -rw-r--r-- | compiler/rustc_codegen_llvm/src/common.rs | 49 | 
1 file changed, 33 insertions(+), 16 deletions(-)
```diff
diff --git a/compiler/rustc_codegen_llvm/src/common.rs b/compiler/rustc_codegen_llvm/src/common.rs
index 568fcc3f3cf..ec33ce6292a 100644
--- a/compiler/rustc_codegen_llvm/src/common.rs
+++ b/compiler/rustc_codegen_llvm/src/common.rs
@@ -255,21 +255,38 @@ impl<'ll, 'tcx> ConstMethods<'tcx> for CodegenCx<'ll, 'tcx> {
                 let (prov, offset) = ptr.into_parts();
                 let (base_addr, base_addr_space) = match self.tcx.global_alloc(prov.alloc_id()) {
                     GlobalAlloc::Memory(alloc) => {
-                        let init = const_alloc_to_llvm(self, alloc);
-                        let alloc = alloc.inner();
-                        let value = match alloc.mutability {
-                            Mutability::Mut => self.static_addr_of_mut(init, alloc.align, None),
-                            _ => self.static_addr_of(init, alloc.align, None),
-                        };
-                        if !self.sess().fewer_names() && llvm::get_value_name(value).is_empty() {
-                            let hash = self.tcx.with_stable_hashing_context(|mut hcx| {
-                                let mut hasher = StableHasher::new();
-                                alloc.hash_stable(&mut hcx, &mut hasher);
-                                hasher.finish::<Hash128>()
-                            });
-                            llvm::set_value_name(value, format!("alloc_{hash:032x}").as_bytes());
+                        // For ZSTs directly codegen an aligned pointer.
+                        // This avoids generating a zero-sized constant value and actually needing a
+                        // real address at runtime.
+                        if alloc.inner().len() == 0 {
+                            assert_eq!(offset.bytes(), 0);
+                            let llval = self.const_usize(alloc.inner().align.bytes());
+                            return if matches!(layout.primitive(), Pointer(_)) {
+                                unsafe { llvm::LLVMConstIntToPtr(llval, llty) }
+                            } else {
+                                self.const_bitcast(llval, llty)
+                            };
+                        } else {
+                            let init = const_alloc_to_llvm(self, alloc, /*static*/ false);
+                            let alloc = alloc.inner();
+                            let value = match alloc.mutability {
+                                Mutability::Mut => self.static_addr_of_mut(init, alloc.align, None),
+                                _ => self.static_addr_of(init, alloc.align, None),
+                            };
+                            if !self.sess().fewer_names() && llvm::get_value_name(value).is_empty()
+                            {
+                                let hash = self.tcx.with_stable_hashing_context(|mut hcx| {
+                                    let mut hasher = StableHasher::new();
+                                    alloc.hash_stable(&mut hcx, &mut hasher);
+                                    hasher.finish::<Hash128>()
+                                });
+                                llvm::set_value_name(
+                                    value,
+                                    format!("alloc_{hash:032x}").as_bytes(),
+                                );
+                            }
+                            (value, AddressSpace::DATA)
                         }
-                        (value, AddressSpace::DATA)
                     }
                     GlobalAlloc::Function(fn_instance) => (
                         self.get_fn_addr(fn_instance.polymorphize(self.tcx)),
@@ -280,7 +297,7 @@ impl<'ll, 'tcx> ConstMethods<'tcx> for CodegenCx<'ll, 'tcx> {
                             .tcx
                             .global_alloc(self.tcx.vtable_allocation((ty, trait_ref)))
                             .unwrap_memory();
-                        let init = const_alloc_to_llvm(self, alloc);
+                        let init = const_alloc_to_llvm(self, alloc, /*static*/ false);
                         let value = self.static_addr_of(init, alloc.inner().align, None);
                         (value, AddressSpace::DATA)
                     }
@@ -308,7 +325,7 @@ impl<'ll, 'tcx> ConstMethods<'tcx> for CodegenCx<'ll, 'tcx> {
     }
 
     fn const_data_from_alloc(&self, alloc: ConstAllocation<'tcx>) -> Self::Value {
-        const_alloc_to_llvm(self, alloc)
+        const_alloc_to_llvm(self, alloc, /*static*/ false)
     }
 
     fn const_bitcast(&self, val: &'ll Value, ty: &'ll Type) -> &'ll Value {
```
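To see why the fast path above can emit `self.const_usize(alloc.inner().align.bytes())`: a reference to a zero-sized value needs no backing storage, only a non-null address that satisfies the type's alignment, and the alignment value itself is the smallest such address. A rough stand-alone sketch of that invariant in plain Rust (illustrative only, not compiler code):

```rust
use std::mem::align_of;

// For a zero-sized "allocation", any non-null, well-aligned address is
// valid, so the alignment itself can serve as the pointer value. This is
// the same trick `NonNull::dangling()` uses in the standard library.
fn dangling_for<T>() -> *const T {
    align_of::<T>() as *const T
}

fn main() {
    let p = dangling_for::<u64>();
    // On typical targets the address equals the alignment, e.g. 8.
    assert_eq!(p as usize, align_of::<u64>());
}
```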
