Diffstat (limited to 'compiler/rustc_codegen_llvm/src')
-rw-r--r--  compiler/rustc_codegen_llvm/src/abi.rs                        2
-rw-r--r--  compiler/rustc_codegen_llvm/src/allocator.rs                  4
-rw-r--r--  compiler/rustc_codegen_llvm/src/back/lto.rs                   2
-rw-r--r--  compiler/rustc_codegen_llvm/src/back/write.rs                16
-rw-r--r--  compiler/rustc_codegen_llvm/src/base.rs                       6
-rw-r--r--  compiler/rustc_codegen_llvm/src/builder.rs                   16
-rw-r--r--  compiler/rustc_codegen_llvm/src/consts.rs                     7
-rw-r--r--  compiler/rustc_codegen_llvm/src/context.rs                   44
-rw-r--r--  compiler/rustc_codegen_llvm/src/coverageinfo/ffi.rs          89
-rw-r--r--  compiler/rustc_codegen_llvm/src/coverageinfo/map_data.rs    348
-rw-r--r--  compiler/rustc_codegen_llvm/src/coverageinfo/mapgen.rs        4
-rw-r--r--  compiler/rustc_codegen_llvm/src/coverageinfo/mod.rs          82
-rw-r--r--  compiler/rustc_codegen_llvm/src/debuginfo/gdb.rs              3
-rw-r--r--  compiler/rustc_codegen_llvm/src/debuginfo/metadata.rs        20
-rw-r--r--  compiler/rustc_codegen_llvm/src/debuginfo/mod.rs              9
-rw-r--r--  compiler/rustc_codegen_llvm/src/debuginfo/utils.rs            4
-rw-r--r--  compiler/rustc_codegen_llvm/src/intrinsic.rs                 36
-rw-r--r--  compiler/rustc_codegen_llvm/src/lib.rs                        1
-rw-r--r--  compiler/rustc_codegen_llvm/src/llvm/ffi.rs                  13
-rw-r--r--  compiler/rustc_codegen_llvm/src/type_of.rs                    5
-rw-r--r--  compiler/rustc_codegen_llvm/src/va_arg.rs                     2
21 files changed, 623 insertions, 90 deletions
diff --git a/compiler/rustc_codegen_llvm/src/abi.rs b/compiler/rustc_codegen_llvm/src/abi.rs
index 28be6d033f8..d221bad28ef 100644
--- a/compiler/rustc_codegen_llvm/src/abi.rs
+++ b/compiler/rustc_codegen_llvm/src/abi.rs
@@ -351,7 +351,7 @@ impl<'ll, 'tcx> FnAbiLlvmExt<'ll, 'tcx> for FnAbi<'tcx, Ty<'tcx>> {
                     continue;
                 }
                 PassMode::Indirect { attrs: _, extra_attrs: Some(_), on_stack: _ } => {
-                    let ptr_ty = cx.tcx.mk_mut_ptr(arg.layout.ty);
+                    let ptr_ty = Ty::new_mut_ptr(cx.tcx, arg.layout.ty);
                     let ptr_layout = cx.layout_of(ptr_ty);
                     llargument_tys.push(ptr_layout.scalar_pair_element_llvm_type(cx, 0, true));
                     llargument_tys.push(ptr_layout.scalar_pair_element_llvm_type(cx, 1, true));
diff --git a/compiler/rustc_codegen_llvm/src/allocator.rs b/compiler/rustc_codegen_llvm/src/allocator.rs
index ad0636894b7..a57508815d6 100644
--- a/compiler/rustc_codegen_llvm/src/allocator.rs
+++ b/compiler/rustc_codegen_llvm/src/allocator.rs
@@ -77,7 +77,7 @@ pub(crate) unsafe fn codegen(
                 llvm::LLVMRustGetOrInsertFunction(llmod, callee.as_ptr().cast(), callee.len(), ty);
             llvm::LLVMRustSetVisibility(callee, llvm::Visibility::Hidden);
 
-            let llbb = llvm::LLVMAppendBasicBlockInContext(llcx, llfn, c"entry".as_ptr().cast());
+            let llbb = llvm::LLVMAppendBasicBlockInContext(llcx, llfn, "entry\0".as_ptr().cast());
 
             let llbuilder = llvm::LLVMCreateBuilderInContext(llcx);
             llvm::LLVMPositionBuilderAtEnd(llbuilder, llbb);
@@ -129,7 +129,7 @@ pub(crate) unsafe fn codegen(
     attributes::apply_to_llfn(callee, llvm::AttributePlace::Function, &[no_return]);
     llvm::LLVMRustSetVisibility(callee, llvm::Visibility::Hidden);
 
-    let llbb = llvm::LLVMAppendBasicBlockInContext(llcx, llfn, c"entry".as_ptr().cast());
+    let llbb = llvm::LLVMAppendBasicBlockInContext(llcx, llfn, "entry\0".as_ptr().cast());
 
     let llbuilder = llvm::LLVMCreateBuilderInContext(llcx);
     llvm::LLVMPositionBuilderAtEnd(llbuilder, llbb);
diff --git a/compiler/rustc_codegen_llvm/src/back/lto.rs b/compiler/rustc_codegen_llvm/src/back/lto.rs
index 94885b40cc1..d7dd98d7938 100644
--- a/compiler/rustc_codegen_llvm/src/back/lto.rs
+++ b/compiler/rustc_codegen_llvm/src/back/lto.rs
@@ -601,7 +601,7 @@ pub(crate) fn run_pass_manager(
             llvm::LLVMRustAddModuleFlag(
                 module.module_llvm.llmod(),
                 llvm::LLVMModFlagBehavior::Error,
-                c"LTOPostLink".as_ptr().cast(),
+                "LTOPostLink\0".as_ptr().cast(),
                 1,
             );
         }
diff --git a/compiler/rustc_codegen_llvm/src/back/write.rs b/compiler/rustc_codegen_llvm/src/back/write.rs
index 998e3b300da..0f5e975445f 100644
--- a/compiler/rustc_codegen_llvm/src/back/write.rs
+++ b/compiler/rustc_codegen_llvm/src/back/write.rs
@@ -931,16 +931,16 @@ unsafe fn embed_bitcode(
         let llglobal = llvm::LLVMAddGlobal(
             llmod,
             common::val_ty(llconst),
-            c"rustc.embedded.module".as_ptr().cast(),
+            "rustc.embedded.module\0".as_ptr().cast(),
         );
         llvm::LLVMSetInitializer(llglobal, llconst);
 
         let section = if is_apple {
-            c"__LLVM,__bitcode"
+            "__LLVM,__bitcode\0"
         } else if is_aix {
-            c".ipa"
+            ".ipa\0"
         } else {
-            c".llvmbc"
+            ".llvmbc\0"
         };
         llvm::LLVMSetSection(llglobal, section.as_ptr().cast());
         llvm::LLVMRustSetLinkage(llglobal, llvm::Linkage::PrivateLinkage);
@@ -950,15 +950,15 @@ unsafe fn embed_bitcode(
         let llglobal = llvm::LLVMAddGlobal(
             llmod,
             common::val_ty(llconst),
-            c"rustc.embedded.cmdline".as_ptr().cast(),
+            "rustc.embedded.cmdline\0".as_ptr().cast(),
         );
         llvm::LLVMSetInitializer(llglobal, llconst);
         let section = if is_apple {
-            c"__LLVM,__cmdline"
+            "__LLVM,__cmdline\0"
         } else if is_aix {
-            c".info"
+            ".info\0"
         } else {
-            c".llvmcmd"
+            ".llvmcmd\0"
         };
         llvm::LLVMSetSection(llglobal, section.as_ptr().cast());
         llvm::LLVMRustSetLinkage(llglobal, llvm::Linkage::PrivateLinkage);
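
A number of hunks above swap `c"..."` C-string literals back to ordinary string literals with an explicit trailing `\0`, whose raw pointer is handed straight to LLVM. Below is a minimal standalone sketch of that pattern, with no rustc or LLVM types involved; `take_c_string` is a hypothetical stand-in for an `extern "C"` entry point that expects a NUL-terminated pointer.

```rust
use std::ffi::CStr;
use std::os::raw::c_char;

// Hypothetical stand-in for an LLVM-style FFI function that expects a
// NUL-terminated string pointer.
fn take_c_string(ptr: *const c_char) -> usize {
    // Reconstruct a CStr only to show the pointer really is NUL-terminated.
    unsafe { CStr::from_ptr(ptr) }.to_bytes().len()
}

fn main() {
    // The pattern used in the diff: a literal with a hand-written trailing
    // NUL, cast to *const c_char. Forgetting the `\0` would hand the callee
    // an unterminated buffer, which is why the terminator is spelled out.
    let section = ".llvmbc\0";
    assert_eq!(take_c_string(section.as_ptr().cast()), ".llvmbc".len());

    // The checked alternative, similar in spirit to the `cstr!` macro used
    // elsewhere in this commit: build a &CStr and let it supply the pointer.
    let checked: &CStr = CStr::from_bytes_with_nul(b".llvmbc\0").unwrap();
    assert_eq!(take_c_string(checked.as_ptr()), ".llvmbc".len());
}
```
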
diff --git a/compiler/rustc_codegen_llvm/src/base.rs b/compiler/rustc_codegen_llvm/src/base.rs
index 2f7eb08ad3d..5b2bbdb4bde 100644
--- a/compiler/rustc_codegen_llvm/src/base.rs
+++ b/compiler/rustc_codegen_llvm/src/base.rs
@@ -19,6 +19,8 @@ use crate::context::CodegenCx;
 use crate::llvm;
 use crate::value::Value;
 
+use cstr::cstr;
+
 use rustc_codegen_ssa::base::maybe_create_entry_wrapper;
 use rustc_codegen_ssa::mono_item::MonoItemExt;
 use rustc_codegen_ssa::traits::*;
@@ -108,11 +110,11 @@ pub fn compile_codegen_unit(tcx: TyCtxt<'_>, cgu_name: Symbol) -> (ModuleCodegen
 
             // Create the llvm.used and llvm.compiler.used variables.
             if !cx.used_statics.borrow().is_empty() {
-                cx.create_used_variable_impl(c"llvm.used", &*cx.used_statics.borrow());
+                cx.create_used_variable_impl(cstr!("llvm.used"), &*cx.used_statics.borrow());
             }
             if !cx.compiler_used_statics.borrow().is_empty() {
                 cx.create_used_variable_impl(
-                    c"llvm.compiler.used",
+                    cstr!("llvm.compiler.used"),
                     &*cx.compiler_used_statics.borrow(),
                 );
             }
diff --git a/compiler/rustc_codegen_llvm/src/builder.rs b/compiler/rustc_codegen_llvm/src/builder.rs
index 9863ca35202..d55992bf092 100644
--- a/compiler/rustc_codegen_llvm/src/builder.rs
+++ b/compiler/rustc_codegen_llvm/src/builder.rs
@@ -6,6 +6,7 @@ use crate::llvm::{self, AtomicOrdering, AtomicRmwBinOp, BasicBlock, False, True}
 use crate::type_::Type;
 use crate::type_of::LayoutLlvmExt;
 use crate::value::Value;
+use cstr::cstr;
 use libc::{c_char, c_uint};
 use rustc_codegen_ssa::common::{IntPredicate, RealPredicate, SynchronizationScope, TypeKind};
 use rustc_codegen_ssa::mir::operand::{OperandRef, OperandValue};
@@ -25,6 +26,7 @@ use rustc_target::abi::{self, call::FnAbi, Align, Size, WrappingRange};
 use rustc_target::spec::{HasTargetSpec, SanitizerSet, Target};
 use smallvec::SmallVec;
 use std::borrow::Cow;
+use std::ffi::CStr;
 use std::iter;
 use std::ops::Deref;
 use std::ptr;
@@ -44,10 +46,13 @@ impl Drop for Builder<'_, '_, '_> {
     }
 }
 
+// FIXME(eddyb) use a checked constructor when they become `const fn`.
+const EMPTY_C_STR: &CStr = unsafe { CStr::from_bytes_with_nul_unchecked(b"\0") };
+
 /// Empty string, to be used where LLVM expects an instruction name, indicating
 /// that the instruction is to be left unnamed (i.e. numbered, in textual IR).
 // FIXME(eddyb) pass `&CStr` directly to FFI once it's a thin pointer.
-const UNNAMED: *const c_char = c"".as_ptr();
+const UNNAMED: *const c_char = EMPTY_C_STR.as_ptr();
 
 impl<'ll, 'tcx> BackendTypes for Builder<'_, 'll, 'tcx> {
     type Value = <CodegenCx<'ll, 'tcx> as BackendTypes>::Value;
@@ -1002,13 +1007,14 @@ impl<'a, 'll, 'tcx> BuilderMethods<'a, 'tcx> for Builder<'a, 'll, 'tcx> {
     }
 
     fn cleanup_pad(&mut self, parent: Option<&'ll Value>, args: &[&'ll Value]) -> Funclet<'ll> {
+        let name = cstr!("cleanuppad");
         let ret = unsafe {
             llvm::LLVMBuildCleanupPad(
                 self.llbuilder,
                 parent,
                 args.as_ptr(),
                 args.len() as c_uint,
-                c"cleanuppad".as_ptr(),
+                name.as_ptr(),
             )
         };
         Funclet::new(ret.expect("LLVM does not have support for cleanuppad"))
@@ -1022,13 +1028,14 @@ impl<'a, 'll, 'tcx> BuilderMethods<'a, 'tcx> for Builder<'a, 'll, 'tcx> {
     }
 
     fn catch_pad(&mut self, parent: &'ll Value, args: &[&'ll Value]) -> Funclet<'ll> {
+        let name = cstr!("catchpad");
         let ret = unsafe {
             llvm::LLVMBuildCatchPad(
                 self.llbuilder,
                 parent,
                 args.as_ptr(),
                 args.len() as c_uint,
-                c"catchpad".as_ptr(),
+                name.as_ptr(),
             )
         };
         Funclet::new(ret.expect("LLVM does not have support for catchpad"))
@@ -1040,13 +1047,14 @@ impl<'a, 'll, 'tcx> BuilderMethods<'a, 'tcx> for Builder<'a, 'll, 'tcx> {
         unwind: Option<&'ll BasicBlock>,
         handlers: &[&'ll BasicBlock],
     ) -> &'ll Value {
+        let name = cstr!("catchswitch");
         let ret = unsafe {
             llvm::LLVMBuildCatchSwitch(
                 self.llbuilder,
                 parent,
                 unwind,
                 handlers.len() as c_uint,
-                c"catchswitch".as_ptr(),
+                name.as_ptr(),
             )
         };
         let ret = ret.expect("LLVM does not have support for catchswitch");
diff --git a/compiler/rustc_codegen_llvm/src/consts.rs b/compiler/rustc_codegen_llvm/src/consts.rs
index 2087754c66b..df52f50f86f 100644
--- a/compiler/rustc_codegen_llvm/src/consts.rs
+++ b/compiler/rustc_codegen_llvm/src/consts.rs
@@ -8,6 +8,7 @@ use crate::llvm::{self, True};
 use crate::type_::Type;
 use crate::type_of::LayoutLlvmExt;
 use crate::value::Value;
+use cstr::cstr;
 use rustc_codegen_ssa::traits::*;
 use rustc_hir::def_id::DefId;
 use rustc_middle::middle::codegen_fn_attrs::{CodegenFnAttrFlags, CodegenFnAttrs};
@@ -481,9 +482,9 @@ impl<'ll> StaticMethods for CodegenCx<'ll, '_> {
                             .all(|&byte| byte == 0);
 
                     let sect_name = if all_bytes_are_zero {
-                        c"__DATA,__thread_bss"
+                        cstr!("__DATA,__thread_bss")
                     } else {
-                        c"__DATA,__thread_data"
+                        cstr!("__DATA,__thread_data")
                     };
                     llvm::LLVMSetSection(g, sect_name.as_ptr());
                 }
@@ -512,7 +513,7 @@ impl<'ll> StaticMethods for CodegenCx<'ll, '_> {
                     let val = llvm::LLVMMetadataAsValue(self.llcx, meta);
                     llvm::LLVMAddNamedMetadataOperand(
                         self.llmod,
-                        c"wasm.custom_sections".as_ptr().cast(),
+                        "wasm.custom_sections\0".as_ptr().cast(),
                         val,
                     );
                 }
diff --git a/compiler/rustc_codegen_llvm/src/context.rs b/compiler/rustc_codegen_llvm/src/context.rs
index 287a22bc9a6..e1e0a442845 100644
--- a/compiler/rustc_codegen_llvm/src/context.rs
+++ b/compiler/rustc_codegen_llvm/src/context.rs
@@ -8,6 +8,7 @@ use crate::llvm_util;
 use crate::type_::Type;
 use crate::value::Value;
 
+use cstr::cstr;
 use rustc_codegen_ssa::base::{wants_msvc_seh, wants_wasm_eh};
 use rustc_codegen_ssa::traits::*;
 use rustc_data_structures::base_n;
@@ -223,42 +224,36 @@ pub unsafe fn create_module<'ll>(
     // If skipping the PLT is enabled, we need to add some module metadata
     // to ensure intrinsic calls don't use it.
     if !sess.needs_plt() {
-        llvm::LLVMRustAddModuleFlag(
-            llmod,
-            llvm::LLVMModFlagBehavior::Warning,
-            c"RtLibUseGOT".as_ptr().cast(),
-            1,
-        );
+        let avoid_plt = "RtLibUseGOT\0".as_ptr().cast();
+        llvm::LLVMRustAddModuleFlag(llmod, llvm::LLVMModFlagBehavior::Warning, avoid_plt, 1);
     }
 
     // Enable canonical jump tables if CFI is enabled. (See https://reviews.llvm.org/D65629.)
     if sess.is_sanitizer_cfi_canonical_jump_tables_enabled() && sess.is_sanitizer_cfi_enabled() {
+        let canonical_jump_tables = "CFI Canonical Jump Tables\0".as_ptr().cast();
         llvm::LLVMRustAddModuleFlag(
             llmod,
             llvm::LLVMModFlagBehavior::Override,
-            c"CFI Canonical Jump Tables".as_ptr().cast(),
+            canonical_jump_tables,
             1,
         );
     }
 
     // Enable LTO unit splitting if specified or if CFI is enabled. (See https://reviews.llvm.org/D53891.)
     if sess.is_split_lto_unit_enabled() || sess.is_sanitizer_cfi_enabled() {
+        let enable_split_lto_unit = "EnableSplitLTOUnit\0".as_ptr().cast();
         llvm::LLVMRustAddModuleFlag(
             llmod,
             llvm::LLVMModFlagBehavior::Override,
-            c"EnableSplitLTOUnit".as_ptr().cast(),
+            enable_split_lto_unit,
             1,
         );
     }
 
     // Add "kcfi" module flag if KCFI is enabled. (See https://reviews.llvm.org/D119296.)
     if sess.is_sanitizer_kcfi_enabled() {
-        llvm::LLVMRustAddModuleFlag(
-            llmod,
-            llvm::LLVMModFlagBehavior::Override,
-            c"kcfi".as_ptr().cast(),
-            1,
-        );
+        let kcfi = "kcfi\0".as_ptr().cast();
+        llvm::LLVMRustAddModuleFlag(llmod, llvm::LLVMModFlagBehavior::Override, kcfi, 1);
     }
 
     // Control Flow Guard is currently only supported by the MSVC linker on Windows.
@@ -270,7 +265,7 @@ pub unsafe fn create_module<'ll>(
                 llvm::LLVMRustAddModuleFlag(
                     llmod,
                     llvm::LLVMModFlagBehavior::Warning,
-                    c"cfguard".as_ptr() as *const _,
+                    "cfguard\0".as_ptr() as *const _,
                     1,
                 )
             }
@@ -279,7 +274,7 @@ pub unsafe fn create_module<'ll>(
                 llvm::LLVMRustAddModuleFlag(
                     llmod,
                     llvm::LLVMModFlagBehavior::Warning,
-                    c"cfguard".as_ptr() as *const _,
+                    "cfguard\0".as_ptr() as *const _,
                     2,
                 )
             }
@@ -297,26 +292,26 @@ pub unsafe fn create_module<'ll>(
             llvm::LLVMRustAddModuleFlag(
                 llmod,
                 behavior,
-                c"branch-target-enforcement".as_ptr().cast(),
+                "branch-target-enforcement\0".as_ptr().cast(),
                 bti.into(),
             );
             llvm::LLVMRustAddModuleFlag(
                 llmod,
                 behavior,
-                c"sign-return-address".as_ptr().cast(),
+                "sign-return-address\0".as_ptr().cast(),
                 pac_ret.is_some().into(),
             );
             let pac_opts = pac_ret.unwrap_or(PacRet { leaf: false, key: PAuthKey::A });
             llvm::LLVMRustAddModuleFlag(
                 llmod,
                 behavior,
-                c"sign-return-address-all".as_ptr().cast(),
+                "sign-return-address-all\0".as_ptr().cast(),
                 pac_opts.leaf.into(),
             );
             llvm::LLVMRustAddModuleFlag(
                 llmod,
                 behavior,
-                c"sign-return-address-with-bkey".as_ptr().cast(),
+                "sign-return-address-with-bkey\0".as_ptr().cast(),
                 u32::from(pac_opts.key == PAuthKey::B),
             );
         } else {
@@ -332,7 +327,7 @@ pub unsafe fn create_module<'ll>(
         llvm::LLVMRustAddModuleFlag(
             llmod,
             llvm::LLVMModFlagBehavior::Override,
-            c"cf-protection-branch".as_ptr().cast(),
+            "cf-protection-branch\0".as_ptr().cast(),
             1,
         )
     }
@@ -340,7 +335,7 @@ pub unsafe fn create_module<'ll>(
         llvm::LLVMRustAddModuleFlag(
             llmod,
             llvm::LLVMModFlagBehavior::Override,
-            c"cf-protection-return".as_ptr().cast(),
+            "cf-protection-return\0".as_ptr().cast(),
             1,
         )
     }
@@ -349,7 +344,7 @@ pub unsafe fn create_module<'ll>(
         llvm::LLVMRustAddModuleFlag(
             llmod,
             llvm::LLVMModFlagBehavior::Error,
-            c"Virtual Function Elim".as_ptr().cast(),
+            "Virtual Function Elim\0".as_ptr().cast(),
             1,
         );
     }
@@ -481,13 +476,14 @@ impl<'ll, 'tcx> CodegenCx<'ll, 'tcx> {
     }
 
     pub(crate) fn create_used_variable_impl(&self, name: &'static CStr, values: &[&'ll Value]) {
+        let section = cstr!("llvm.metadata");
         let array = self.const_array(self.type_ptr_to(self.type_i8()), values);
 
         unsafe {
             let g = llvm::LLVMAddGlobal(self.llmod, self.val_ty(array), name.as_ptr());
             llvm::LLVMSetInitializer(g, array);
             llvm::LLVMRustSetLinkage(g, llvm::Linkage::AppendingLinkage);
-            llvm::LLVMSetSection(g, c"llvm.metadata".as_ptr());
+            llvm::LLVMSetSection(g, section.as_ptr());
         }
     }
 }
diff --git a/compiler/rustc_codegen_llvm/src/coverageinfo/ffi.rs b/compiler/rustc_codegen_llvm/src/coverageinfo/ffi.rs
new file mode 100644
index 00000000000..1791ce4b315
--- /dev/null
+++ b/compiler/rustc_codegen_llvm/src/coverageinfo/ffi.rs
@@ -0,0 +1,89 @@
+use rustc_middle::mir::coverage::{CounterValueReference, MappedExpressionIndex};
+
+/// Must match the layout of `LLVMRustCounterKind`.
+#[derive(Copy, Clone, Debug)]
+#[repr(C)]
+pub enum CounterKind {
+    Zero = 0,
+    CounterValueReference = 1,
+    Expression = 2,
+}
+
+/// A reference to an instance of an abstract "counter" that will yield a value in a coverage
+/// report. Note that `id` has different interpretations, depending on the `kind`:
+///   * For `CounterKind::Zero`, `id` is assumed to be `0`
+///   * For `CounterKind::CounterValueReference`,  `id` matches the `counter_id` of the injected
+///     instrumentation counter (the `index` argument to the LLVM intrinsic
+///     `instrprof.increment()`)
+///   * For `CounterKind::Expression`, `id` is the index into the coverage map's array of
+///     counter expressions.
+///
+/// Corresponds to struct `llvm::coverage::Counter`.
+///
+/// Must match the layout of `LLVMRustCounter`.
+#[derive(Copy, Clone, Debug)]
+#[repr(C)]
+pub struct Counter {
+    // Important: The layout (order and types of fields) must match its C++ counterpart.
+    pub kind: CounterKind,
+    id: u32,
+}
+
+impl Counter {
+    /// Constructs a new `Counter` of kind `Zero`. For this `CounterKind`, the
+    /// `id` is not used.
+    pub fn zero() -> Self {
+        Self { kind: CounterKind::Zero, id: 0 }
+    }
+
+    /// Constructs a new `Counter` of kind `CounterValueReference`, and converts
+    /// the given 1-based counter_id to the required 0-based equivalent for
+    /// the `Counter` encoding.
+    pub fn counter_value_reference(counter_id: CounterValueReference) -> Self {
+        Self { kind: CounterKind::CounterValueReference, id: counter_id.zero_based_index() }
+    }
+
+    /// Constructs a new `Counter` of kind `Expression`.
+    pub fn expression(mapped_expression_index: MappedExpressionIndex) -> Self {
+        Self { kind: CounterKind::Expression, id: mapped_expression_index.into() }
+    }
+
+    /// Returns true if the `Counter` kind is `Zero`.
+    pub fn is_zero(&self) -> bool {
+        matches!(self.kind, CounterKind::Zero)
+    }
+
+    /// An explicitly-named function to get the ID value, making it more obvious
+    /// that the stored value is now 0-based.
+    pub fn zero_based_id(&self) -> u32 {
+        debug_assert!(!self.is_zero(), "`id` is undefined for CounterKind::Zero");
+        self.id
+    }
+}
+
+/// Corresponds to enum `llvm::coverage::CounterExpression::ExprKind`.
+///
+/// Must match the layout of `LLVMRustCounterExprKind`.
+#[derive(Copy, Clone, Debug)]
+#[repr(C)]
+pub enum ExprKind {
+    Subtract = 0,
+    Add = 1,
+}
+
+/// Corresponds to struct `llvm::coverage::CounterExpression`.
+///
+/// Must match the layout of `LLVMRustCounterExpression`.
+#[derive(Copy, Clone, Debug)]
+#[repr(C)]
+pub struct CounterExpression {
+    pub kind: ExprKind,
+    pub lhs: Counter,
+    pub rhs: Counter,
+}
+
+impl CounterExpression {
+    pub fn new(lhs: Counter, kind: ExprKind, rhs: Counter) -> Self {
+        Self { kind, lhs, rhs }
+    }
+}
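
The new `ffi.rs` above mirrors LLVM's C++ coverage types with `#[repr(C)]` Rust declarations, where field order, field types, and enum discriminants must match the `LLVMRust*` counterparts exactly. Here is a self-contained sketch of that idiom using made-up types (`DemoKind`, `DemoCounter`) rather than the real LLVM structs.

```rust
use std::mem::{align_of, size_of};

/// Made-up analogue of `CounterKind`: a C-compatible enum whose
/// discriminants are written out explicitly so they cannot drift from the
/// foreign declaration they are meant to mirror.
#[allow(dead_code)]
#[derive(Copy, Clone, Debug)]
#[repr(C)]
enum DemoKind {
    Zero = 0,
    Reference = 1,
    Expression = 2,
}

/// Made-up analogue of `Counter`: with `#[repr(C)]` the fields are laid out
/// in declaration order, matching a C struct with the same members.
#[derive(Copy, Clone, Debug)]
#[repr(C)]
struct DemoCounter {
    kind: DemoKind,
    id: u32,
}

fn main() {
    // Layout is easy to eyeball this way; on mainstream targets a fieldless
    // `#[repr(C)]` enum has the size of a C `int`.
    println!("DemoKind:    size = {}, align = {}", size_of::<DemoKind>(), align_of::<DemoKind>());
    println!("DemoCounter: size = {}, align = {}", size_of::<DemoCounter>(), align_of::<DemoCounter>());

    let counter = DemoCounter { kind: DemoKind::Reference, id: 7 };
    println!("{counter:?}");
}
```
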
diff --git a/compiler/rustc_codegen_llvm/src/coverageinfo/map_data.rs b/compiler/rustc_codegen_llvm/src/coverageinfo/map_data.rs
new file mode 100644
index 00000000000..06844afd6b8
--- /dev/null
+++ b/compiler/rustc_codegen_llvm/src/coverageinfo/map_data.rs
@@ -0,0 +1,348 @@
+pub use super::ffi::*;
+
+use rustc_index::{IndexSlice, IndexVec};
+use rustc_middle::bug;
+use rustc_middle::mir::coverage::{
+    CodeRegion, CounterValueReference, ExpressionOperandId, InjectedExpressionId,
+    InjectedExpressionIndex, MappedExpressionIndex, Op,
+};
+use rustc_middle::ty::Instance;
+use rustc_middle::ty::TyCtxt;
+
+#[derive(Clone, Debug, PartialEq)]
+pub struct Expression {
+    lhs: ExpressionOperandId,
+    op: Op,
+    rhs: ExpressionOperandId,
+    region: Option<CodeRegion>,
+}
+
+/// Collects all of the coverage regions associated with (a) injected counters, (b) counter
+/// expressions (additions or subtraction), and (c) unreachable regions (always counted as zero),
+/// for a given Function. Counters and counter expressions have non-overlapping `id`s because they
+/// can both be operands in an expression. This struct also stores the `function_source_hash`,
+/// computed during instrumentation, and forwarded with counters.
+///
+/// Note, it may be important to understand LLVM's definitions of `unreachable` regions versus "gap
+/// regions" (or "gap areas"). A gap region is a code region within a counted region (either counter
+/// or expression), but the line or lines in the gap region are not executable (such as lines with
+/// only whitespace or comments). According to LLVM Code Coverage Mapping documentation, "A count
+/// for a gap area is only used as the line execution count if there are no other regions on a
+/// line."
+#[derive(Debug)]
+pub struct FunctionCoverage<'tcx> {
+    instance: Instance<'tcx>,
+    source_hash: u64,
+    is_used: bool,
+    counters: IndexVec<CounterValueReference, Option<CodeRegion>>,
+    expressions: IndexVec<InjectedExpressionIndex, Option<Expression>>,
+    unreachable_regions: Vec<CodeRegion>,
+}
+
+impl<'tcx> FunctionCoverage<'tcx> {
+    /// Creates a new set of coverage data for a used (called) function.
+    pub fn new(tcx: TyCtxt<'tcx>, instance: Instance<'tcx>) -> Self {
+        Self::create(tcx, instance, true)
+    }
+
+    /// Creates a new set of coverage data for an unused (never called) function.
+    pub fn unused(tcx: TyCtxt<'tcx>, instance: Instance<'tcx>) -> Self {
+        Self::create(tcx, instance, false)
+    }
+
+    fn create(tcx: TyCtxt<'tcx>, instance: Instance<'tcx>, is_used: bool) -> Self {
+        let coverageinfo = tcx.coverageinfo(instance.def);
+        debug!(
+            "FunctionCoverage::create(instance={:?}) has coverageinfo={:?}. is_used={}",
+            instance, coverageinfo, is_used
+        );
+        Self {
+            instance,
+            source_hash: 0, // will be set with the first `add_counter()`
+            is_used,
+            counters: IndexVec::from_elem_n(None, coverageinfo.num_counters as usize),
+            expressions: IndexVec::from_elem_n(None, coverageinfo.num_expressions as usize),
+            unreachable_regions: Vec::new(),
+        }
+    }
+
+    /// Returns true for a used (called) function, and false for an unused function.
+    pub fn is_used(&self) -> bool {
+        self.is_used
+    }
+
+    /// Sets the function source hash value. If called multiple times for the same function, all
+    /// calls should have the same hash value.
+    pub fn set_function_source_hash(&mut self, source_hash: u64) {
+        if self.source_hash == 0 {
+            self.source_hash = source_hash;
+        } else {
+            debug_assert_eq!(source_hash, self.source_hash);
+        }
+    }
+
+    /// Adds a code region to be counted by an injected counter intrinsic.
+    pub fn add_counter(&mut self, id: CounterValueReference, region: CodeRegion) {
+        if let Some(previous_region) = self.counters[id].replace(region.clone()) {
+            assert_eq!(previous_region, region, "add_counter: code region for id changed");
+        }
+    }
+
+    /// Both counters and "counter expressions" (or simply, "expressions") can be operands in other
+    /// expressions. Expression IDs start from `u32::MAX` and go down, so the range of expression
+    /// IDs will not overlap with the range of counter IDs. Counters and expressions can be added in
+    /// any order, and expressions can still be assigned contiguous (though descending) IDs, without
+    /// knowing what the last counter ID will be.
+    ///
+    /// When storing the expression data in the `expressions` vector in the `FunctionCoverage`
+    /// struct, its vector index is computed, from the given expression ID, by subtracting from
+    /// `u32::MAX`.
+    ///
+    /// Since the expression operands (`lhs` and `rhs`) can reference either counters or
+    /// expressions, an operand that references an expression also uses its original ID, descending
+/// from `u32::MAX`. These operands are translated only during code generation, after all
+    /// counters and expressions have been added.
+    pub fn add_counter_expression(
+        &mut self,
+        expression_id: InjectedExpressionId,
+        lhs: ExpressionOperandId,
+        op: Op,
+        rhs: ExpressionOperandId,
+        region: Option<CodeRegion>,
+    ) {
+        debug!(
+            "add_counter_expression({:?}, lhs={:?}, op={:?}, rhs={:?} at {:?}",
+            expression_id, lhs, op, rhs, region
+        );
+        let expression_index = self.expression_index(u32::from(expression_id));
+        debug_assert!(
+            expression_index.as_usize() < self.expressions.len(),
+            "expression_index {} is out of range for expressions.len() = {}
+            for {:?}",
+            expression_index.as_usize(),
+            self.expressions.len(),
+            self,
+        );
+        if let Some(previous_expression) = self.expressions[expression_index].replace(Expression {
+            lhs,
+            op,
+            rhs,
+            region: region.clone(),
+        }) {
+            assert_eq!(
+                previous_expression,
+                Expression { lhs, op, rhs, region },
+                "add_counter_expression: expression for id changed"
+            );
+        }
+    }
+
+    /// Add a region that will be marked as "unreachable", with a constant "zero counter".
+    pub fn add_unreachable_region(&mut self, region: CodeRegion) {
+        self.unreachable_regions.push(region)
+    }
+
+    /// Return the source hash, generated from the HIR node structure, and used to indicate whether
+    /// or not the source code structure changed between different compilations.
+    pub fn source_hash(&self) -> u64 {
+        self.source_hash
+    }
+
+    /// Generate an array of CounterExpressions, and an iterator over all `Counter`s and their
+    /// associated `Regions` (from which the LLVM-specific `CoverageMapGenerator` will create
+    /// `CounterMappingRegion`s).
+    pub fn get_expressions_and_counter_regions(
+        &self,
+    ) -> (Vec<CounterExpression>, impl Iterator<Item = (Counter, &CodeRegion)>) {
+        assert!(
+            self.source_hash != 0 || !self.is_used,
+            "No counters provided the source_hash for used function: {:?}",
+            self.instance
+        );
+
+        let counter_regions = self.counter_regions();
+        let (counter_expressions, expression_regions) = self.expressions_with_regions();
+        let unreachable_regions = self.unreachable_regions();
+
+        let counter_regions =
+            counter_regions.chain(expression_regions.into_iter().chain(unreachable_regions));
+        (counter_expressions, counter_regions)
+    }
+
+    fn counter_regions(&self) -> impl Iterator<Item = (Counter, &CodeRegion)> {
+        self.counters.iter_enumerated().filter_map(|(index, entry)| {
+            // Option::map() will return None to filter out missing counters. This may happen
+            // if, for example, a MIR-instrumented counter is removed during an optimization.
+            entry.as_ref().map(|region| (Counter::counter_value_reference(index), region))
+        })
+    }
+
+    fn expressions_with_regions(
+        &self,
+    ) -> (Vec<CounterExpression>, impl Iterator<Item = (Counter, &CodeRegion)>) {
+        let mut counter_expressions = Vec::with_capacity(self.expressions.len());
+        let mut expression_regions = Vec::with_capacity(self.expressions.len());
+        let mut new_indexes = IndexVec::from_elem_n(None, self.expressions.len());
+
+        // This closure converts any `Expression` operand (`lhs` or `rhs` of the `Op::Add` or
+        // `Op::Subtract` operation) into its native `llvm::coverage::Counter::CounterKind` type
+        // and value. Operand ID value `0` maps to `CounterKind::Zero`; values in the known range
+        // of injected LLVM counters map to `CounterKind::CounterValueReference` (and the value
+        // matches the injected counter index); and any other value is converted into a
+        // `CounterKind::Expression` with the expression's `new_index`.
+        //
+        // Expressions will be returned from this function in a sequential vector (array) of
+        // `CounterExpression`, so the expression IDs must be mapped from their original,
+        // potentially sparse set of indexes, originally in reverse order from `u32::MAX`.
+        //
+        // An `Expression` as an operand will have already been encountered as an `Expression` with
+        // operands, so its new_index will already have been generated (as a 1-up index value).
+        // (If an `Expression` as an operand does not have a corresponding new_index, it was
+        // probably optimized out, after the expression was injected into the MIR, so it will
+        // get a `CounterKind::Zero` instead.)
+        //
+        // In other words, an `Expression` at any given index can include other expressions as
+        // operands, but expression operands can only come from the subset of expressions having
+        // `expression_index`s lower than the referencing `Expression`. Therefore, it is
+        // reasonable to look up the new index of an expression operand while the `new_indexes`
+        // vector is only complete up to the current `ExpressionIndex`.
+        let id_to_counter = |new_indexes: &IndexSlice<
+            InjectedExpressionIndex,
+            Option<MappedExpressionIndex>,
+        >,
+                             id: ExpressionOperandId| {
+            if id == ExpressionOperandId::ZERO {
+                Some(Counter::zero())
+            } else if id.index() < self.counters.len() {
+                debug_assert!(
+                    id.index() > 0,
+                    "ExpressionOperandId indexes for counters are 1-based, but this id={}",
+                    id.index()
+                );
+                // Note: Some codegen-injected Counters may be only referenced by `Expression`s,
+                // and may not have their own `CodeRegion`s.
+                let index = CounterValueReference::from(id.index());
+                // Note, the conversion to LLVM `Counter` adjusts the index to be zero-based.
+                Some(Counter::counter_value_reference(index))
+            } else {
+                let index = self.expression_index(u32::from(id));
+                self.expressions
+                    .get(index)
+                    .expect("expression id is out of range")
+                    .as_ref()
+                    // If an expression was optimized out, assume it would have produced a count
+                    // of zero. This ensures that expressions dependent on optimized-out
+                    // expressions are still valid.
+                    .map_or(Some(Counter::zero()), |_| new_indexes[index].map(Counter::expression))
+            }
+        };
+
+        for (original_index, expression) in
+            self.expressions.iter_enumerated().filter_map(|(original_index, entry)| {
+                // Option::map() will return None to filter out missing expressions. This may happen
+                // if, for example, a MIR-instrumented expression is removed during an optimization.
+                entry.as_ref().map(|expression| (original_index, expression))
+            })
+        {
+            let optional_region = &expression.region;
+            let Expression { lhs, op, rhs, .. } = *expression;
+
+            if let Some(Some((lhs_counter, mut rhs_counter))) = id_to_counter(&new_indexes, lhs)
+                .map(|lhs_counter| {
+                    id_to_counter(&new_indexes, rhs).map(|rhs_counter| (lhs_counter, rhs_counter))
+                })
+            {
+                if lhs_counter.is_zero() && op.is_subtract() {
+                    // The left side of a subtraction was probably optimized out. As an example,
+                    // a branch condition might be evaluated as a constant expression, and the
+                    // branch could be removed, dropping unused counters in the process.
+                    //
+                    // Since counters are unsigned, we must assume the result of the expression
+                    // can be no more and no less than zero. An expression known to evaluate to zero
+                    // does not need to be added to the coverage map.
+                    //
+                    // Coverage test `loops_branches.rs` includes multiple variations of branches
+                    // based on constant conditional (literal `true` or `false`), and demonstrates
+                    // that the expected counts are still correct.
+                    debug!(
+                        "Expression subtracts from zero (assume unreachable): \
+                        original_index={:?}, lhs={:?}, op={:?}, rhs={:?}, region={:?}",
+                        original_index, lhs, op, rhs, optional_region,
+                    );
+                    rhs_counter = Counter::zero();
+                }
+                debug_assert!(
+                    lhs_counter.is_zero()
+                        // Note: with `as usize` the ID _could_ overflow/wrap if `usize = u16`
+                        || ((lhs_counter.zero_based_id() as usize)
+                            <= usize::max(self.counters.len(), self.expressions.len())),
+                    "lhs id={} > both counters.len()={} and expressions.len()={}
+                    ({:?} {:?} {:?})",
+                    lhs_counter.zero_based_id(),
+                    self.counters.len(),
+                    self.expressions.len(),
+                    lhs_counter,
+                    op,
+                    rhs_counter,
+                );
+
+                debug_assert!(
+                    rhs_counter.is_zero()
+                        // Note: with `as usize` the ID _could_ overflow/wrap if `usize = u16`
+                        || ((rhs_counter.zero_based_id() as usize)
+                            <= usize::max(self.counters.len(), self.expressions.len())),
+                    "rhs id={} > both counters.len()={} and expressions.len()={}
+                    ({:?} {:?} {:?})",
+                    rhs_counter.zero_based_id(),
+                    self.counters.len(),
+                    self.expressions.len(),
+                    lhs_counter,
+                    op,
+                    rhs_counter,
+                );
+
+                // Both operands exist. `Expression` operands exist in `self.expressions` and have
+                // been assigned a `new_index`.
+                let mapped_expression_index =
+                    MappedExpressionIndex::from(counter_expressions.len());
+                let expression = CounterExpression::new(
+                    lhs_counter,
+                    match op {
+                        Op::Add => ExprKind::Add,
+                        Op::Subtract => ExprKind::Subtract,
+                    },
+                    rhs_counter,
+                );
+                debug!(
+                    "Adding expression {:?} = {:?}, region: {:?}",
+                    mapped_expression_index, expression, optional_region
+                );
+                counter_expressions.push(expression);
+                new_indexes[original_index] = Some(mapped_expression_index);
+                if let Some(region) = optional_region {
+                    expression_regions.push((Counter::expression(mapped_expression_index), region));
+                }
+            } else {
+                bug!(
+                    "expression has one or more missing operands \
+                      original_index={:?}, lhs={:?}, op={:?}, rhs={:?}, region={:?}",
+                    original_index,
+                    lhs,
+                    op,
+                    rhs,
+                    optional_region,
+                );
+            }
+        }
+        (counter_expressions, expression_regions.into_iter())
+    }
+
+    fn unreachable_regions(&self) -> impl Iterator<Item = (Counter, &CodeRegion)> {
+        self.unreachable_regions.iter().map(|region| (Counter::zero(), region))
+    }
+
+    fn expression_index(&self, id_descending_from_max: u32) -> InjectedExpressionIndex {
+        debug_assert!(id_descending_from_max >= self.counters.len() as u32);
+        InjectedExpressionIndex::from(u32::MAX - id_descending_from_max)
+    }
+}
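
The `expression_index` helper at the end of `map_data.rs` depends on expression IDs being handed out downward from `u32::MAX`, so they can never collide with counter IDs at the low end of the range, and the dense vector slot is recovered by subtraction. A tiny standalone illustration of that mapping, using plain integers instead of the rustc index types:

```rust
/// Mirrors the shape of `FunctionCoverage::expression_index` above:
/// expression IDs descend from `u32::MAX`, so slot 0 belongs to the first
/// expression that was injected, slot 1 to the next, and so on.
fn expression_index(id_descending_from_max: u32, num_counters: u32) -> u32 {
    // An expression ID must lie outside the (low) counter ID range.
    assert!(id_descending_from_max >= num_counters);
    u32::MAX - id_descending_from_max
}

fn main() {
    let num_counters = 4;

    assert_eq!(expression_index(u32::MAX, num_counters), 0);
    assert_eq!(expression_index(u32::MAX - 1, num_counters), 1);
    assert_eq!(expression_index(u32::MAX - 2, num_counters), 2);

    // Because the two ID ranges never overlap, an operand ID alone is enough
    // to tell whether it names a counter or another expression.
    let operand = 3u32;
    assert!(operand < num_counters, "small IDs refer to counters");
}
```
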
diff --git a/compiler/rustc_codegen_llvm/src/coverageinfo/mapgen.rs b/compiler/rustc_codegen_llvm/src/coverageinfo/mapgen.rs
index 21a1ac34844..a1ff2aa6625 100644
--- a/compiler/rustc_codegen_llvm/src/coverageinfo/mapgen.rs
+++ b/compiler/rustc_codegen_llvm/src/coverageinfo/mapgen.rs
@@ -1,10 +1,10 @@
 use crate::common::CodegenCx;
 use crate::coverageinfo;
+use crate::coverageinfo::map_data::{Counter, CounterExpression};
 use crate::llvm;
 
 use llvm::coverageinfo::CounterMappingRegion;
-use rustc_codegen_ssa::coverageinfo::map::{Counter, CounterExpression};
-use rustc_codegen_ssa::traits::{ConstMethods, CoverageInfoMethods};
+use rustc_codegen_ssa::traits::ConstMethods;
 use rustc_data_structures::fx::FxIndexSet;
 use rustc_hir::def::DefKind;
 use rustc_hir::def_id::DefId;
diff --git a/compiler/rustc_codegen_llvm/src/coverageinfo/mod.rs b/compiler/rustc_codegen_llvm/src/coverageinfo/mod.rs
index cd261293e9b..42fdbd78618 100644
--- a/compiler/rustc_codegen_llvm/src/coverageinfo/mod.rs
+++ b/compiler/rustc_codegen_llvm/src/coverageinfo/mod.rs
@@ -3,13 +3,13 @@ use crate::llvm;
 use crate::abi::Abi;
 use crate::builder::Builder;
 use crate::common::CodegenCx;
+use crate::coverageinfo::map_data::{CounterExpression, FunctionCoverage};
 
 use libc::c_uint;
 use llvm::coverageinfo::CounterMappingRegion;
-use rustc_codegen_ssa::coverageinfo::map::{CounterExpression, FunctionCoverage};
 use rustc_codegen_ssa::traits::{
-    BaseTypeMethods, BuilderMethods, ConstMethods, CoverageInfoBuilderMethods, CoverageInfoMethods,
-    MiscMethods, StaticMethods,
+    BaseTypeMethods, BuilderMethods, ConstMethods, CoverageInfoBuilderMethods, MiscMethods,
+    StaticMethods,
 };
 use rustc_data_structures::fx::FxHashMap;
 use rustc_hir as hir;
@@ -17,16 +17,20 @@ use rustc_hir::def_id::DefId;
 use rustc_llvm::RustString;
 use rustc_middle::bug;
 use rustc_middle::mir::coverage::{
-    CodeRegion, CounterValueReference, ExpressionOperandId, InjectedExpressionId, Op,
+    CodeRegion, CounterValueReference, CoverageKind, ExpressionOperandId, InjectedExpressionId, Op,
 };
+use rustc_middle::mir::Coverage;
 use rustc_middle::ty;
-use rustc_middle::ty::layout::FnAbiOf;
+use rustc_middle::ty::layout::{FnAbiOf, HasTyCtxt};
 use rustc_middle::ty::subst::InternalSubsts;
 use rustc_middle::ty::Instance;
+use rustc_middle::ty::Ty;
 
 use std::cell::RefCell;
 use std::ffi::CString;
 
+mod ffi;
+pub(crate) mod map_data;
 pub mod mapgen;
 
 const UNUSED_FUNCTION_COUNTER_ID: CounterValueReference = CounterValueReference::START;
@@ -53,11 +57,17 @@ impl<'ll, 'tcx> CrateCoverageContext<'ll, 'tcx> {
     }
 }
 
-impl<'ll, 'tcx> CoverageInfoMethods<'tcx> for CodegenCx<'ll, 'tcx> {
-    fn coverageinfo_finalize(&self) {
+// These methods used to be part of trait `CoverageInfoMethods`, which no longer
+// exists after most coverage code was moved out of SSA.
+impl<'ll, 'tcx> CodegenCx<'ll, 'tcx> {
+    pub(crate) fn coverageinfo_finalize(&self) {
         mapgen::finalize(self)
     }
 
+    /// For LLVM codegen, returns a function-specific `Value` for a global
+    /// string, to hold the function name passed to LLVM intrinsic
+    /// `instrprof.increment()`. The `Value` is only created once per instance.
+    /// Multiple invocations with the same instance return the same `Value`.
     fn get_pgo_func_name_var(&self, instance: Instance<'tcx>) -> &'ll llvm::Value {
         if let Some(coverage_context) = self.coverage_context() {
             debug!("getting pgo_func_name_var for instance={:?}", instance);
@@ -94,6 +104,54 @@ impl<'ll, 'tcx> CoverageInfoMethods<'tcx> for CodegenCx<'ll, 'tcx> {
 }
 
 impl<'tcx> CoverageInfoBuilderMethods<'tcx> for Builder<'_, '_, 'tcx> {
+    fn add_coverage(&mut self, instance: Instance<'tcx>, coverage: &Coverage) {
+        let bx = self;
+
+        let Coverage { kind, code_region } = coverage.clone();
+        match kind {
+            CoverageKind::Counter { function_source_hash, id } => {
+                if bx.set_function_source_hash(instance, function_source_hash) {
+                    // If `set_function_source_hash()` returned true, the coverage map is enabled,
+                    // so continue adding the counter.
+                    if let Some(code_region) = code_region {
+                        // Note: Some counters do not have code regions, but may still be referenced
+                        // from expressions. In that case, don't add the counter to the coverage map,
+                        // but do inject the counter intrinsic.
+                        bx.add_coverage_counter(instance, id, code_region);
+                    }
+
+                    let coverageinfo = bx.tcx().coverageinfo(instance.def);
+
+                    let fn_name = bx.get_pgo_func_name_var(instance);
+                    let hash = bx.const_u64(function_source_hash);
+                    let num_counters = bx.const_u32(coverageinfo.num_counters);
+                    let index = bx.const_u32(id.zero_based_index());
+                    debug!(
+                        "codegen intrinsic instrprof.increment(fn_name={:?}, hash={:?}, num_counters={:?}, index={:?})",
+                        fn_name, hash, num_counters, index,
+                    );
+                    bx.instrprof_increment(fn_name, hash, num_counters, index);
+                }
+            }
+            CoverageKind::Expression { id, lhs, op, rhs } => {
+                bx.add_coverage_counter_expression(instance, id, lhs, op, rhs, code_region);
+            }
+            CoverageKind::Unreachable => {
+                bx.add_coverage_unreachable(
+                    instance,
+                    code_region.expect("unreachable regions always have code regions"),
+                );
+            }
+        }
+    }
+}
+
+// These methods used to be part of trait `CoverageInfoBuilderMethods`, but
+// after moving most coverage code out of SSA they are now just ordinary methods.
+impl<'tcx> Builder<'_, '_, 'tcx> {
+    /// Returns true if the function source hash was added to the coverage map (even if it had
+    /// already been added, for this instance). Returns false *only* if `-C instrument-coverage` is
+    /// not enabled (a coverage map is not being generated).
     fn set_function_source_hash(
         &mut self,
         instance: Instance<'tcx>,
@@ -115,6 +173,8 @@ impl<'tcx> CoverageInfoBuilderMethods<'tcx> for Builder<'_, '_, 'tcx> {
         }
     }
 
+    /// Returns true if the counter was added to the coverage map; false if `-C instrument-coverage`
+    /// is not enabled (a coverage map is not being generated).
     fn add_coverage_counter(
         &mut self,
         instance: Instance<'tcx>,
@@ -137,6 +197,8 @@ impl<'tcx> CoverageInfoBuilderMethods<'tcx> for Builder<'_, '_, 'tcx> {
         }
     }
 
+    /// Returns true if the expression was added to the coverage map; false if
+    /// `-C instrument-coverage` is not enabled (a coverage map is not being generated).
     fn add_coverage_counter_expression(
         &mut self,
         instance: Instance<'tcx>,
@@ -163,6 +225,8 @@ impl<'tcx> CoverageInfoBuilderMethods<'tcx> for Builder<'_, '_, 'tcx> {
         }
     }
 
+    /// Returns true if the region was added to the coverage map; false if `-C instrument-coverage`
+    /// is not enabled (a coverage map is not being generated).
     fn add_coverage_unreachable(&mut self, instance: Instance<'tcx>, region: CodeRegion) -> bool {
         if let Some(coverage_context) = self.coverage_context() {
             debug!(
@@ -199,8 +263,8 @@ fn declare_unused_fn<'tcx>(cx: &CodegenCx<'_, 'tcx>, def_id: DefId) -> Instance<
         tcx.symbol_name(instance).name,
         cx.fn_abi_of_fn_ptr(
             ty::Binder::dummy(tcx.mk_fn_sig(
-                [tcx.mk_unit()],
-                tcx.mk_unit(),
+                [Ty::new_unit(tcx)],
+                Ty::new_unit(tcx),
                 false,
                 hir::Unsafety::Unsafe,
                 Abi::Rust,
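
The `add_coverage` method introduced above is the single entry point that dispatches on `CoverageKind`: only the `Counter` case emits the `llvm.instrprof.increment` intrinsic, and only when the function source hash could be recorded (i.e. coverage instrumentation is enabled). Below is a deliberately simplified, self-contained sketch of that control flow; the types and the `instrumentation_enabled` flag are stand-ins, not the real rustc or LLVM APIs.

```rust
/// Simplified stand-ins for the MIR-side types; the real code uses
/// `rustc_middle::mir::coverage::CoverageKind` and the LLVM builder.
#[derive(Debug)]
enum Kind {
    Counter { function_source_hash: u64, id: u32 },
    Expression { id: u32 },
    Unreachable,
}

struct Coverage {
    kind: Kind,
    code_region: Option<String>,
}

/// Mirrors the shape of `add_coverage` above: only `Counter` causes an
/// intrinsic to be emitted, and only when instrumentation is on; the
/// intrinsic is emitted even for counters that have no code region, while
/// expressions and unreachable regions only update the per-function map.
fn add_coverage(instrumentation_enabled: bool, coverage: Coverage) -> Vec<String> {
    let mut emitted = Vec::new();
    match coverage.kind {
        Kind::Counter { function_source_hash, id } => {
            if instrumentation_enabled {
                if let Some(region) = &coverage.code_region {
                    emitted.push(format!("map: counter {id} -> {region}"));
                }
                emitted.push(format!(
                    "llvm.instrprof.increment(hash={function_source_hash:#x}, index={id})"
                ));
            }
        }
        Kind::Expression { id } => emitted.push(format!("map: expression {id}")),
        Kind::Unreachable => {
            let region = coverage.code_region.expect("unreachable regions always have a region");
            emitted.push(format!("map: unreachable -> {region}"));
        }
    }
    emitted
}

fn main() {
    let cov = Coverage {
        kind: Kind::Counter { function_source_hash: 0xABCD, id: 1 },
        code_region: Some("src/lib.rs:3:5-3:20".to_string()),
    };
    for line in add_coverage(true, cov) {
        println!("{line}");
    }
}
```
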
diff --git a/compiler/rustc_codegen_llvm/src/debuginfo/gdb.rs b/compiler/rustc_codegen_llvm/src/debuginfo/gdb.rs
index 8be54b7eb71..37f30917609 100644
--- a/compiler/rustc_codegen_llvm/src/debuginfo/gdb.rs
+++ b/compiler/rustc_codegen_llvm/src/debuginfo/gdb.rs
@@ -38,6 +38,7 @@ pub fn get_or_insert_gdb_debug_scripts_section_global<'ll>(cx: &CodegenCx<'ll, '
         unsafe { llvm::LLVMGetNamedGlobal(cx.llmod, c_section_var_name.as_ptr().cast()) };
 
     section_var.unwrap_or_else(|| {
+        let section_name = b".debug_gdb_scripts\0";
         let mut section_contents = Vec::new();
 
         // Add the pretty printers for the standard library first.
@@ -70,7 +71,7 @@ pub fn get_or_insert_gdb_debug_scripts_section_global<'ll>(cx: &CodegenCx<'ll, '
             let section_var = cx
                 .define_global(section_var_name, llvm_type)
                 .unwrap_or_else(|| bug!("symbol `{}` is already defined", section_var_name));
-            llvm::LLVMSetSection(section_var, c".debug_gdb_scripts".as_ptr().cast());
+            llvm::LLVMSetSection(section_var, section_name.as_ptr().cast());
             llvm::LLVMSetInitializer(section_var, cx.const_bytes(section_contents));
             llvm::LLVMSetGlobalConstant(section_var, llvm::True);
             llvm::LLVMSetUnnamedAddress(section_var, llvm::UnnamedAddr::Global);
diff --git a/compiler/rustc_codegen_llvm/src/debuginfo/metadata.rs b/compiler/rustc_codegen_llvm/src/debuginfo/metadata.rs
index 4b88ab8a97a..d61400d3fa3 100644
--- a/compiler/rustc_codegen_llvm/src/debuginfo/metadata.rs
+++ b/compiler/rustc_codegen_llvm/src/debuginfo/metadata.rs
@@ -20,6 +20,7 @@ use crate::llvm::debuginfo::{
 };
 use crate::value::Value;
 
+use cstr::cstr;
 use rustc_codegen_ssa::debuginfo::type_names::cpp_like_debuginfo;
 use rustc_codegen_ssa::debuginfo::type_names::VTableNameKind;
 use rustc_codegen_ssa::traits::*;
@@ -167,7 +168,7 @@ fn build_pointer_or_reference_di_node<'ll, 'tcx>(
     // a (fat) pointer. Make sure it is not called for e.g. `Box<T, NonZSTAllocator>`.
     debug_assert_eq!(
         cx.size_and_align_of(ptr_type),
-        cx.size_and_align_of(cx.tcx.mk_mut_ptr(pointee_type))
+        cx.size_and_align_of(Ty::new_mut_ptr(cx.tcx, pointee_type))
     );
 
     let pointee_type_di_node = type_di_node(cx, pointee_type);
@@ -222,8 +223,11 @@ fn build_pointer_or_reference_di_node<'ll, 'tcx>(
                     //        at all and instead emit regular struct debuginfo for it. We just
                     //        need to make sure that we don't break existing debuginfo consumers
                     //        by doing that (at least not without a warning period).
-                    let layout_type =
-                        if ptr_type.is_box() { cx.tcx.mk_mut_ptr(pointee_type) } else { ptr_type };
+                    let layout_type = if ptr_type.is_box() {
+                        Ty::new_mut_ptr(cx.tcx, pointee_type)
+                    } else {
+                        ptr_type
+                    };
 
                     let layout = cx.layout_of(layout_type);
                     let addr_field = layout.field(cx, abi::FAT_PTR_ADDR);
@@ -811,6 +815,7 @@ pub fn build_compile_unit_di_node<'ll, 'tcx>(
 
     let name_in_debuginfo = name_in_debuginfo.to_string_lossy();
     let work_dir = tcx.sess.opts.working_dir.to_string_lossy(FileNameDisplayPreference::Remapped);
+    let flags = "\0";
     let output_filenames = tcx.output_filenames(());
     let split_name = if tcx.sess.target_can_use_split_dwarf() {
         output_filenames
@@ -847,7 +852,7 @@ pub fn build_compile_unit_di_node<'ll, 'tcx>(
             producer.as_ptr().cast(),
             producer.len(),
             tcx.sess.opts.optimize != config::OptLevel::No,
-            c"".as_ptr().cast(),
+            flags.as_ptr().cast(),
             0,
             // NB: this doesn't actually have any perceptible effect, it seems. LLVM will instead
             // put the path supplied to `MCSplitDwarfFile` into the debug info of the final
@@ -876,7 +881,8 @@ pub fn build_compile_unit_di_node<'ll, 'tcx>(
             );
             let val = llvm::LLVMMetadataAsValue(debug_context.llcontext, gcov_metadata);
 
-            llvm::LLVMAddNamedMetadataOperand(debug_context.llmod, c"llvm.gcov".as_ptr(), val);
+            let llvm_gcov_ident = cstr!("llvm.gcov");
+            llvm::LLVMAddNamedMetadataOperand(debug_context.llmod, llvm_gcov_ident.as_ptr(), val);
         }
 
         // Insert `llvm.ident` metadata on the wasm targets since that will
@@ -889,7 +895,7 @@ pub fn build_compile_unit_di_node<'ll, 'tcx>(
             );
             llvm::LLVMAddNamedMetadataOperand(
                 debug_context.llmod,
-                c"llvm.ident".as_ptr(),
+                cstr!("llvm.ident").as_ptr(),
                 llvm::LLVMMDNodeInContext(debug_context.llcontext, &name_metadata, 1),
             );
         }
@@ -1295,7 +1301,7 @@ fn build_vtable_type_di_node<'ll, 'tcx>(
 
     // All function pointers are described as opaque pointers. This could be improved in the future
     // by describing them as actual function pointers.
-    let void_pointer_ty = tcx.mk_imm_ptr(tcx.types.unit);
+    let void_pointer_ty = Ty::new_imm_ptr(tcx, tcx.types.unit);
     let void_pointer_type_di_node = type_di_node(cx, void_pointer_ty);
     let usize_di_node = type_di_node(cx, tcx.types.usize);
     let (pointer_size, pointer_align) = cx.size_and_align_of(void_pointer_ty);
diff --git a/compiler/rustc_codegen_llvm/src/debuginfo/mod.rs b/compiler/rustc_codegen_llvm/src/debuginfo/mod.rs
index c2f16cad3fc..b924c771af7 100644
--- a/compiler/rustc_codegen_llvm/src/debuginfo/mod.rs
+++ b/compiler/rustc_codegen_llvm/src/debuginfo/mod.rs
@@ -113,7 +113,7 @@ impl<'ll, 'tcx> CodegenUnitDebugContext<'ll, 'tcx> {
                 llvm::LLVMRustAddModuleFlag(
                     self.llmod,
                     llvm::LLVMModFlagBehavior::Warning,
-                    c"Dwarf Version".as_ptr().cast(),
+                    "Dwarf Version\0".as_ptr().cast(),
                     dwarf_version,
                 );
             } else {
@@ -121,16 +121,17 @@ impl<'ll, 'tcx> CodegenUnitDebugContext<'ll, 'tcx> {
                 llvm::LLVMRustAddModuleFlag(
                     self.llmod,
                     llvm::LLVMModFlagBehavior::Warning,
-                    c"CodeView".as_ptr().cast(),
+                    "CodeView\0".as_ptr().cast(),
                     1,
                 )
             }
 
             // Prevent bitcode readers from deleting the debug info.
+            let ptr = "Debug Info Version\0".as_ptr();
             llvm::LLVMRustAddModuleFlag(
                 self.llmod,
                 llvm::LLVMModFlagBehavior::Warning,
-                c"Debug Info Version".as_ptr().cast(),
+                ptr.cast(),
                 llvm::LLVMRustDebugMetadataVersion(),
             );
         }
@@ -453,7 +454,7 @@ impl<'ll, 'tcx> DebugInfoMethods<'tcx> for CodegenCx<'ll, 'tcx> {
                         ty::Array(ct, _)
                             if (*ct == cx.tcx.types.u8) || cx.layout_of(*ct).is_zst() =>
                         {
-                            cx.tcx.mk_imm_ptr(*ct)
+                            Ty::new_imm_ptr(cx.tcx, *ct)
                         }
                         _ => t,
                     };
diff --git a/compiler/rustc_codegen_llvm/src/debuginfo/utils.rs b/compiler/rustc_codegen_llvm/src/debuginfo/utils.rs
index 6bcd3e5bf58..7be83638676 100644
--- a/compiler/rustc_codegen_llvm/src/debuginfo/utils.rs
+++ b/compiler/rustc_codegen_llvm/src/debuginfo/utils.rs
@@ -82,8 +82,8 @@ pub(crate) fn fat_pointer_kind<'ll, 'tcx>(
         ty::Foreign(_) => {
             // Assert that pointers to foreign types really are thin:
             debug_assert_eq!(
-                cx.size_of(cx.tcx.mk_imm_ptr(pointee_tail_ty)),
-                cx.size_of(cx.tcx.mk_imm_ptr(cx.tcx.types.u8))
+                cx.size_of(Ty::new_imm_ptr(cx.tcx, pointee_tail_ty)),
+                cx.size_of(Ty::new_imm_ptr(cx.tcx, cx.tcx.types.u8))
             );
             None
         }
diff --git a/compiler/rustc_codegen_llvm/src/intrinsic.rs b/compiler/rustc_codegen_llvm/src/intrinsic.rs
index 31bafa87814..a254c86c291 100644
--- a/compiler/rustc_codegen_llvm/src/intrinsic.rs
+++ b/compiler/rustc_codegen_llvm/src/intrinsic.rs
@@ -873,23 +873,29 @@ fn get_rust_try_fn<'ll, 'tcx>(
 
     // Define the type up front for the signature of the rust_try function.
     let tcx = cx.tcx;
-    let i8p = tcx.mk_mut_ptr(tcx.types.i8);
+    let i8p = Ty::new_mut_ptr(tcx, tcx.types.i8);
     // `unsafe fn(*mut i8) -> ()`
-    let try_fn_ty = tcx.mk_fn_ptr(ty::Binder::dummy(tcx.mk_fn_sig(
-        [i8p],
-        tcx.mk_unit(),
-        false,
-        hir::Unsafety::Unsafe,
-        Abi::Rust,
-    )));
+    let try_fn_ty = Ty::new_fn_ptr(
+        tcx,
+        ty::Binder::dummy(tcx.mk_fn_sig(
+            [i8p],
+            Ty::new_unit(tcx),
+            false,
+            hir::Unsafety::Unsafe,
+            Abi::Rust,
+        )),
+    );
     // `unsafe fn(*mut i8, *mut i8) -> ()`
-    let catch_fn_ty = tcx.mk_fn_ptr(ty::Binder::dummy(tcx.mk_fn_sig(
-        [i8p, i8p],
-        tcx.mk_unit(),
-        false,
-        hir::Unsafety::Unsafe,
-        Abi::Rust,
-    )));
+    let catch_fn_ty = Ty::new_fn_ptr(
+        tcx,
+        ty::Binder::dummy(tcx.mk_fn_sig(
+            [i8p, i8p],
+            Ty::new_unit(tcx),
+            false,
+            hir::Unsafety::Unsafe,
+            Abi::Rust,
+        )),
+    );
     // `unsafe fn(unsafe fn(*mut i8) -> (), *mut i8, unsafe fn(*mut i8, *mut i8) -> ()) -> i32`
     let rust_fn_sig = ty::Binder::dummy(cx.tcx.mk_fn_sig(
         [try_fn_ty, i8p, catch_fn_ty],
diff --git a/compiler/rustc_codegen_llvm/src/lib.rs b/compiler/rustc_codegen_llvm/src/lib.rs
index 24968e00cc8..24ba28bbc82 100644
--- a/compiler/rustc_codegen_llvm/src/lib.rs
+++ b/compiler/rustc_codegen_llvm/src/lib.rs
@@ -11,7 +11,6 @@
 #![feature(let_chains)]
 #![feature(never_type)]
 #![feature(impl_trait_in_assoc_type)]
-#![feature(c_str_literals)]
 #![recursion_limit = "256"]
 #![allow(rustc::potential_query_instability)]
 #![deny(rustc::untranslatable_diagnostic)]
diff --git a/compiler/rustc_codegen_llvm/src/llvm/ffi.rs b/compiler/rustc_codegen_llvm/src/llvm/ffi.rs
index b667bc4f6d4..3ad546b61e9 100644
--- a/compiler/rustc_codegen_llvm/src/llvm/ffi.rs
+++ b/compiler/rustc_codegen_llvm/src/llvm/ffi.rs
@@ -1,7 +1,7 @@
 #![allow(non_camel_case_types)]
 #![allow(non_upper_case_globals)]
 
-use rustc_codegen_ssa::coverageinfo::map as coverage_map;
+use crate::coverageinfo::map_data as coverage_map;
 
 use super::debuginfo::{
     DIArray, DIBasicType, DIBuilder, DICompositeType, DIDerivedType, DIDescriptor, DIEnumerator,
@@ -585,6 +585,16 @@ pub enum ThreadLocalMode {
     LocalExec,
 }
 
+/// LLVMRustTailCallKind
+#[derive(Copy, Clone)]
+#[repr(C)]
+pub enum TailCallKind {
+    None,
+    Tail,
+    MustTail,
+    NoTail,
+}
+
 /// LLVMRustChecksumKind
 #[derive(Copy, Clone)]
 #[repr(C)]
@@ -1196,6 +1206,7 @@ extern "C" {
         NameLen: size_t,
     ) -> Option<&Value>;
     pub fn LLVMSetTailCall(CallInst: &Value, IsTailCall: Bool);
+    pub fn LLVMRustSetTailCallKind(CallInst: &Value, TKC: TailCallKind);
 
     // Operations on attributes
     pub fn LLVMRustCreateAttrNoValue(C: &Context, attr: AttributeKind) -> &Attribute;
diff --git a/compiler/rustc_codegen_llvm/src/type_of.rs b/compiler/rustc_codegen_llvm/src/type_of.rs
index 3339e4e07ed..58e97be34f2 100644
--- a/compiler/rustc_codegen_llvm/src/type_of.rs
+++ b/compiler/rustc_codegen_llvm/src/type_of.rs
@@ -337,12 +337,13 @@ impl<'tcx> LayoutLlvmExt<'tcx> for TyAndLayout<'tcx> {
             // only wide pointer boxes are handled as pointers
             // thin pointer boxes with scalar allocators are handled by the general logic below
             ty::Adt(def, substs) if def.is_box() && cx.layout_of(substs.type_at(1)).is_zst() => {
-                let ptr_ty = cx.tcx.mk_mut_ptr(self.ty.boxed_ty());
+                let ptr_ty = Ty::new_mut_ptr(cx.tcx, self.ty.boxed_ty());
                 return cx.layout_of(ptr_ty).scalar_pair_element_llvm_type(cx, index, immediate);
             }
             // `dyn* Trait` has the same ABI as `*mut dyn Trait`
             ty::Dynamic(bounds, region, ty::DynStar) => {
-                let ptr_ty = cx.tcx.mk_mut_ptr(cx.tcx.mk_dynamic(bounds, region, ty::Dyn));
+                let ptr_ty =
+                    Ty::new_mut_ptr(cx.tcx, Ty::new_dynamic(cx.tcx, bounds, region, ty::Dyn));
                 return cx.layout_of(ptr_ty).scalar_pair_element_llvm_type(cx, index, immediate);
             }
             _ => {}
diff --git a/compiler/rustc_codegen_llvm/src/va_arg.rs b/compiler/rustc_codegen_llvm/src/va_arg.rs
index b19398e68c2..8800caa71d6 100644
--- a/compiler/rustc_codegen_llvm/src/va_arg.rs
+++ b/compiler/rustc_codegen_llvm/src/va_arg.rs
@@ -73,7 +73,7 @@ fn emit_ptr_va_arg<'ll, 'tcx>(
     let layout = bx.cx.layout_of(target_ty);
     let (llty, size, align) = if indirect {
         (
-            bx.cx.layout_of(bx.cx.tcx.mk_imm_ptr(target_ty)).llvm_type(bx.cx),
+            bx.cx.layout_of(Ty::new_imm_ptr(bx.cx.tcx, target_ty)).llvm_type(bx.cx),
             bx.cx.data_layout().pointer_size,
             bx.cx.data_layout().pointer_align,
         )