125 files changed, 1698 insertions, 905 deletions
diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml
index 25397006ee2..51dd0f81ed1 100644
--- a/.github/workflows/ci.yml
+++ b/.github/workflows/ci.yml
@@ -69,6 +69,8 @@ jobs:
     env:
       CI_JOB_NAME: ${{ matrix.name }}
       CI_JOB_DOC_URL: ${{ matrix.doc_url }}
+      GITHUB_WORKFLOW_RUN_ID: ${{ github.run_id }}
+      GITHUB_REPOSITORY: ${{ github.repository }}
       CARGO_REGISTRIES_CRATES_IO_PROTOCOL: sparse
       # commit of PR sha or commit sha. `GITHUB_SHA` is not accurate for PRs.
       HEAD_SHA: ${{ github.event.pull_request.head.sha || github.sha }}
diff --git a/compiler/rustc_ast_passes/src/ast_validation.rs b/compiler/rustc_ast_passes/src/ast_validation.rs
index a1487ca74be..da739b0e453 100644
--- a/compiler/rustc_ast_passes/src/ast_validation.rs
+++ b/compiler/rustc_ast_passes/src/ast_validation.rs
@@ -334,8 +334,7 @@ impl<'a> AstValidator<'a> {
             .filter(|attr| {
                 let arr = [
                     sym::allow,
-                    sym::cfg,
-                    sym::cfg_attr,
+                    sym::cfg_trace,
                     sym::cfg_attr_trace,
                     sym::deny,
                     sym::expect,
diff --git a/compiler/rustc_ast_pretty/src/pprust/state.rs b/compiler/rustc_ast_pretty/src/pprust/state.rs
index cdb18179449..3dbfc191f8f 100644
--- a/compiler/rustc_ast_pretty/src/pprust/state.rs
+++ b/compiler/rustc_ast_pretty/src/pprust/state.rs
@@ -593,7 +593,7 @@ pub trait PrintState<'a>: std::ops::Deref<Target = pp::Printer> + std::ops::Dere
     }
 
     fn print_attribute_inline(&mut self, attr: &ast::Attribute, is_inline: bool) -> bool {
-        if attr.has_name(sym::cfg_attr_trace) {
+        if attr.has_name(sym::cfg_trace) || attr.has_name(sym::cfg_attr_trace) {
             // It's not a valid identifier, so avoid printing it
             // to keep the printed code reasonably parse-able.
             return false;
diff --git a/compiler/rustc_borrowck/src/nll.rs b/compiler/rustc_borrowck/src/nll.rs
index d0bd364425a..8e7b6f083ac 100644
--- a/compiler/rustc_borrowck/src/nll.rs
+++ b/compiler/rustc_borrowck/src/nll.rs
@@ -1,9 +1,9 @@
 //! The entry point of the NLL borrow checker.
+use std::io; use std::path::PathBuf; use std::rc::Rc; use std::str::FromStr; -use std::{env, io}; use polonius_engine::{Algorithm, Output}; use rustc_index::IndexSlice; @@ -162,9 +162,8 @@ pub(crate) fn compute_regions<'a, 'tcx>( } if polonius_output { - let algorithm = - env::var("POLONIUS_ALGORITHM").unwrap_or_else(|_| String::from("Hybrid")); - let algorithm = Algorithm::from_str(&algorithm).unwrap(); + let algorithm = infcx.tcx.env_var("POLONIUS_ALGORITHM").unwrap_or("Hybrid"); + let algorithm = Algorithm::from_str(algorithm).unwrap(); debug!("compute_regions: using polonius algorithm {:?}", algorithm); let _prof_timer = infcx.tcx.prof.generic_activity("polonius_analysis"); Some(Box::new(Output::compute(polonius_facts, algorithm, false))) diff --git a/compiler/rustc_codegen_ssa/messages.ftl b/compiler/rustc_codegen_ssa/messages.ftl index 95912b01600..954a6014809 100644 --- a/compiler/rustc_codegen_ssa/messages.ftl +++ b/compiler/rustc_codegen_ssa/messages.ftl @@ -10,8 +10,6 @@ codegen_ssa_apple_deployment_target_invalid = codegen_ssa_apple_deployment_target_too_low = deployment target in {$env_var} was set to {$version}, but the minimum supported by `rustc` is {$os_min} -codegen_ssa_apple_sdk_error_sdk_path = failed to get {$sdk_name} SDK path: {$error} - codegen_ssa_archive_build_failure = failed to build archive at `{$path}`: {$error} codegen_ssa_atomic_compare_exchange = Atomic compare-exchange intrinsic missing failure memory ordering @@ -391,8 +389,6 @@ codegen_ssa_unknown_atomic_ordering = unknown ordering in atomic intrinsic codegen_ssa_unknown_reuse_kind = unknown cgu-reuse-kind `{$kind}` specified -codegen_ssa_unsupported_arch = unsupported arch `{$arch}` for os `{$os}` - codegen_ssa_unsupported_instruction_set = target does not support `#[instruction_set]` codegen_ssa_unsupported_link_self_contained = option `-C link-self-contained` is not supported on this target @@ -402,3 +398,20 @@ codegen_ssa_use_cargo_directive = use the `cargo:rustc-link-lib` directive to sp codegen_ssa_version_script_write_failure = failed to write version script: {$error} codegen_ssa_visual_studio_not_installed = you may need to install Visual Studio build tools with the "C++ build tools" workload + +codegen_ssa_xcrun_command_line_tools_insufficient = + when compiling for iOS, tvOS, visionOS or watchOS, you need a full installation of Xcode + +codegen_ssa_xcrun_failed_invoking = invoking `{$command_formatted}` to find {$sdk_name}.sdk failed: {$error} + +codegen_ssa_xcrun_found_developer_dir = found active developer directory at "{$developer_dir}" + +# `xcrun` already outputs a message about missing Xcode installation, so we only augment it with details about env vars. 
+codegen_ssa_xcrun_no_developer_dir = + pass the path of an Xcode installation via the DEVELOPER_DIR environment variable, or an SDK with the SDKROOT environment variable + +codegen_ssa_xcrun_sdk_path_warning = output of `xcrun` while finding {$sdk_name}.sdk + .note = {$stderr} + +codegen_ssa_xcrun_unsuccessful = failed running `{$command_formatted}` to find {$sdk_name}.sdk + .note = {$stdout}{$stderr} diff --git a/compiler/rustc_codegen_ssa/src/back/apple.rs b/compiler/rustc_codegen_ssa/src/back/apple.rs index bfa7635a869..2c8b0ec418d 100644 --- a/compiler/rustc_codegen_ssa/src/back/apple.rs +++ b/compiler/rustc_codegen_ssa/src/back/apple.rs @@ -1,16 +1,40 @@ use std::env; +use std::ffi::OsString; use std::fmt::{Display, from_fn}; use std::num::ParseIntError; +use std::path::PathBuf; +use std::process::Command; +use itertools::Itertools; use rustc_middle::middle::exported_symbols::SymbolExportKind; use rustc_session::Session; use rustc_target::spec::Target; +use tracing::debug; -use crate::errors::AppleDeploymentTarget; +use crate::errors::{AppleDeploymentTarget, XcrunError, XcrunSdkPathWarning}; +use crate::fluent_generated as fluent; #[cfg(test)] mod tests; +/// The canonical name of the desired SDK for a given target. +pub(super) fn sdk_name(target: &Target) -> &'static str { + match (&*target.os, &*target.abi) { + ("macos", "") => "MacOSX", + ("ios", "") => "iPhoneOS", + ("ios", "sim") => "iPhoneSimulator", + // Mac Catalyst uses the macOS SDK + ("ios", "macabi") => "MacOSX", + ("tvos", "") => "AppleTVOS", + ("tvos", "sim") => "AppleTVSimulator", + ("visionos", "") => "XROS", + ("visionos", "sim") => "XRSimulator", + ("watchos", "") => "WatchOS", + ("watchos", "sim") => "WatchSimulator", + (os, abi) => unreachable!("invalid os '{os}' / abi '{abi}' combination for Apple target"), + } +} + pub(super) fn macho_platform(target: &Target) -> u32 { match (&*target.os, &*target.abi) { ("macos", _) => object::macho::PLATFORM_MACOS, @@ -253,3 +277,131 @@ pub(super) fn add_version_to_llvm_target( format!("{arch}-{vendor}-{os}{major}.{minor}.{patch}") } } + +pub(super) fn get_sdk_root(sess: &Session) -> Option<PathBuf> { + let sdk_name = sdk_name(&sess.target); + + match xcrun_show_sdk_path(sdk_name, sess.verbose_internals()) { + Ok((path, stderr)) => { + // Emit extra stderr, such as if `-verbose` was passed, or if `xcrun` emitted a warning. + if !stderr.is_empty() { + sess.dcx().emit_warn(XcrunSdkPathWarning { sdk_name, stderr }); + } + Some(path) + } + Err(err) => { + let mut diag = sess.dcx().create_err(err); + + // Recognize common error cases, and give more Rust-specific error messages for those. + if let Some(developer_dir) = xcode_select_developer_dir() { + diag.arg("developer_dir", &developer_dir); + diag.note(fluent::codegen_ssa_xcrun_found_developer_dir); + if developer_dir.as_os_str().to_string_lossy().contains("CommandLineTools") { + if sdk_name != "MacOSX" { + diag.help(fluent::codegen_ssa_xcrun_command_line_tools_insufficient); + } + } + } else { + diag.help(fluent::codegen_ssa_xcrun_no_developer_dir); + } + + diag.emit(); + None + } + } +} + +/// Invoke `xcrun --sdk $sdk_name --show-sdk-path` to get the SDK path. +/// +/// The exact logic that `xcrun` uses is unspecified (see `man xcrun` for a few details), and may +/// change between macOS and Xcode versions, but it roughly boils down to finding the active +/// developer directory, and then invoking `xcodebuild -sdk $sdk_name -version` to get the SDK +/// details. 
+/// +/// Finding the developer directory is roughly done by looking at, in order: +/// - The `DEVELOPER_DIR` environment variable. +/// - The `/var/db/xcode_select_link` symlink (set by `xcode-select --switch`). +/// - `/Applications/Xcode.app` (hardcoded fallback path). +/// - `/Library/Developer/CommandLineTools` (hardcoded fallback path). +/// +/// Note that `xcrun` caches its result, but with a cold cache this whole operation can be quite +/// slow, especially so the first time it's run after a reboot. +fn xcrun_show_sdk_path( + sdk_name: &'static str, + verbose: bool, +) -> Result<(PathBuf, String), XcrunError> { + let mut cmd = Command::new("xcrun"); + if verbose { + cmd.arg("--verbose"); + } + // The `--sdk` parameter is the same as in xcodebuild, namely either an absolute path to an SDK, + // or the (lowercase) canonical name of an SDK. + cmd.arg("--sdk"); + cmd.arg(&sdk_name.to_lowercase()); + cmd.arg("--show-sdk-path"); + + // We do not stream stdout/stderr lines directly to the user, since whether they are warnings or + // errors depends on the status code at the end. + let output = cmd.output().map_err(|error| XcrunError::FailedInvoking { + sdk_name, + command_formatted: format!("{cmd:?}"), + error, + })?; + + // It is fine to do lossy conversion here, non-UTF-8 paths are quite rare on macOS nowadays + // (only possible with the HFS+ file system), and we only use it for error messages. + let stderr = String::from_utf8_lossy_owned(output.stderr); + if !stderr.is_empty() { + debug!(stderr, "original xcrun stderr"); + } + + // Some versions of `xcodebuild` output beefy errors when invoked via `xcrun`, + // but these are usually red herrings. + let stderr = stderr + .lines() + .filter(|line| { + !line.contains("Writing error result bundle") + && !line.contains("Requested but did not find extension point with identifier") + }) + .join("\n"); + + if output.status.success() { + Ok((stdout_to_path(output.stdout), stderr)) + } else { + // Output both stdout and stderr, since shims of `xcrun` (such as the one provided by + // nixpkgs), do not always use stderr for errors. + let stdout = String::from_utf8_lossy_owned(output.stdout).trim().to_string(); + Err(XcrunError::Unsuccessful { + sdk_name, + command_formatted: format!("{cmd:?}"), + stdout, + stderr, + }) + } +} + +/// Invoke `xcode-select --print-path`, and return the current developer directory. +/// +/// NOTE: We don't do any error handling here, this is only used as a canary in diagnostics (`xcrun` +/// will have already emitted the relevant error information). +fn xcode_select_developer_dir() -> Option<PathBuf> { + let mut cmd = Command::new("xcode-select"); + cmd.arg("--print-path"); + let output = cmd.output().ok()?; + if !output.status.success() { + return None; + } + Some(stdout_to_path(output.stdout)) +} + +fn stdout_to_path(mut stdout: Vec<u8>) -> PathBuf { + // Remove trailing newline. 
+ if let Some(b'\n') = stdout.last() { + let _ = stdout.pop().unwrap(); + } + #[cfg(unix)] + let path = <OsString as std::os::unix::ffi::OsStringExt>::from_vec(stdout); + #[cfg(not(unix))] // Unimportant, this is only used on macOS + let path = OsString::from(String::from_utf8(stdout).unwrap()); + PathBuf::from(path) +} diff --git a/compiler/rustc_codegen_ssa/src/back/apple/tests.rs b/compiler/rustc_codegen_ssa/src/back/apple/tests.rs index 7ccda5a8190..8df740a4bcf 100644 --- a/compiler/rustc_codegen_ssa/src/back/apple/tests.rs +++ b/compiler/rustc_codegen_ssa/src/back/apple/tests.rs @@ -1,4 +1,4 @@ -use super::{add_version_to_llvm_target, parse_version}; +use super::*; #[test] fn test_add_version_to_llvm_target() { @@ -19,3 +19,69 @@ fn test_parse_version() { assert_eq!(parse_version("10.12.6"), Ok((10, 12, 6))); assert_eq!(parse_version("9999.99.99"), Ok((9999, 99, 99))); } + +#[test] +#[cfg_attr(not(target_os = "macos"), ignore = "xcode-select is only available on macOS")] +fn lookup_developer_dir() { + let _developer_dir = xcode_select_developer_dir().unwrap(); +} + +#[test] +#[cfg_attr(not(target_os = "macos"), ignore = "xcrun is only available on macOS")] +fn lookup_sdk() { + let (sdk_path, stderr) = xcrun_show_sdk_path("MacOSX", false).unwrap(); + // Check that the found SDK is valid. + assert!(sdk_path.join("SDKSettings.plist").exists()); + assert_eq!(stderr, ""); + + // Test that the SDK root is a subdir of the developer directory. + if let Some(developer_dir) = xcode_select_developer_dir() { + // Only run this test if SDKROOT is not set (otherwise xcrun may look up via. that). + if std::env::var_os("SDKROOT").is_some() { + assert!(sdk_path.starts_with(&developer_dir)); + } + } +} + +#[test] +#[cfg_attr(not(target_os = "macos"), ignore = "xcrun is only available on macOS")] +fn lookup_sdk_verbose() { + let (_, stderr) = xcrun_show_sdk_path("MacOSX", true).unwrap(); + // Newer xcrun versions should emit something like this: + // + // xcrun: note: looking up SDK with 'xcodebuild -sdk macosx -version Path' + // xcrun: note: xcrun_db = '/var/.../xcrun_db' + // xcrun: note: lookup resolved to: '...' + // xcrun: note: database key is: ... + // + // Or if the value is already cached, something like this: + // + // xcrun: note: database key is: ... + // xcrun: note: lookup resolved in '/var/.../xcrun_db' : '...' + assert!( + stderr.contains("xcrun: note: lookup resolved"), + "stderr should contain lookup note: {stderr}", + ); +} + +#[test] +#[cfg_attr(not(target_os = "macos"), ignore = "xcrun is only available on macOS")] +fn try_lookup_invalid_sdk() { + // As a proxy for testing all the different ways that `xcrun` can fail, + // test the case where an SDK was not found. + let err = xcrun_show_sdk_path("invalid", false).unwrap_err(); + let XcrunError::Unsuccessful { stderr, .. } = err else { + panic!("unexpected error kind: {err:?}"); + }; + // Either one of (depending on if using Command Line Tools or full Xcode): + // xcrun: error: SDK "invalid" cannot be located + // xcodebuild: error: SDK "invalid" cannot be located. 
+ assert!( + stderr.contains(r#"error: SDK "invalid" cannot be located"#), + "stderr should contain xcodebuild note: {stderr}", + ); + assert!( + stderr.contains("xcrun: error: unable to lookup item 'Path' in SDK 'invalid'"), + "stderr should contain xcrun note: {stderr}", + ); +} diff --git a/compiler/rustc_codegen_ssa/src/back/link.rs b/compiler/rustc_codegen_ssa/src/back/link.rs index a564e0e391f..b59d73a9aae 100644 --- a/compiler/rustc_codegen_ssa/src/back/link.rs +++ b/compiler/rustc_codegen_ssa/src/back/link.rs @@ -1560,17 +1560,13 @@ fn print_native_static_libs( match out { OutFileName::Real(path) => { out.overwrite(&lib_args.join(" "), sess); - if !lib_args.is_empty() { - sess.dcx().emit_note(errors::StaticLibraryNativeArtifactsToFile { path }); - } + sess.dcx().emit_note(errors::StaticLibraryNativeArtifactsToFile { path }); } OutFileName::Stdout => { - if !lib_args.is_empty() { - sess.dcx().emit_note(errors::StaticLibraryNativeArtifacts); - // Prefix for greppability - // Note: This must not be translated as tools are allowed to depend on this exact string. - sess.dcx().note(format!("native-static-libs: {}", lib_args.join(" "))); - } + sess.dcx().emit_note(errors::StaticLibraryNativeArtifacts); + // Prefix for greppability + // Note: This must not be translated as tools are allowed to depend on this exact string. + sess.dcx().note(format!("native-static-libs: {}", lib_args.join(" "))); } } } @@ -3205,9 +3201,7 @@ fn add_apple_link_args(cmd: &mut dyn Linker, sess: &Session, flavor: LinkerFlavo } fn add_apple_sdk(cmd: &mut dyn Linker, sess: &Session, flavor: LinkerFlavor) -> Option<PathBuf> { - let arch = &sess.target.arch; let os = &sess.target.os; - let llvm_target = &sess.target.llvm_target; if sess.target.vendor != "apple" || !matches!(os.as_ref(), "ios" | "tvos" | "watchos" | "visionos" | "macos") || !matches!(flavor, LinkerFlavor::Darwin(..)) @@ -3219,37 +3213,7 @@ fn add_apple_sdk(cmd: &mut dyn Linker, sess: &Session, flavor: LinkerFlavor) -> return None; } - let sdk_name = match (arch.as_ref(), os.as_ref()) { - ("aarch64", "tvos") if llvm_target.ends_with("-simulator") => "appletvsimulator", - ("aarch64", "tvos") => "appletvos", - ("x86_64", "tvos") => "appletvsimulator", - ("arm", "ios") => "iphoneos", - ("aarch64", "ios") if llvm_target.contains("macabi") => "macosx", - ("aarch64", "ios") if llvm_target.ends_with("-simulator") => "iphonesimulator", - ("aarch64", "ios") => "iphoneos", - ("x86", "ios") => "iphonesimulator", - ("x86_64", "ios") if llvm_target.contains("macabi") => "macosx", - ("x86_64", "ios") => "iphonesimulator", - ("x86_64", "watchos") => "watchsimulator", - ("arm64_32", "watchos") => "watchos", - ("aarch64", "watchos") if llvm_target.ends_with("-simulator") => "watchsimulator", - ("aarch64", "watchos") => "watchos", - ("aarch64", "visionos") if llvm_target.ends_with("-simulator") => "xrsimulator", - ("aarch64", "visionos") => "xros", - ("arm", "watchos") => "watchos", - (_, "macos") => "macosx", - _ => { - sess.dcx().emit_err(errors::UnsupportedArch { arch, os }); - return None; - } - }; - let sdk_root = match get_apple_sdk_root(sdk_name) { - Ok(s) => s, - Err(e) => { - sess.dcx().emit_err(e); - return None; - } - }; + let sdk_root = sess.time("get_apple_sdk_root", || get_apple_sdk_root(sess))?; match flavor { LinkerFlavor::Darwin(Cc::Yes, _) => { @@ -3259,28 +3223,32 @@ fn add_apple_sdk(cmd: &mut dyn Linker, sess: &Session, flavor: LinkerFlavor) -> // This is admittedly a bit strange, as on most targets // `-isysroot` only applies to include header 
files, but on Apple // targets this also applies to libraries and frameworks. - cmd.cc_args(&["-isysroot", &sdk_root]); + cmd.cc_arg("-isysroot"); + cmd.cc_arg(&sdk_root); } LinkerFlavor::Darwin(Cc::No, _) => { - cmd.link_args(&["-syslibroot", &sdk_root]); + cmd.link_arg("-syslibroot"); + cmd.link_arg(&sdk_root); } _ => unreachable!(), } - Some(sdk_root.into()) + Some(sdk_root) } -fn get_apple_sdk_root(sdk_name: &str) -> Result<String, errors::AppleSdkRootError<'_>> { - // Following what clang does - // (https://github.com/llvm/llvm-project/blob/ - // 296a80102a9b72c3eda80558fb78a3ed8849b341/clang/lib/Driver/ToolChains/Darwin.cpp#L1661-L1678) - // to allow the SDK path to be set. (For clang, xcrun sets - // SDKROOT; for rustc, the user or build system can set it, or we - // can fall back to checking for xcrun on PATH.) +fn get_apple_sdk_root(sess: &Session) -> Option<PathBuf> { if let Ok(sdkroot) = env::var("SDKROOT") { - let p = Path::new(&sdkroot); - match sdk_name { - // Ignore `SDKROOT` if it's clearly set for the wrong platform. + let p = PathBuf::from(&sdkroot); + + // Ignore invalid SDKs, similar to what clang does: + // https://github.com/llvm/llvm-project/blob/llvmorg-19.1.6/clang/lib/Driver/ToolChains/Darwin.cpp#L2212-L2229 + // + // NOTE: Things are complicated here by the fact that `rustc` can be run by Cargo to compile + // build scripts and proc-macros for the host, and thus we need to ignore SDKROOT if it's + // clearly set for the wrong platform. + // + // FIXME(madsmtm): Make this more robust (maybe read `SDKSettings.json` like Clang does?). + match &*apple::sdk_name(&sess.target).to_lowercase() { "appletvos" if sdkroot.contains("TVSimulator.platform") || sdkroot.contains("MacOSX.platform") => {} @@ -3307,26 +3275,11 @@ fn get_apple_sdk_root(sdk_name: &str) -> Result<String, errors::AppleSdkRootErro if sdkroot.contains("XROS.platform") || sdkroot.contains("MacOSX.platform") => {} // Ignore `SDKROOT` if it's not a valid path. 
_ if !p.is_absolute() || p == Path::new("/") || !p.exists() => {} - _ => return Ok(sdkroot), + _ => return Some(p), } } - let res = - Command::new("xcrun").arg("--show-sdk-path").arg("-sdk").arg(sdk_name).output().and_then( - |output| { - if output.status.success() { - Ok(String::from_utf8(output.stdout).unwrap()) - } else { - let error = String::from_utf8(output.stderr); - let error = format!("process exit with error: {}", error.unwrap()); - Err(io::Error::new(io::ErrorKind::Other, &error[..])) - } - }, - ); - match res { - Ok(output) => Ok(output.trim().to_string()), - Err(error) => Err(errors::AppleSdkRootError::SdkPath { sdk_name, error }), - } + apple::get_sdk_root(sess) } /// When using the linker flavors opting in to `lld`, add the necessary paths and arguments to diff --git a/compiler/rustc_codegen_ssa/src/errors.rs b/compiler/rustc_codegen_ssa/src/errors.rs index 0b7cad0c2fd..f52d29baf9d 100644 --- a/compiler/rustc_codegen_ssa/src/errors.rs +++ b/compiler/rustc_codegen_ssa/src/errors.rs @@ -739,13 +739,6 @@ pub enum ExtractBundledLibsError<'a> { } #[derive(Diagnostic)] -#[diag(codegen_ssa_unsupported_arch)] -pub(crate) struct UnsupportedArch<'a> { - pub arch: &'a str, - pub os: &'a str, -} - -#[derive(Diagnostic)] pub(crate) enum AppleDeploymentTarget { #[diag(codegen_ssa_apple_deployment_target_invalid)] Invalid { env_var: &'static str, error: ParseIntError }, @@ -754,12 +747,6 @@ pub(crate) enum AppleDeploymentTarget { } #[derive(Diagnostic)] -pub(crate) enum AppleSdkRootError<'a> { - #[diag(codegen_ssa_apple_sdk_error_sdk_path)] - SdkPath { sdk_name: &'a str, error: Error }, -} - -#[derive(Diagnostic)] #[diag(codegen_ssa_read_file)] pub(crate) struct ReadFileError { pub message: std::io::Error, @@ -1334,3 +1321,26 @@ pub(crate) struct MixedExportNameAndNoMangle { #[suggestion(style = "verbose", code = "", applicability = "machine-applicable")] pub removal_span: Span, } + +#[derive(Diagnostic, Debug)] +pub(crate) enum XcrunError { + #[diag(codegen_ssa_xcrun_failed_invoking)] + FailedInvoking { sdk_name: &'static str, command_formatted: String, error: std::io::Error }, + + #[diag(codegen_ssa_xcrun_unsuccessful)] + #[note] + Unsuccessful { + sdk_name: &'static str, + command_formatted: String, + stdout: String, + stderr: String, + }, +} + +#[derive(Diagnostic, Debug)] +#[diag(codegen_ssa_xcrun_sdk_path_warning)] +#[note] +pub(crate) struct XcrunSdkPathWarning { + pub sdk_name: &'static str, + pub stderr: String, +} diff --git a/compiler/rustc_codegen_ssa/src/lib.rs b/compiler/rustc_codegen_ssa/src/lib.rs index 93c34a2f576..d26d6edf314 100644 --- a/compiler/rustc_codegen_ssa/src/lib.rs +++ b/compiler/rustc_codegen_ssa/src/lib.rs @@ -13,6 +13,7 @@ #![feature(let_chains)] #![feature(negative_impls)] #![feature(rustdoc_internals)] +#![feature(string_from_utf8_lossy_owned)] #![feature(trait_alias)] #![feature(try_blocks)] // tidy-alphabetical-end diff --git a/compiler/rustc_data_structures/src/stable_hasher.rs b/compiler/rustc_data_structures/src/stable_hasher.rs index ffbe54d6206..3a64c924cc2 100644 --- a/compiler/rustc_data_structures/src/stable_hasher.rs +++ b/compiler/rustc_data_structures/src/stable_hasher.rs @@ -564,6 +564,8 @@ where } } +impl_stable_traits_for_trivial_type!(::std::ffi::OsStr); + impl_stable_traits_for_trivial_type!(::std::path::Path); impl_stable_traits_for_trivial_type!(::std::path::PathBuf); diff --git a/compiler/rustc_errors/src/snippet.rs b/compiler/rustc_errors/src/snippet.rs index 8485d7087cf..f09c2ed5534 100644 --- a/compiler/rustc_errors/src/snippet.rs 
+++ b/compiler/rustc_errors/src/snippet.rs @@ -159,11 +159,7 @@ impl Annotation { /// Length of this annotation as displayed in the stderr output pub(crate) fn len(&self) -> usize { // Account for usize underflows - if self.end_col.display > self.start_col.display { - self.end_col.display - self.start_col.display - } else { - self.start_col.display - self.end_col.display - } + self.end_col.display.abs_diff(self.start_col.display) } pub(crate) fn has_label(&self) -> bool { diff --git a/compiler/rustc_expand/src/config.rs b/compiler/rustc_expand/src/config.rs index ada49eef7b2..bcc2703c39b 100644 --- a/compiler/rustc_expand/src/config.rs +++ b/compiler/rustc_expand/src/config.rs @@ -156,6 +156,19 @@ pub fn pre_configure_attrs(sess: &Session, attrs: &[Attribute]) -> ast::AttrVec .collect() } +pub(crate) fn attr_into_trace(mut attr: Attribute, trace_name: Symbol) -> Attribute { + match &mut attr.kind { + AttrKind::Normal(normal) => { + let NormalAttr { item, tokens } = &mut **normal; + item.path.segments[0].ident.name = trace_name; + // This makes the trace attributes unobservable to token-based proc macros. + *tokens = Some(LazyAttrTokenStream::new(AttrTokenStream::default())); + } + AttrKind::DocComment(..) => unreachable!(), + } + attr +} + #[macro_export] macro_rules! configure { ($this:ident, $node:ident) => { @@ -280,16 +293,7 @@ impl<'a> StripUnconfigured<'a> { // A trace attribute left in AST in place of the original `cfg_attr` attribute. // It can later be used by lints or other diagnostics. - let mut trace_attr = cfg_attr.clone(); - match &mut trace_attr.kind { - AttrKind::Normal(normal) => { - let NormalAttr { item, tokens } = &mut **normal; - item.path.segments[0].ident.name = sym::cfg_attr_trace; - // This makes the trace attributes unobservable to token-based proc macros. - *tokens = Some(LazyAttrTokenStream::new(AttrTokenStream::default())); - } - AttrKind::DocComment(..) => unreachable!(), - } + let trace_attr = attr_into_trace(cfg_attr.clone(), sym::cfg_attr_trace); let Some((cfg_predicate, expanded_attrs)) = rustc_parse::parse_cfg_attr(cfg_attr, &self.sess.psess) diff --git a/compiler/rustc_expand/src/expand.rs b/compiler/rustc_expand/src/expand.rs index d0bd8a89d9b..22da1179feb 100644 --- a/compiler/rustc_expand/src/expand.rs +++ b/compiler/rustc_expand/src/expand.rs @@ -33,7 +33,7 @@ use rustc_span::{ErrorGuaranteed, FileName, Ident, LocalExpnId, Span, sym}; use smallvec::SmallVec; use crate::base::*; -use crate::config::StripUnconfigured; +use crate::config::{StripUnconfigured, attr_into_trace}; use crate::errors::{ EmptyDelegationMac, GlobDelegationOutsideImpls, GlobDelegationTraitlessQpath, IncompleteParse, RecursionLimitReached, RemoveExprNotSupported, RemoveNodeNotSupported, UnsupportedKeyValue, @@ -2003,7 +2003,7 @@ impl<'a, 'b> InvocationCollector<'a, 'b> { let attr_name = attr.ident().unwrap().name; // `#[cfg]` and `#[cfg_attr]` are special - they are // eagerly evaluated. - if attr_name != sym::cfg && attr_name != sym::cfg_attr_trace { + if attr_name != sym::cfg_trace && attr_name != sym::cfg_attr_trace { self.cx.sess.psess.buffer_lint( UNUSED_ATTRIBUTES, attr.span, @@ -2027,11 +2027,10 @@ impl<'a, 'b> InvocationCollector<'a, 'b> { ) -> (bool, Option<ast::MetaItem>) { let (res, meta_item) = self.cfg().cfg_true(&attr); if res { - // FIXME: `cfg(TRUE)` attributes do not currently remove themselves during expansion, - // and some tools like rustdoc and clippy rely on that. Find a way to remove them - // while keeping the tools working. 
- self.cx.expanded_inert_attrs.mark(&attr); - node.visit_attrs(|attrs| attrs.insert(pos, attr)); + // A trace attribute left in AST in place of the original `cfg` attribute. + // It can later be used by lints or other diagnostics. + let trace_attr = attr_into_trace(attr, sym::cfg_trace); + node.visit_attrs(|attrs| attrs.insert(pos, trace_attr)); } (res, meta_item) diff --git a/compiler/rustc_feature/src/builtin_attrs.rs b/compiler/rustc_feature/src/builtin_attrs.rs index fd936458f11..6fe65c88f71 100644 --- a/compiler/rustc_feature/src/builtin_attrs.rs +++ b/compiler/rustc_feature/src/builtin_attrs.rs @@ -760,10 +760,14 @@ pub static BUILTIN_ATTRIBUTES: &[BuiltinAttribute] = &[ template!(Word, List: r#""...""#), DuplicatesOk, EncodeCrossCrate::Yes, INTERNAL_UNSTABLE ), - // Trace that is left when a `cfg_attr` attribute is expanded. - // The attribute is not gated, to avoid stability errors, but it cannot be used in stable or - // unstable code directly because `sym::cfg_attr_trace` is not a valid identifier, it can only - // be generated by the compiler. + // Traces that are left when `cfg` and `cfg_attr` attributes are expanded. + // The attributes are not gated, to avoid stability errors, but they cannot be used in stable + // or unstable code directly because `sym::cfg_(attr_)trace` are not valid identifiers, they + // can only be generated by the compiler. + ungated!( + cfg_trace, Normal, template!(Word /* irrelevant */), DuplicatesOk, + EncodeCrossCrate::No + ), ungated!( cfg_attr_trace, Normal, template!(Word /* irrelevant */), DuplicatesOk, EncodeCrossCrate::No diff --git a/compiler/rustc_hir_analysis/src/check/region.rs b/compiler/rustc_hir_analysis/src/check/region.rs index 255f5fee52a..ba8124b11fc 100644 --- a/compiler/rustc_hir_analysis/src/check/region.rs +++ b/compiler/rustc_hir_analysis/src/check/region.rs @@ -25,12 +25,18 @@ use tracing::debug; struct Context { /// The scope that contains any new variables declared, plus its depth in /// the scope tree. - var_parent: Option<(Scope, ScopeDepth)>, + var_parent: Option<Scope>, /// Region parent of expressions, etc., plus its depth in the scope tree. parent: Option<(Scope, ScopeDepth)>, } +impl Context { + fn set_var_parent(&mut self) { + self.var_parent = self.parent.map(|(p, _)| p); + } +} + struct ScopeResolutionVisitor<'tcx> { tcx: TyCtxt<'tcx>, @@ -78,7 +84,7 @@ fn record_var_lifetime(visitor: &mut ScopeResolutionVisitor<'_>, var_id: hir::It // // extern fn isalnum(c: c_int) -> c_int } - Some((parent_scope, _)) => visitor.scope_tree.record_var_scope(var_id, parent_scope), + Some(parent_scope) => visitor.scope_tree.record_var_scope(var_id, parent_scope), } } @@ -113,7 +119,7 @@ fn resolve_block<'tcx>(visitor: &mut ScopeResolutionVisitor<'tcx>, blk: &'tcx hi // itself has returned. 
visitor.enter_node_scope_with_dtor(blk.hir_id.local_id); - visitor.cx.var_parent = visitor.cx.parent; + visitor.cx.set_var_parent(); { // This block should be kept approximately in sync with @@ -132,7 +138,7 @@ fn resolve_block<'tcx>(visitor: &mut ScopeResolutionVisitor<'tcx>, blk: &'tcx hi local_id: blk.hir_id.local_id, data: ScopeData::Remainder(FirstStatementIndex::new(i)), }); - visitor.cx.var_parent = visitor.cx.parent; + visitor.cx.set_var_parent(); visitor.visit_stmt(statement); // We need to back out temporarily to the last enclosing scope // for the `else` block, so that even the temporaries receiving @@ -157,7 +163,7 @@ fn resolve_block<'tcx>(visitor: &mut ScopeResolutionVisitor<'tcx>, blk: &'tcx hi local_id: blk.hir_id.local_id, data: ScopeData::Remainder(FirstStatementIndex::new(i)), }); - visitor.cx.var_parent = visitor.cx.parent; + visitor.cx.set_var_parent(); visitor.visit_stmt(statement) } hir::StmtKind::Item(..) => { @@ -207,7 +213,7 @@ fn resolve_arm<'tcx>(visitor: &mut ScopeResolutionVisitor<'tcx>, arm: &'tcx hir: visitor.terminating_scopes.insert(arm.hir_id.local_id); visitor.enter_node_scope_with_dtor(arm.hir_id.local_id); - visitor.cx.var_parent = visitor.cx.parent; + visitor.cx.set_var_parent(); if let Some(expr) = arm.guard && !has_let_expr(expr) @@ -221,8 +227,6 @@ fn resolve_arm<'tcx>(visitor: &mut ScopeResolutionVisitor<'tcx>, arm: &'tcx hir: } fn resolve_pat<'tcx>(visitor: &mut ScopeResolutionVisitor<'tcx>, pat: &'tcx hir::Pat<'tcx>) { - visitor.record_child_scope(Scope { local_id: pat.hir_id.local_id, data: ScopeData::Node }); - // If this is a binding then record the lifetime of that binding. if let PatKind::Binding(..) = pat.kind { record_var_lifetime(visitor, pat.hir_id.local_id); @@ -486,7 +490,7 @@ fn resolve_expr<'tcx>(visitor: &mut ScopeResolutionVisitor<'tcx>, expr: &'tcx hi ScopeData::IfThen }; visitor.enter_scope(Scope { local_id: then.hir_id.local_id, data }); - visitor.cx.var_parent = visitor.cx.parent; + visitor.cx.set_var_parent(); visitor.visit_expr(cond); visitor.visit_expr(then); visitor.cx = expr_cx; @@ -501,7 +505,7 @@ fn resolve_expr<'tcx>(visitor: &mut ScopeResolutionVisitor<'tcx>, expr: &'tcx hi ScopeData::IfThen }; visitor.enter_scope(Scope { local_id: then.hir_id.local_id, data }); - visitor.cx.var_parent = visitor.cx.parent; + visitor.cx.set_var_parent(); visitor.visit_expr(cond); visitor.visit_expr(then); visitor.cx = expr_cx; @@ -560,7 +564,7 @@ fn resolve_local<'tcx>( ) { debug!("resolve_local(pat={:?}, init={:?})", pat, init); - let blk_scope = visitor.cx.var_parent.map(|(p, _)| p); + let blk_scope = visitor.cx.var_parent; // As an exception to the normal rules governing temporary // lifetimes, initializers in a let have a temporary lifetime @@ -625,10 +629,7 @@ fn resolve_local<'tcx>( if is_binding_pat(pat) { visitor.scope_tree.record_rvalue_candidate( expr.hir_id, - RvalueCandidateType::Pattern { - target: expr.hir_id.local_id, - lifetime: blk_scope, - }, + RvalueCandidate { target: expr.hir_id.local_id, lifetime: blk_scope }, ); } } @@ -733,10 +734,7 @@ fn resolve_local<'tcx>( record_rvalue_scope_if_borrow_expr(visitor, subexpr, blk_id); visitor.scope_tree.record_rvalue_candidate( subexpr.hir_id, - RvalueCandidateType::Borrow { - target: subexpr.hir_id.local_id, - lifetime: blk_id, - }, + RvalueCandidate { target: subexpr.hir_id.local_id, lifetime: blk_id }, ); } hir::ExprKind::Struct(_, fields, _) => { @@ -857,13 +855,12 @@ impl<'tcx> Visitor<'tcx> for ScopeResolutionVisitor<'tcx> { self.enter_body(body.value.hir_id, |this| { if 
this.tcx.hir_body_owner_kind(owner_id).is_fn_or_closure() { // The arguments and `self` are parented to the fn. - this.cx.var_parent = this.cx.parent.take(); + this.cx.set_var_parent(); for param in body.params { this.visit_pat(param.pat); } // The body of the every fn is a root scope. - this.cx.parent = this.cx.var_parent; this.visit_expr(body.value) } else { // Only functions have an outer terminating (drop) scope, while diff --git a/compiler/rustc_hir_typeck/src/rvalue_scopes.rs b/compiler/rustc_hir_typeck/src/rvalue_scopes.rs index 98d7f777d6b..973dc7141e6 100644 --- a/compiler/rustc_hir_typeck/src/rvalue_scopes.rs +++ b/compiler/rustc_hir_typeck/src/rvalue_scopes.rs @@ -2,7 +2,7 @@ use hir::Node; use hir::def_id::DefId; use rustc_hir as hir; use rustc_middle::bug; -use rustc_middle::middle::region::{RvalueCandidateType, Scope, ScopeTree}; +use rustc_middle::middle::region::{RvalueCandidate, Scope, ScopeTree}; use rustc_middle::ty::RvalueScopes; use tracing::debug; @@ -55,15 +55,11 @@ fn record_rvalue_scope_rec( fn record_rvalue_scope( rvalue_scopes: &mut RvalueScopes, expr: &hir::Expr<'_>, - candidate: &RvalueCandidateType, + candidate: &RvalueCandidate, ) { debug!("resolve_rvalue_scope(expr={expr:?}, candidate={candidate:?})"); - match candidate { - RvalueCandidateType::Borrow { lifetime, .. } - | RvalueCandidateType::Pattern { lifetime, .. } => { - record_rvalue_scope_rec(rvalue_scopes, expr, *lifetime) - } // FIXME(@dingxiangfei2009): handle the candidates in the function call arguments - } + record_rvalue_scope_rec(rvalue_scopes, expr, candidate.lifetime) + // FIXME(@dingxiangfei2009): handle the candidates in the function call arguments } pub(crate) fn resolve_rvalue_scopes<'a, 'tcx>( diff --git a/compiler/rustc_interface/src/passes.rs b/compiler/rustc_interface/src/passes.rs index 8be7ba7455e..2440f0639c8 100644 --- a/compiler/rustc_interface/src/passes.rs +++ b/compiler/rustc_interface/src/passes.rs @@ -1,5 +1,5 @@ use std::any::Any; -use std::ffi::OsString; +use std::ffi::{OsStr, OsString}; use std::io::{self, BufWriter, Write}; use std::path::{Path, PathBuf}; use std::sync::{Arc, LazyLock, OnceLock}; @@ -361,6 +361,31 @@ fn early_lint_checks(tcx: TyCtxt<'_>, (): ()) { ) } +fn env_var_os<'tcx>(tcx: TyCtxt<'tcx>, key: &'tcx OsStr) -> Option<&'tcx OsStr> { + let value = env::var_os(key); + + let value_tcx = value.as_ref().map(|value| { + let encoded_bytes = tcx.arena.alloc_slice(value.as_encoded_bytes()); + debug_assert_eq!(value.as_encoded_bytes(), encoded_bytes); + // SAFETY: The bytes came from `as_encoded_bytes`, and we assume that + // `alloc_slice` is implemented correctly, and passes the same bytes + // back (debug asserted above). + unsafe { OsStr::from_encoded_bytes_unchecked(encoded_bytes) } + }); + + // Also add the variable to Cargo's dependency tracking + // + // NOTE: This only works for passes run before `write_dep_info`. See that + // for extension points for configuring environment variables to be + // properly change-tracked. + tcx.sess.psess.env_depinfo.borrow_mut().insert(( + Symbol::intern(&key.to_string_lossy()), + value.as_ref().and_then(|value| value.to_str()).map(|value| Symbol::intern(&value)), + )); + + value_tcx +} + // Returns all the paths that correspond to generated files. 
fn generated_output_paths( tcx: TyCtxt<'_>, @@ -725,6 +750,7 @@ pub static DEFAULT_QUERY_PROVIDERS: LazyLock<Providers> = LazyLock::new(|| { |tcx, _| tcx.arena.alloc_from_iter(tcx.resolutions(()).stripped_cfg_items.steal()); providers.resolutions = |tcx, ()| tcx.resolver_for_lowering_raw(()).1; providers.early_lint_checks = early_lint_checks; + providers.env_var_os = env_var_os; limits::provide(providers); proc_macro_decls::provide(providers); rustc_const_eval::provide(providers); diff --git a/compiler/rustc_interface/src/util.rs b/compiler/rustc_interface/src/util.rs index 333786f0ca3..83d80938b4e 100644 --- a/compiler/rustc_interface/src/util.rs +++ b/compiler/rustc_interface/src/util.rs @@ -18,7 +18,7 @@ use rustc_session::{EarlyDiagCtxt, Session, filesearch}; use rustc_span::edit_distance::find_best_match_for_name; use rustc_span::edition::Edition; use rustc_span::source_map::SourceMapInputs; -use rustc_span::{Symbol, sym}; +use rustc_span::{SessionGlobals, Symbol, sym}; use rustc_target::spec::Target; use tracing::info; @@ -188,26 +188,11 @@ pub(crate) fn run_in_thread_pool_with_globals<F: FnOnce(CurrentGcx) -> R + Send, // On deadlock, creates a new thread and forwards information in thread // locals to it. The new thread runs the deadlock handler. - // Get a `GlobalCtxt` reference from `CurrentGcx` as we cannot rely on having a - // `TyCtxt` TLS reference here. - let query_map = current_gcx2.access(|gcx| { - tls::enter_context(&tls::ImplicitCtxt::new(gcx), || { - tls::with(|tcx| { - match QueryCtxt::new(tcx).collect_active_jobs() { - Ok(query_map) => query_map, - Err(_) => { - // There was an unexpected error collecting all active jobs, which we need - // to find cycles to break. - // We want to avoid panicking in the deadlock handler, so we abort instead. - eprintln!("internal compiler error: failed to get query map in deadlock handler, aborting process"); - process::abort(); - } - } - }) - }) - }); - let query_map = FromDyn::from(query_map); + let current_gcx2 = current_gcx2.clone(); let registry = rayon_core::Registry::current(); + let session_globals = rustc_span::with_session_globals(|session_globals| { + session_globals as *const SessionGlobals as usize + }); thread::Builder::new() .name("rustc query cycle handler".to_string()) .spawn(move || { @@ -217,7 +202,24 @@ pub(crate) fn run_in_thread_pool_with_globals<F: FnOnce(CurrentGcx) -> R + Send, // otherwise the compiler could just hang, process::abort(); }); - break_query_cycles(query_map.into_inner(), ®istry); + + // Get a `GlobalCtxt` reference from `CurrentGcx` as we cannot rely on having a + // `TyCtxt` TLS reference here. + current_gcx2.access(|gcx| { + tls::enter_context(&tls::ImplicitCtxt::new(gcx), || { + tls::with(|tcx| { + // Accessing session globals is sound as they outlive `GlobalCtxt`. + // They are needed to hash query keys containing spans or symbols. + let query_map = rustc_span::set_session_globals_then(unsafe { &*(session_globals as *const SessionGlobals) }, || { + // Ensure there was no errors collecting all active jobs. + // We need the complete map to ensure we find a cycle to break. 
+ QueryCtxt::new(tcx).collect_active_jobs().ok().expect("failed to collect active queries in deadlock handler") + }); + break_query_cycles(query_map, ®istry); + }) + }) + }); + on_panic.disable(); }) .unwrap(); diff --git a/compiler/rustc_lint/messages.ftl b/compiler/rustc_lint/messages.ftl index d51865810b9..0a3eb434d3f 100644 --- a/compiler/rustc_lint/messages.ftl +++ b/compiler/rustc_lint/messages.ftl @@ -799,6 +799,9 @@ lint_tykind_kind = usage of `ty::TyKind::<kind>` lint_type_ir_inherent_usage = do not use `rustc_type_ir::inherent` unless you're inside of the trait solver .note = the method or struct you're looking for is likely defined somewhere else downstream in the compiler +lint_type_ir_trait_usage = do not use `rustc_type_ir::Interner` or `rustc_type_ir::InferCtxtLike` unless you're inside of the trait solver + .note = the method or struct you're looking for is likely defined somewhere else downstream in the compiler + lint_undropped_manually_drops = calls to `std::mem::drop` with `std::mem::ManuallyDrop` instead of the inner value does nothing .label = argument has type `{$arg_ty}` .suggestion = use `std::mem::ManuallyDrop::into_inner` to get the inner value diff --git a/compiler/rustc_lint/src/internal.rs b/compiler/rustc_lint/src/internal.rs index b359ee790a5..1d4be24ea9f 100644 --- a/compiler/rustc_lint/src/internal.rs +++ b/compiler/rustc_lint/src/internal.rs @@ -1,24 +1,21 @@ //! Some lints that are only useful in the compiler or crates that use compiler internals, such as //! Clippy. -use rustc_ast as ast; +use rustc_hir::HirId; use rustc_hir::def::Res; use rustc_hir::def_id::DefId; -use rustc_hir::{ - AmbigArg, BinOp, BinOpKind, Expr, ExprKind, GenericArg, HirId, Impl, Item, ItemKind, Node, Pat, - PatExpr, PatExprKind, PatKind, Path, PathSegment, QPath, Ty, TyKind, -}; use rustc_middle::ty::{self, GenericArgsRef, Ty as MiddleTy}; use rustc_session::{declare_lint_pass, declare_tool_lint}; use rustc_span::hygiene::{ExpnKind, MacroKind}; use rustc_span::{Span, sym}; use tracing::debug; +use {rustc_ast as ast, rustc_hir as hir}; use crate::lints::{ BadOptAccessDiag, DefaultHashTypesDiag, DiagOutOfImpl, LintPassByHand, NonGlobImportTypeIrInherent, QueryInstability, QueryUntracked, SpanUseEqCtxtDiag, SymbolInternStringLiteralDiag, TyQualified, TykindDiag, TykindKind, TypeIrInherentUsage, - UntranslatableDiag, + TypeIrTraitUsage, UntranslatableDiag, }; use crate::{EarlyContext, EarlyLintPass, LateContext, LateLintPass, LintContext}; @@ -37,9 +34,12 @@ declare_tool_lint! { declare_lint_pass!(DefaultHashTypes => [DEFAULT_HASH_TYPES]); impl LateLintPass<'_> for DefaultHashTypes { - fn check_path(&mut self, cx: &LateContext<'_>, path: &Path<'_>, hir_id: HirId) { + fn check_path(&mut self, cx: &LateContext<'_>, path: &hir::Path<'_>, hir_id: HirId) { let Res::Def(rustc_hir::def::DefKind::Struct, def_id) = path.res else { return }; - if matches!(cx.tcx.hir_node(hir_id), Node::Item(Item { kind: ItemKind::Use(..), .. })) { + if matches!( + cx.tcx.hir_node(hir_id), + hir::Node::Item(hir::Item { kind: hir::ItemKind::Use(..), .. }) + ) { // Don't lint imports, only actual usages. return; } @@ -60,10 +60,10 @@ impl LateLintPass<'_> for DefaultHashTypes { /// get the `DefId` and `GenericArgsRef` of the function. fn typeck_results_of_method_fn<'tcx>( cx: &LateContext<'tcx>, - expr: &Expr<'_>, + expr: &hir::Expr<'_>, ) -> Option<(Span, DefId, ty::GenericArgsRef<'tcx>)> { match expr.kind { - ExprKind::MethodCall(segment, ..) + hir::ExprKind::MethodCall(segment, ..) 
if let Some(def_id) = cx.typeck_results().type_dependent_def_id(expr.hir_id) => { Some((segment.ident.span, def_id, cx.typeck_results().node_args(expr.hir_id))) @@ -102,7 +102,7 @@ declare_tool_lint! { declare_lint_pass!(QueryStability => [POTENTIAL_QUERY_INSTABILITY, UNTRACKED_QUERY_INFORMATION]); impl LateLintPass<'_> for QueryStability { - fn check_expr(&mut self, cx: &LateContext<'_>, expr: &Expr<'_>) { + fn check_expr(&mut self, cx: &LateContext<'_>, expr: &hir::Expr<'_>) { let Some((span, def_id, args)) = typeck_results_of_method_fn(cx, expr) else { return }; if let Ok(Some(instance)) = ty::Instance::try_resolve(cx.tcx, cx.typing_env(), def_id, args) { @@ -164,21 +164,25 @@ impl<'tcx> LateLintPass<'tcx> for TyTyKind { } } - fn check_ty(&mut self, cx: &LateContext<'_>, ty: &'tcx Ty<'tcx, AmbigArg>) { + fn check_ty(&mut self, cx: &LateContext<'_>, ty: &'tcx hir::Ty<'tcx, hir::AmbigArg>) { match &ty.kind { - TyKind::Path(QPath::Resolved(_, path)) => { + hir::TyKind::Path(hir::QPath::Resolved(_, path)) => { if lint_ty_kind_usage(cx, &path.res) { let span = match cx.tcx.parent_hir_node(ty.hir_id) { - Node::PatExpr(PatExpr { kind: PatExprKind::Path(qpath), .. }) - | Node::Pat(Pat { - kind: PatKind::TupleStruct(qpath, ..) | PatKind::Struct(qpath, ..), + hir::Node::PatExpr(hir::PatExpr { + kind: hir::PatExprKind::Path(qpath), + .. + }) + | hir::Node::Pat(hir::Pat { + kind: + hir::PatKind::TupleStruct(qpath, ..) | hir::PatKind::Struct(qpath, ..), .. }) - | Node::Expr( - Expr { kind: ExprKind::Path(qpath), .. } - | &Expr { kind: ExprKind::Struct(qpath, ..), .. }, + | hir::Node::Expr( + hir::Expr { kind: hir::ExprKind::Path(qpath), .. } + | &hir::Expr { kind: hir::ExprKind::Struct(qpath, ..), .. }, ) => { - if let QPath::TypeRelative(qpath_ty, ..) = qpath + if let hir::QPath::TypeRelative(qpath_ty, ..) = qpath && qpath_ty.hir_id == ty.hir_id { Some(path.span) @@ -223,7 +227,7 @@ fn lint_ty_kind_usage(cx: &LateContext<'_>, res: &Res) -> bool { } } -fn is_ty_or_ty_ctxt(cx: &LateContext<'_>, path: &Path<'_>) -> Option<String> { +fn is_ty_or_ty_ctxt(cx: &LateContext<'_>, path: &hir::Path<'_>) -> Option<String> { match &path.res { Res::Def(_, def_id) => { if let Some(name @ (sym::Ty | sym::TyCtxt)) = cx.tcx.get_diagnostic_name(*def_id) { @@ -244,13 +248,17 @@ fn is_ty_or_ty_ctxt(cx: &LateContext<'_>, path: &Path<'_>) -> Option<String> { None } -fn gen_args(segment: &PathSegment<'_>) -> String { +fn gen_args(segment: &hir::PathSegment<'_>) -> String { if let Some(args) = &segment.args { let lifetimes = args .args .iter() .filter_map(|arg| { - if let GenericArg::Lifetime(lt) = arg { Some(lt.ident.to_string()) } else { None } + if let hir::GenericArg::Lifetime(lt) = arg { + Some(lt.ident.to_string()) + } else { + None + } }) .collect::<Vec<_>>(); @@ -272,7 +280,7 @@ declare_tool_lint! { } declare_tool_lint! { - /// The `usage_of_type_ir_inherent` lint detects usage `rustc_type_ir::inherent`. + /// The `usage_of_type_ir_inherent` lint detects usage of `rustc_type_ir::inherent`. /// /// This module should only be used within the trait solver. pub rustc::USAGE_OF_TYPE_IR_INHERENT, @@ -281,10 +289,43 @@ declare_tool_lint! { report_in_external_macro: true } -declare_lint_pass!(TypeIr => [NON_GLOB_IMPORT_OF_TYPE_IR_INHERENT, USAGE_OF_TYPE_IR_INHERENT]); +declare_tool_lint! { + /// The `usage_of_type_ir_traits` lint detects usage of `rustc_type_ir::Interner`, + /// or `rustc_infer::InferCtxtLike`. 
+ /// + /// Methods of this trait should only be used within the type system abstraction layer, + /// and in the generic next trait solver implementation. Look for an analogously named + /// method on `TyCtxt` or `InferCtxt` (respectively). + pub rustc::USAGE_OF_TYPE_IR_TRAITS, + Allow, + "usage `rustc_type_ir`-specific abstraction traits outside of trait system", + report_in_external_macro: true +} + +declare_lint_pass!(TypeIr => [NON_GLOB_IMPORT_OF_TYPE_IR_INHERENT, USAGE_OF_TYPE_IR_INHERENT, USAGE_OF_TYPE_IR_TRAITS]); impl<'tcx> LateLintPass<'tcx> for TypeIr { - fn check_item(&mut self, cx: &LateContext<'tcx>, item: &'tcx Item<'tcx>) { + fn check_expr(&mut self, cx: &LateContext<'tcx>, expr: &'tcx hir::Expr<'tcx>) { + let res_def_id = match expr.kind { + hir::ExprKind::Path(hir::QPath::Resolved(_, path)) => path.res.opt_def_id(), + hir::ExprKind::Path(hir::QPath::TypeRelative(..)) | hir::ExprKind::MethodCall(..) => { + cx.typeck_results().type_dependent_def_id(expr.hir_id) + } + _ => return, + }; + let Some(res_def_id) = res_def_id else { + return; + }; + if let Some(assoc_item) = cx.tcx.opt_associated_item(res_def_id) + && let Some(trait_def_id) = assoc_item.trait_container(cx.tcx) + && (cx.tcx.is_diagnostic_item(sym::type_ir_interner, trait_def_id) + | cx.tcx.is_diagnostic_item(sym::type_ir_infer_ctxt_like, trait_def_id)) + { + cx.emit_span_lint(USAGE_OF_TYPE_IR_TRAITS, expr.span, TypeIrTraitUsage); + } + } + + fn check_item(&mut self, cx: &LateContext<'tcx>, item: &'tcx hir::Item<'tcx>) { let rustc_hir::ItemKind::Use(path, kind) = item.kind else { return }; let is_mod_inherent = |def_id| cx.tcx.is_diagnostic_item(sym::type_ir_inherent, def_id); @@ -394,15 +435,15 @@ declare_tool_lint! { declare_lint_pass!(Diagnostics => [UNTRANSLATABLE_DIAGNOSTIC, DIAGNOSTIC_OUTSIDE_OF_IMPL]); impl LateLintPass<'_> for Diagnostics { - fn check_expr(&mut self, cx: &LateContext<'_>, expr: &Expr<'_>) { - let collect_args_tys_and_spans = |args: &[Expr<'_>], reserve_one_extra: bool| { + fn check_expr(&mut self, cx: &LateContext<'_>, expr: &hir::Expr<'_>) { + let collect_args_tys_and_spans = |args: &[hir::Expr<'_>], reserve_one_extra: bool| { let mut result = Vec::with_capacity(args.len() + usize::from(reserve_one_extra)); result.extend(args.iter().map(|arg| (cx.typeck_results().expr_ty(arg), arg.span))); result }; // Only check function calls and method calls. let (span, def_id, fn_gen_args, arg_tys_and_spans) = match expr.kind { - ExprKind::Call(callee, args) => { + hir::ExprKind::Call(callee, args) => { match cx.typeck_results().node_type(callee.hir_id).kind() { &ty::FnDef(def_id, fn_gen_args) => { (callee.span, def_id, fn_gen_args, collect_args_tys_and_spans(args, false)) @@ -410,7 +451,7 @@ impl LateLintPass<'_> for Diagnostics { _ => return, // occurs for fns passed as args } } - ExprKind::MethodCall(_segment, _recv, args, _span) => { + hir::ExprKind::MethodCall(_segment, _recv, args, _span) => { let Some((span, def_id, fn_gen_args)) = typeck_results_of_method_fn(cx, expr) else { return; @@ -514,8 +555,8 @@ impl Diagnostics { let mut is_inside_appropriate_impl = false; for (_hir_id, parent) in cx.tcx.hir_parent_iter(current_id) { debug!(?parent); - if let Node::Item(Item { kind: ItemKind::Impl(impl_), .. }) = parent - && let Impl { of_trait: Some(of_trait), .. } = impl_ + if let hir::Node::Item(hir::Item { kind: hir::ItemKind::Impl(impl_), .. }) = parent + && let hir::Impl { of_trait: Some(of_trait), .. 
} = impl_ && let Some(def_id) = of_trait.trait_def_id() && let Some(name) = cx.tcx.get_diagnostic_name(def_id) && matches!(name, sym::Diagnostic | sym::Subdiagnostic | sym::LintDiagnostic) @@ -543,8 +584,8 @@ declare_tool_lint! { declare_lint_pass!(BadOptAccess => [BAD_OPT_ACCESS]); impl LateLintPass<'_> for BadOptAccess { - fn check_expr(&mut self, cx: &LateContext<'_>, expr: &Expr<'_>) { - let ExprKind::Field(base, target) = expr.kind else { return }; + fn check_expr(&mut self, cx: &LateContext<'_>, expr: &hir::Expr<'_>) { + let hir::ExprKind::Field(base, target) = expr.kind else { return }; let Some(adt_def) = cx.typeck_results().expr_ty(base).ty_adt_def() else { return }; // Skip types without `#[rustc_lint_opt_ty]` - only so that the rest of the lint can be // avoided. @@ -581,9 +622,12 @@ declare_tool_lint! { declare_lint_pass!(SpanUseEqCtxt => [SPAN_USE_EQ_CTXT]); impl<'tcx> LateLintPass<'tcx> for SpanUseEqCtxt { - fn check_expr(&mut self, cx: &LateContext<'tcx>, expr: &Expr<'_>) { - if let ExprKind::Binary(BinOp { node: BinOpKind::Eq | BinOpKind::Ne, .. }, lhs, rhs) = - expr.kind + fn check_expr(&mut self, cx: &LateContext<'tcx>, expr: &hir::Expr<'_>) { + if let hir::ExprKind::Binary( + hir::BinOp { node: hir::BinOpKind::Eq | hir::BinOpKind::Ne, .. }, + lhs, + rhs, + ) = expr.kind { if is_span_ctxt_call(cx, lhs) && is_span_ctxt_call(cx, rhs) { cx.emit_span_lint(SPAN_USE_EQ_CTXT, expr.span, SpanUseEqCtxtDiag); @@ -592,9 +636,9 @@ impl<'tcx> LateLintPass<'tcx> for SpanUseEqCtxt { } } -fn is_span_ctxt_call(cx: &LateContext<'_>, expr: &Expr<'_>) -> bool { +fn is_span_ctxt_call(cx: &LateContext<'_>, expr: &hir::Expr<'_>) -> bool { match &expr.kind { - ExprKind::MethodCall(..) => cx + hir::ExprKind::MethodCall(..) => cx .typeck_results() .type_dependent_def_id(expr.hir_id) .is_some_and(|call_did| cx.tcx.is_diagnostic_item(sym::SpanCtxt, call_did)), @@ -617,11 +661,11 @@ declare_lint_pass!(SymbolInternStringLiteral => [SYMBOL_INTERN_STRING_LITERAL]); impl<'tcx> LateLintPass<'tcx> for SymbolInternStringLiteral { fn check_expr(&mut self, cx: &LateContext<'tcx>, expr: &'tcx rustc_hir::Expr<'tcx>) { - if let ExprKind::Call(path, [arg]) = expr.kind - && let ExprKind::Path(ref qpath) = path.kind + if let hir::ExprKind::Call(path, [arg]) = expr.kind + && let hir::ExprKind::Path(ref qpath) = path.kind && let Some(def_id) = cx.qpath_res(qpath, path.hir_id).opt_def_id() && cx.tcx.is_diagnostic_item(sym::SymbolIntern, def_id) - && let ExprKind::Lit(kind) = arg.kind + && let hir::ExprKind::Lit(kind) = arg.kind && let rustc_ast::LitKind::Str(_, _) = kind.node { cx.emit_span_lint( diff --git a/compiler/rustc_lint/src/lib.rs b/compiler/rustc_lint/src/lib.rs index c38a7540018..cd474f1b7db 100644 --- a/compiler/rustc_lint/src/lib.rs +++ b/compiler/rustc_lint/src/lib.rs @@ -645,6 +645,7 @@ fn register_internals(store: &mut LintStore) { LintId::of(USAGE_OF_QUALIFIED_TY), LintId::of(NON_GLOB_IMPORT_OF_TYPE_IR_INHERENT), LintId::of(USAGE_OF_TYPE_IR_INHERENT), + LintId::of(USAGE_OF_TYPE_IR_TRAITS), LintId::of(BAD_OPT_ACCESS), LintId::of(SPAN_USE_EQ_CTXT), ], diff --git a/compiler/rustc_lint/src/lints.rs b/compiler/rustc_lint/src/lints.rs index 00586309572..036d68d13fa 100644 --- a/compiler/rustc_lint/src/lints.rs +++ b/compiler/rustc_lint/src/lints.rs @@ -944,6 +944,11 @@ pub(crate) struct TyQualified { pub(crate) struct TypeIrInherentUsage; #[derive(LintDiagnostic)] +#[diag(lint_type_ir_trait_usage)] +#[note] +pub(crate) struct TypeIrTraitUsage; + +#[derive(LintDiagnostic)] 
#[diag(lint_non_glob_import_type_ir_inherent)] pub(crate) struct NonGlobImportTypeIrInherent { #[suggestion(code = "{snippet}", applicability = "maybe-incorrect")] diff --git a/compiler/rustc_lint/src/non_local_def.rs b/compiler/rustc_lint/src/non_local_def.rs index f8e0a94f9ec..9ed11d9cc82 100644 --- a/compiler/rustc_lint/src/non_local_def.rs +++ b/compiler/rustc_lint/src/non_local_def.rs @@ -104,8 +104,10 @@ impl<'tcx> LateLintPass<'tcx> for NonLocalDefinitions { // determining if we are in a doctest context can't currently be determined // by the code itself (there are no specific attributes), but fortunately rustdoc // sets a perma-unstable env var for libtest so we just reuse that for now - let is_at_toplevel_doctest = - || self.body_depth == 2 && std::env::var("UNSTABLE_RUSTDOC_TEST_PATH").is_ok(); + let is_at_toplevel_doctest = || { + self.body_depth == 2 + && cx.tcx.env_var_os("UNSTABLE_RUSTDOC_TEST_PATH".as_ref()).is_some() + }; match item.kind { ItemKind::Impl(impl_) => { diff --git a/compiler/rustc_middle/src/middle/region.rs b/compiler/rustc_middle/src/middle/region.rs index 66861519e17..66ece8f0e52 100644 --- a/compiler/rustc_middle/src/middle/region.rs +++ b/compiler/rustc_middle/src/middle/region.rs @@ -224,7 +224,7 @@ pub struct ScopeTree { /// and not the enclosing *statement*. Expressions that are not present in this /// table are not rvalue candidates. The set of rvalue candidates is computed /// during type check based on a traversal of the AST. - pub rvalue_candidates: HirIdMap<RvalueCandidateType>, + pub rvalue_candidates: HirIdMap<RvalueCandidate>, /// Backwards incompatible scoping that will be introduced in future editions. /// This information is used later for linting to identify locals and @@ -308,15 +308,14 @@ pub struct ScopeTree { pub yield_in_scope: UnordMap<Scope, Vec<YieldData>>, } -/// Identifies the reason that a given expression is an rvalue candidate -/// (see the `rvalue_candidates` field for more information what rvalue -/// candidates in general). In constants, the `lifetime` field is None -/// to indicate that certain expressions escape into 'static and -/// should have no local cleanup scope. +/// See the `rvalue_candidates` field for more information on rvalue +/// candidates in general. +/// The `lifetime` field is None to indicate that certain expressions escape +/// into 'static and should have no local cleanup scope. #[derive(Debug, Copy, Clone, HashStable)] -pub enum RvalueCandidateType { - Borrow { target: hir::ItemLocalId, lifetime: Option<Scope> }, - Pattern { target: hir::ItemLocalId, lifetime: Option<Scope> }, +pub struct RvalueCandidate { + pub target: hir::ItemLocalId, + pub lifetime: Option<Scope>, } #[derive(Debug, Copy, Clone, HashStable)] @@ -344,16 +343,12 @@ impl ScopeTree { self.var_map.insert(var, lifetime); } - pub fn record_rvalue_candidate(&mut self, var: HirId, candidate_type: RvalueCandidateType) { - debug!("record_rvalue_candidate(var={var:?}, type={candidate_type:?})"); - match &candidate_type { - RvalueCandidateType::Borrow { lifetime: Some(lifetime), .. } - | RvalueCandidateType::Pattern { lifetime: Some(lifetime), .. 
} => { - assert!(var.local_id != lifetime.local_id) - } - _ => {} + pub fn record_rvalue_candidate(&mut self, var: HirId, candidate: RvalueCandidate) { + debug!("record_rvalue_candidate(var={var:?}, candidate={candidate:?})"); + if let Some(lifetime) = &candidate.lifetime { + assert!(var.local_id != lifetime.local_id) } - self.rvalue_candidates.insert(var, candidate_type); + self.rvalue_candidates.insert(var, candidate); } /// Returns the narrowest scope that encloses `id`, if any. diff --git a/compiler/rustc_middle/src/query/erase.rs b/compiler/rustc_middle/src/query/erase.rs index 7bbaa0496d5..6c6b9a5510c 100644 --- a/compiler/rustc_middle/src/query/erase.rs +++ b/compiler/rustc_middle/src/query/erase.rs @@ -1,3 +1,4 @@ +use std::ffi::OsStr; use std::intrinsics::transmute_unchecked; use std::mem::MaybeUninit; @@ -67,6 +68,10 @@ impl<T> EraseType for &'_ [T] { type Result = [u8; size_of::<&'static [()]>()]; } +impl EraseType for &'_ OsStr { + type Result = [u8; size_of::<&'static OsStr>()]; +} + impl<T> EraseType for &'_ ty::List<T> { type Result = [u8; size_of::<&'static ty::List<()>>()]; } @@ -174,6 +179,10 @@ impl<T> EraseType for Option<&'_ [T]> { type Result = [u8; size_of::<Option<&'static [()]>>()]; } +impl EraseType for Option<&'_ OsStr> { + type Result = [u8; size_of::<Option<&'static OsStr>>()]; +} + impl EraseType for Option<mir::DestructuredConstant<'_>> { type Result = [u8; size_of::<Option<mir::DestructuredConstant<'static>>>()]; } diff --git a/compiler/rustc_middle/src/query/keys.rs b/compiler/rustc_middle/src/query/keys.rs index 98314b5abfd..c382bcd726f 100644 --- a/compiler/rustc_middle/src/query/keys.rs +++ b/compiler/rustc_middle/src/query/keys.rs @@ -1,5 +1,7 @@ //! Defines the set of legal keys that can be used in queries. +use std::ffi::OsStr; + use rustc_hir::def_id::{CrateNum, DefId, LOCAL_CRATE, LocalDefId, LocalModDefId, ModDefId}; use rustc_hir::hir_id::{HirId, OwnerId}; use rustc_query_system::dep_graph::DepNodeIndex; @@ -498,6 +500,14 @@ impl Key for Option<Symbol> { } } +impl<'tcx> Key for &'tcx OsStr { + type Cache<V> = DefaultCache<Self, V>; + + fn default_span(&self, _tcx: TyCtxt<'_>) -> Span { + DUMMY_SP + } +} + /// Canonical query goals correspond to abstract trait operations that /// are not tied to any crate in particular. impl<'tcx, T: Clone> Key for CanonicalQueryInput<'tcx, T> { diff --git a/compiler/rustc_middle/src/query/mod.rs b/compiler/rustc_middle/src/query/mod.rs index 527c18addbe..d7ed703f4ae 100644 --- a/compiler/rustc_middle/src/query/mod.rs +++ b/compiler/rustc_middle/src/query/mod.rs @@ -6,6 +6,7 @@ #![allow(unused_parens)] +use std::ffi::OsStr; use std::mem; use std::path::PathBuf; use std::sync::Arc; @@ -30,7 +31,9 @@ use rustc_index::IndexVec; use rustc_lint_defs::LintId; use rustc_macros::rustc_queries; use rustc_query_system::ich::StableHashingContext; -use rustc_query_system::query::{QueryCache, QueryMode, QueryState, try_get_cached}; +use rustc_query_system::query::{ + QueryCache, QueryMode, QueryStackDeferred, QueryState, try_get_cached, +}; use rustc_session::Limits; use rustc_session::config::{EntryFnType, OptLevel, OutputFilenames, SymbolManglingVersion}; use rustc_session::cstore::{ @@ -119,6 +122,21 @@ rustc_queries! { desc { "perform lints prior to AST lowering" } } + /// Tracked access to environment variables. + /// + /// Useful for the implementation of `std::env!`, `proc-macro`s change + /// detection and other changes in the compiler's behaviour that is easier + /// to control with an environment variable than a flag. 
+ /// + /// NOTE: This currently does not work with dependency info in the + /// analysis, codegen and linking passes, place extra code at the top of + /// `rustc_interface::passes::write_dep_info` to make that work. + query env_var_os(key: &'tcx OsStr) -> Option<&'tcx OsStr> { + // Environment variables are global state + eval_always + desc { "get the value of an environment variable" } + } + query resolutions(_: ()) -> &'tcx ty::ResolverGlobalCtxt { no_hash desc { "getting the resolver outputs" } diff --git a/compiler/rustc_middle/src/query/plumbing.rs b/compiler/rustc_middle/src/query/plumbing.rs index 4834444ed1d..a099f770417 100644 --- a/compiler/rustc_middle/src/query/plumbing.rs +++ b/compiler/rustc_middle/src/query/plumbing.rs @@ -488,7 +488,7 @@ macro_rules! define_callbacks { #[derive(Default)] pub struct QueryStates<'tcx> { $( - pub $name: QueryState<$($K)*>, + pub $name: QueryState<$($K)*, QueryStackDeferred<'tcx>>, )* } diff --git a/compiler/rustc_middle/src/ty/context.rs b/compiler/rustc_middle/src/ty/context.rs index 834d1f2a4a8..08d4c1f9cf2 100644 --- a/compiler/rustc_middle/src/ty/context.rs +++ b/compiler/rustc_middle/src/ty/context.rs @@ -7,6 +7,8 @@ pub mod tls; use std::assert_matches::{assert_matches, debug_assert_matches}; use std::borrow::Borrow; use std::cmp::Ordering; +use std::env::VarError; +use std::ffi::OsStr; use std::hash::{Hash, Hasher}; use std::marker::PhantomData; use std::ops::{Bound, Deref}; @@ -1883,6 +1885,15 @@ impl<'tcx> TyCtxt<'tcx> { } None } + + /// Helper to get a tracked environment variable via. [`TyCtxt::env_var_os`] and converting to + /// UTF-8 like [`std::env::var`]. + pub fn env_var<K: ?Sized + AsRef<OsStr>>(self, key: &'tcx K) -> Result<&'tcx str, VarError> { + match self.env_var_os(key.as_ref()) { + Some(value) => value.to_str().ok_or_else(|| VarError::NotUnicode(value.to_os_string())), + None => Err(VarError::NotPresent), + } + } } impl<'tcx> TyCtxtAt<'tcx> { diff --git a/compiler/rustc_middle/src/ty/generics.rs b/compiler/rustc_middle/src/ty/generics.rs index 85d9db7ee74..d4cc562e70c 100644 --- a/compiler/rustc_middle/src/ty/generics.rs +++ b/compiler/rustc_middle/src/ty/generics.rs @@ -73,9 +73,7 @@ impl GenericParamDef { pub fn is_anonymous_lifetime(&self) -> bool { match self.kind { - GenericParamDefKind::Lifetime => { - self.name == kw::UnderscoreLifetime || self.name == kw::Empty - } + GenericParamDefKind::Lifetime => self.name == kw::UnderscoreLifetime, _ => false, } } diff --git a/compiler/rustc_middle/src/ty/mod.rs b/compiler/rustc_middle/src/ty/mod.rs index 6bdd0a0647d..ac98cbc8d6c 100644 --- a/compiler/rustc_middle/src/ty/mod.rs +++ b/compiler/rustc_middle/src/ty/mod.rs @@ -457,7 +457,7 @@ impl EarlyParamRegion { /// Does this early bound region have a name? Early bound regions normally /// always have names except when using anonymous lifetimes (`'_`). pub fn has_name(&self) -> bool { - self.name != kw::UnderscoreLifetime && self.name != kw::Empty + self.name != kw::UnderscoreLifetime } } diff --git a/compiler/rustc_middle/src/ty/print/pretty.rs b/compiler/rustc_middle/src/ty/print/pretty.rs index 3ef8ecc59e4..3281cb4135a 100644 --- a/compiler/rustc_middle/src/ty/print/pretty.rs +++ b/compiler/rustc_middle/src/ty/print/pretty.rs @@ -2591,11 +2591,9 @@ impl<'tcx> FmtPrinter<'_, 'tcx> { // to fit that into a short string. Hence the recommendation to use // `explain_region()` or `note_and_explain_region()`. 
match *region { - ty::ReEarlyParam(ref data) => { - if data.name != kw::Empty { - p!(write("{}", data.name)); - return Ok(()); - } + ty::ReEarlyParam(data) => { + p!(write("{}", data.name)); + return Ok(()); } ty::ReLateParam(ty::LateParamRegion { kind, .. }) => { if let Some(name) = kind.get_name() { @@ -2834,7 +2832,7 @@ impl<'tcx> FmtPrinter<'_, 'tcx> { (name, ty::BoundRegionKind::Named(CRATE_DEF_ID.to_def_id(), name)) } - ty::BoundRegionKind::Named(def_id, kw::UnderscoreLifetime | kw::Empty) => { + ty::BoundRegionKind::Named(def_id, kw::UnderscoreLifetime) => { let name = next_name(self); if let Some(lt_idx) = lifetime_idx { diff --git a/compiler/rustc_middle/src/ty/region.rs b/compiler/rustc_middle/src/ty/region.rs index fb52cf96b02..c78306f2ca3 100644 --- a/compiler/rustc_middle/src/ty/region.rs +++ b/compiler/rustc_middle/src/ty/region.rs @@ -400,9 +400,7 @@ impl LateParamRegionKind { pub fn is_named(&self) -> bool { match *self { - LateParamRegionKind::Named(_, name) => { - name != kw::UnderscoreLifetime && name != kw::Empty - } + LateParamRegionKind::Named(_, name) => name != kw::UnderscoreLifetime, _ => false, } } @@ -475,7 +473,7 @@ impl core::fmt::Debug for BoundRegion { impl BoundRegionKind { pub fn is_named(&self) -> bool { match *self { - BoundRegionKind::Named(_, name) => name != kw::UnderscoreLifetime && name != kw::Empty, + BoundRegionKind::Named(_, name) => name != kw::UnderscoreLifetime, _ => false, } } diff --git a/compiler/rustc_middle/src/values.rs b/compiler/rustc_middle/src/values.rs index 9450ce7ec44..39fcc686c55 100644 --- a/compiler/rustc_middle/src/values.rs +++ b/compiler/rustc_middle/src/values.rs @@ -88,7 +88,7 @@ impl<'tcx> Value<TyCtxt<'tcx>> for Representability { if info.query.dep_kind == dep_kinds::representability && let Some(field_id) = info.query.def_id && let Some(field_id) = field_id.as_local() - && let Some(DefKind::Field) = info.query.def_kind + && let Some(DefKind::Field) = info.query.info.def_kind { let parent_id = tcx.parent(field_id.to_def_id()); let item_id = match tcx.def_kind(parent_id) { @@ -216,7 +216,7 @@ impl<'tcx, T> Value<TyCtxt<'tcx>> for Result<T, &'_ ty::layout::LayoutError<'_>> continue; }; let frame_span = - frame.query.default_span(cycle[(i + 1) % cycle.len()].span); + frame.query.info.default_span(cycle[(i + 1) % cycle.len()].span); if frame_span.is_dummy() { continue; } diff --git a/compiler/rustc_mir_transform/src/validate.rs b/compiler/rustc_mir_transform/src/validate.rs index 231d7c2ef02..e7930f0a1e3 100644 --- a/compiler/rustc_mir_transform/src/validate.rs +++ b/compiler/rustc_mir_transform/src/validate.rs @@ -12,6 +12,7 @@ use rustc_middle::mir::coverage::CoverageKind; use rustc_middle::mir::visit::{NonUseContext, PlaceContext, Visitor}; use rustc_middle::mir::*; use rustc_middle::ty::adjustment::PointerCoercion; +use rustc_middle::ty::print::with_no_trimmed_paths; use rustc_middle::ty::{ self, CoroutineArgsExt, InstanceKind, ScalarInt, Ty, TyCtxt, TypeVisitableExt, Upcast, Variance, }; @@ -56,7 +57,7 @@ impl<'tcx> crate::MirPass<'tcx> for Validator { ty::Coroutine(..) 
=> ExternAbi::Rust, // No need to do MIR validation on error bodies ty::Error(_) => return, - _ => span_bug!(body.span, "unexpected body ty: {body_ty:?}"), + _ => span_bug!(body.span, "unexpected body ty: {body_ty}"), }; ty::layout::fn_can_unwind(tcx, Some(def_id), body_abi) @@ -543,7 +544,13 @@ pub(super) fn validate_types<'tcx>( caller_body: &Body<'tcx>, ) -> Vec<(Location, String)> { let mut type_checker = TypeChecker { body, caller_body, tcx, typing_env, failures: Vec::new() }; - type_checker.visit_body(body); + // The type checker formats a bunch of strings with type names in it, but these strings + // are not always going to be encountered on the error path since the inliner also uses + // the validator, and there are certain kinds of inlining (even for valid code) that + // can cause validation errors (mostly around where clauses and rigid projections). + with_no_trimmed_paths!({ + type_checker.visit_body(body); + }); type_checker.failures } @@ -655,7 +662,7 @@ impl<'a, 'tcx> Visitor<'tcx> for TypeChecker<'a, 'tcx> { ProjectionElem::Index(index) => { let index_ty = self.body.local_decls[index].ty; if index_ty != self.tcx.types.usize { - self.fail(location, format!("bad index ({index_ty:?} != usize)")) + self.fail(location, format!("bad index ({index_ty} != usize)")) } } ProjectionElem::Deref @@ -664,10 +671,7 @@ impl<'a, 'tcx> Visitor<'tcx> for TypeChecker<'a, 'tcx> { let base_ty = place_ref.ty(&self.body.local_decls, self.tcx).ty; if base_ty.is_box() { - self.fail( - location, - format!("{base_ty:?} dereferenced after ElaborateBoxDerefs"), - ) + self.fail(location, format!("{base_ty} dereferenced after ElaborateBoxDerefs")) } } ProjectionElem::Field(f, ty) => { @@ -680,7 +684,7 @@ impl<'a, 'tcx> Visitor<'tcx> for TypeChecker<'a, 'tcx> { this.fail( location, format!( - "Field projection `{place_ref:?}.{f:?}` specified type `{ty:?}`, but actual type is `{f_ty:?}`" + "Field projection `{place_ref:?}.{f:?}` specified type `{ty}`, but actual type is `{f_ty}`" ) ) } @@ -806,7 +810,7 @@ impl<'a, 'tcx> Visitor<'tcx> for TypeChecker<'a, 'tcx> { self.fail( location, format!( - "Failed subtyping {ty:#?} and {:#?}", + "Failed subtyping {ty} and {}", place_ref.ty(&self.body.local_decls, self.tcx).ty ), ) @@ -826,7 +830,7 @@ impl<'a, 'tcx> Visitor<'tcx> for TypeChecker<'a, 'tcx> { self.fail( location, format!( - "Cannot unwrap unsafe binder {binder_ty:?} into type {unwrapped_ty:?}" + "Cannot unwrap unsafe binder {binder_ty:?} into type {unwrapped_ty}" ), ); } @@ -841,7 +845,7 @@ impl<'a, 'tcx> Visitor<'tcx> for TypeChecker<'a, 'tcx> { if ty.is_union() || ty.is_enum() { self.fail( START_BLOCK.start_location(), - format!("invalid type {ty:?} in debuginfo for {:?}", debuginfo.name), + format!("invalid type {ty} in debuginfo for {:?}", debuginfo.name), ); } if projection.is_empty() { @@ -1064,15 +1068,13 @@ impl<'a, 'tcx> Visitor<'tcx> for TypeChecker<'a, 'tcx> { if !self.mir_assign_valid_types(a, b) { self.fail( location, - format!("Cannot {op:?} compare incompatible types {a:?} and {b:?}"), + format!("Cannot {op:?} compare incompatible types {a} and {b}"), ); } } else if a != b { self.fail( location, - format!( - "Cannot perform binary op {op:?} on unequal types {a:?} and {b:?}" - ), + format!("Cannot perform binary op {op:?} on unequal types {a} and {b}"), ); } } @@ -1081,7 +1083,7 @@ impl<'a, 'tcx> Visitor<'tcx> for TypeChecker<'a, 'tcx> { Offset => { check_kinds!(a, "Cannot offset non-pointer type {:?}", ty::RawPtr(..)); if b != self.tcx.types.isize && b != self.tcx.types.usize { - 
self.fail(location, format!("Cannot offset by non-isize type {b:?}")); + self.fail(location, format!("Cannot offset by non-isize type {b}")); } } Eq | Lt | Le | Ne | Ge | Gt => { @@ -1313,7 +1315,7 @@ impl<'a, 'tcx> Visitor<'tcx> for TypeChecker<'a, 'tcx> { { self.fail( location, - format!("Cannot transmute from non-`Sized` type {op_ty:?}"), + format!("Cannot transmute from non-`Sized` type {op_ty}"), ); } if !self @@ -1340,7 +1342,7 @@ impl<'a, 'tcx> Visitor<'tcx> for TypeChecker<'a, 'tcx> { } Rvalue::NullaryOp(NullOp::OffsetOf(indices), container) => { let fail_out_of_bounds = |this: &mut Self, location, field, ty| { - this.fail(location, format!("Out of bounds field {field:?} for {ty:?}")); + this.fail(location, format!("Out of bounds field {field:?} for {ty}")); }; let mut current_ty = *container; @@ -1374,7 +1376,7 @@ impl<'a, 'tcx> Visitor<'tcx> for TypeChecker<'a, 'tcx> { _ => { self.fail( location, - format!("Cannot get offset ({variant:?}, {field:?}) from type {current_ty:?}"), + format!("Cannot get offset ({variant:?}, {field:?}) from type {current_ty}"), ); return; } @@ -1403,7 +1405,7 @@ impl<'a, 'tcx> Visitor<'tcx> for TypeChecker<'a, 'tcx> { if !self.mir_assign_valid_types(unwrapped_ty, binder_inner_ty) { self.fail( location, - format!("Cannot wrap {unwrapped_ty:?} into unsafe binder {binder_ty:?}"), + format!("Cannot wrap {unwrapped_ty} into unsafe binder {binder_ty:?}"), ); } } @@ -1489,24 +1491,27 @@ impl<'a, 'tcx> Visitor<'tcx> for TypeChecker<'a, 'tcx> { // since CopyNonOverlapping is parametrized by 1 type, // we only need to check that they are equal and not keep an extra parameter. if !self.mir_assign_valid_types(op_src_ty, op_dst_ty) { - self.fail(location, format!("bad arg ({op_src_ty:?} != {op_dst_ty:?})")); + self.fail(location, format!("bad arg ({op_src_ty} != {op_dst_ty})")); } let op_cnt_ty = count.ty(&self.body.local_decls, self.tcx); if op_cnt_ty != self.tcx.types.usize { - self.fail(location, format!("bad arg ({op_cnt_ty:?} != usize)")) + self.fail(location, format!("bad arg ({op_cnt_ty} != usize)")) } } StatementKind::SetDiscriminant { place, .. } => { if self.body.phase < MirPhase::Runtime(RuntimePhase::Initial) { self.fail(location, "`SetDiscriminant`is not allowed until deaggregation"); } - let pty = place.ty(&self.body.local_decls, self.tcx).ty.kind(); - if !matches!(pty, ty::Adt(..) | ty::Coroutine(..) | ty::Alias(ty::Opaque, ..)) { + let pty = place.ty(&self.body.local_decls, self.tcx).ty; + if !matches!( + pty.kind(), + ty::Adt(..) | ty::Coroutine(..) | ty::Alias(ty::Opaque, ..) 
+ ) { self.fail( location, format!( - "`SetDiscriminant` is only allowed on ADTs and coroutines, not {pty:?}" + "`SetDiscriminant` is only allowed on ADTs and coroutines, not {pty}" ), ); } @@ -1555,7 +1560,7 @@ impl<'a, 'tcx> Visitor<'tcx> for TypeChecker<'a, 'tcx> { if ScalarInt::try_from_uint(value, size).is_none() { self.fail( location, - format!("the value {value:#x} is not a proper {switch_ty:?}"), + format!("the value {value:#x} is not a proper {switch_ty}"), ) } } diff --git a/compiler/rustc_monomorphize/src/collector.rs b/compiler/rustc_monomorphize/src/collector.rs index 67fca1d7c29..2a1b20ba48b 100644 --- a/compiler/rustc_monomorphize/src/collector.rs +++ b/compiler/rustc_monomorphize/src/collector.rs @@ -225,13 +225,13 @@ use rustc_middle::ty::adjustment::{CustomCoerceUnsized, PointerCoercion}; use rustc_middle::ty::layout::ValidityRequirement; use rustc_middle::ty::print::{shrunk_instance_name, with_no_trimmed_paths}; use rustc_middle::ty::{ - self, GenericArgs, GenericParamDefKind, Instance, InstanceKind, Interner, Ty, TyCtxt, - TypeFoldable, TypeVisitableExt, VtblEntry, + self, GenericArgs, GenericParamDefKind, Instance, InstanceKind, Ty, TyCtxt, TypeFoldable, + TypeVisitableExt, VtblEntry, }; use rustc_middle::util::Providers; use rustc_middle::{bug, span_bug}; use rustc_session::Limit; -use rustc_session::config::EntryFnType; +use rustc_session::config::{DebugInfo, EntryFnType}; use rustc_span::source_map::{Spanned, dummy_spanned, respan}; use rustc_span::{DUMMY_SP, Span}; use tracing::{debug, instrument, trace}; @@ -967,7 +967,7 @@ fn should_codegen_locally<'tcx>(tcx: TyCtxt<'tcx>, instance: Instance<'tcx>) -> { // `#[rustc_force_inline]` items should never be codegened. This should be caught by // the MIR validator. - tcx.delay_bug("attempt to codegen `#[rustc_force_inline]` item"); + tcx.dcx().delayed_bug("attempt to codegen `#[rustc_force_inline]` item"); } if def_id.is_local() { @@ -1235,6 +1235,11 @@ fn collect_items_of_instance<'tcx>( }; if mode == CollectionMode::UsedItems { + if tcx.sess.opts.debuginfo == DebugInfo::Full { + for var_debug_info in &body.var_debug_info { + collector.visit_var_debug_info(var_debug_info); + } + } for (bb, data) in traversal::mono_reachable(body, tcx, instance) { collector.visit_basic_block_data(bb, data) } diff --git a/compiler/rustc_next_trait_solver/src/lib.rs b/compiler/rustc_next_trait_solver/src/lib.rs index f6963a79067..f575fe03019 100644 --- a/compiler/rustc_next_trait_solver/src/lib.rs +++ b/compiler/rustc_next_trait_solver/src/lib.rs @@ -6,6 +6,7 @@ // tidy-alphabetical-start #![allow(rustc::usage_of_type_ir_inherent)] +#![cfg_attr(not(bootstrap), allow(rustc::usage_of_type_ir_traits))] // tidy-alphabetical-end pub mod canonicalizer; diff --git a/compiler/rustc_parse/src/validate_attr.rs b/compiler/rustc_parse/src/validate_attr.rs index 9ecde2a9eb5..6bbd650dcdf 100644 --- a/compiler/rustc_parse/src/validate_attr.rs +++ b/compiler/rustc_parse/src/validate_attr.rs @@ -16,7 +16,8 @@ use rustc_span::{Span, Symbol, sym}; use crate::{errors, parse_in}; pub fn check_attr(psess: &ParseSess, attr: &Attribute) { - if attr.is_doc_comment() || attr.has_name(sym::cfg_attr_trace) { + if attr.is_doc_comment() || attr.has_name(sym::cfg_trace) || attr.has_name(sym::cfg_attr_trace) + { return; } @@ -215,11 +216,7 @@ pub fn check_builtin_meta_item( template: AttributeTemplate, deny_unsafety: bool, ) { - // Some special attributes like `cfg` must be checked - // before the generic check, so we skip them here. 
- let should_skip = |name| name == sym::cfg; - - if !should_skip(name) && !is_attr_template_compatible(&template, &meta.kind) { + if !is_attr_template_compatible(&template, &meta.kind) { emit_malformed_attribute(psess, style, meta.span, name, template); } diff --git a/compiler/rustc_passes/src/check_attr.rs b/compiler/rustc_passes/src/check_attr.rs index 9238c73cdb1..1e1fb42a48f 100644 --- a/compiler/rustc_passes/src/check_attr.rs +++ b/compiler/rustc_passes/src/check_attr.rs @@ -272,6 +272,7 @@ impl<'tcx> CheckAttrVisitor<'tcx> { | sym::forbid | sym::cfg | sym::cfg_attr + | sym::cfg_trace | sym::cfg_attr_trace // need to be fixed | sym::cfi_encoding // FIXME(cfi_encoding) @@ -574,8 +575,7 @@ impl<'tcx> CheckAttrVisitor<'tcx> { // NOTE: when making changes to this list, check that `error_codes/E0736.md` remains accurate const ALLOW_LIST: &[rustc_span::Symbol] = &[ // conditional compilation - sym::cfg, - sym::cfg_attr, + sym::cfg_trace, sym::cfg_attr_trace, // testing (allowed here so better errors can be generated in `rustc_builtin_macros::test`) sym::test, @@ -2656,7 +2656,7 @@ impl<'tcx> Visitor<'tcx> for CheckAttrVisitor<'tcx> { // only `#[cfg]` and `#[cfg_attr]` are allowed, but it should be removed // if we allow more attributes (e.g., tool attributes and `allow/deny/warn`) // in where clauses. After that, only `self.check_attributes` should be enough. - const ATTRS_ALLOWED: &[Symbol] = &[sym::cfg, sym::cfg_attr, sym::cfg_attr_trace]; + const ATTRS_ALLOWED: &[Symbol] = &[sym::cfg_trace, sym::cfg_attr_trace]; let spans = self .tcx .hir_attrs(where_predicate.hir_id) diff --git a/compiler/rustc_query_impl/src/lib.rs b/compiler/rustc_query_impl/src/lib.rs index a83c388c747..30a9e718d23 100644 --- a/compiler/rustc_query_impl/src/lib.rs +++ b/compiler/rustc_query_impl/src/lib.rs @@ -26,8 +26,8 @@ use rustc_middle::ty::TyCtxt; use rustc_query_system::dep_graph::SerializedDepNodeIndex; use rustc_query_system::ich::StableHashingContext; use rustc_query_system::query::{ - CycleError, HashResult, QueryCache, QueryConfig, QueryMap, QueryMode, QueryState, - get_query_incr, get_query_non_incr, + CycleError, HashResult, QueryCache, QueryConfig, QueryMap, QueryMode, QueryStackDeferred, + QueryState, get_query_incr, get_query_non_incr, }; use rustc_query_system::{HandleCycleError, Value}; use rustc_span::{ErrorGuaranteed, Span}; @@ -84,7 +84,10 @@ where } #[inline(always)] - fn query_state<'a>(self, qcx: QueryCtxt<'tcx>) -> &'a QueryState<Self::Key> + fn query_state<'a>( + self, + qcx: QueryCtxt<'tcx>, + ) -> &'a QueryState<Self::Key, QueryStackDeferred<'tcx>> where QueryCtxt<'tcx>: 'a, { @@ -93,7 +96,7 @@ where unsafe { &*(&qcx.tcx.query_system.states as *const QueryStates<'tcx>) .byte_add(self.dynamic.query_state) - .cast::<QueryState<Self::Key>>() + .cast::<QueryState<Self::Key, QueryStackDeferred<'tcx>>>() } } diff --git a/compiler/rustc_query_impl/src/plumbing.rs b/compiler/rustc_query_impl/src/plumbing.rs index 55281cd5ac7..3238c7a0912 100644 --- a/compiler/rustc_query_impl/src/plumbing.rs +++ b/compiler/rustc_query_impl/src/plumbing.rs @@ -5,6 +5,7 @@ use std::num::NonZero; use rustc_data_structures::stable_hasher::{HashStable, StableHasher}; +use rustc_data_structures::sync::{DynSend, DynSync}; use rustc_data_structures::unord::UnordMap; use rustc_hashes::Hash64; use rustc_index::Idx; @@ -24,8 +25,8 @@ use rustc_middle::ty::{self, TyCtxt}; use rustc_query_system::dep_graph::{DepNodeParams, HasDepContext}; use rustc_query_system::ich::StableHashingContext; use rustc_query_system::query::{ - 
QueryCache, QueryConfig, QueryContext, QueryJobId, QueryMap, QuerySideEffect, QueryStackFrame, - force_query, + QueryCache, QueryConfig, QueryContext, QueryJobId, QueryMap, QuerySideEffect, + QueryStackDeferred, QueryStackFrame, QueryStackFrameExtra, force_query, }; use rustc_query_system::{QueryOverflow, QueryOverflowNote}; use rustc_serialize::{Decodable, Encodable}; @@ -65,7 +66,9 @@ impl<'tcx> HasDepContext for QueryCtxt<'tcx> { } } -impl QueryContext for QueryCtxt<'_> { +impl<'tcx> QueryContext for QueryCtxt<'tcx> { + type QueryInfo = QueryStackDeferred<'tcx>; + #[inline] fn next_job_id(self) -> QueryJobId { QueryJobId( @@ -82,7 +85,9 @@ impl QueryContext for QueryCtxt<'_> { /// Returns a query map representing active query jobs. /// It returns an incomplete map as an error if it fails /// to take locks. - fn collect_active_jobs(self) -> Result<QueryMap, QueryMap> { + fn collect_active_jobs( + self, + ) -> Result<QueryMap<QueryStackDeferred<'tcx>>, QueryMap<QueryStackDeferred<'tcx>>> { let mut jobs = QueryMap::default(); let mut complete = true; @@ -95,6 +100,13 @@ impl QueryContext for QueryCtxt<'_> { if complete { Ok(jobs) } else { Err(jobs) } } + fn lift_query_info( + self, + info: &QueryStackDeferred<'tcx>, + ) -> rustc_query_system::query::QueryStackFrameExtra { + info.extract() + } + // Interactions with on_disk_cache fn load_side_effect( self, @@ -159,7 +171,10 @@ impl QueryContext for QueryCtxt<'_> { self.sess.dcx().emit_fatal(QueryOverflow { span: info.job.span, - note: QueryOverflowNote { desc: info.query.description, depth }, + note: QueryOverflowNote { + desc: self.lift_query_info(&info.query.info).description, + depth, + }, suggested_limit, crate_name: self.crate_name(LOCAL_CRATE), }); @@ -296,16 +311,17 @@ macro_rules! should_ever_cache_on_disk { }; } -pub(crate) fn create_query_frame< - 'tcx, - K: Copy + Key + for<'a> HashStable<StableHashingContext<'a>>, ->( - tcx: TyCtxt<'tcx>, - do_describe: fn(TyCtxt<'tcx>, K) -> String, - key: K, - kind: DepKind, - name: &'static str, -) -> QueryStackFrame { +fn create_query_frame_extra<'tcx, K: Key + Copy + 'tcx>( + (tcx, key, kind, name, do_describe): ( + TyCtxt<'tcx>, + K, + DepKind, + &'static str, + fn(TyCtxt<'tcx>, K) -> String, + ), +) -> QueryStackFrameExtra { + let def_id = key.key_as_def_id(); + // If reduced queries are requested, we may be printing a query stack due // to a panic. Avoid using `default_span` and `def_kind` in that case. let reduce_queries = with_reduced_queries(); @@ -324,13 +340,28 @@ pub(crate) fn create_query_frame< } else { Some(key.default_span(tcx)) }; - let def_id = key.key_as_def_id(); + let def_kind = if kind == dep_graph::dep_kinds::def_kind || reduce_queries { // Try to avoid infinite recursion. 
None } else { def_id.and_then(|def_id| def_id.as_local()).map(|def_id| tcx.def_kind(def_id)) }; + QueryStackFrameExtra::new(description, span, def_kind) +} + +pub(crate) fn create_query_frame< + 'tcx, + K: Copy + DynSend + DynSync + Key + for<'a> HashStable<StableHashingContext<'a>> + 'tcx, +>( + tcx: TyCtxt<'tcx>, + do_describe: fn(TyCtxt<'tcx>, K) -> String, + key: K, + kind: DepKind, + name: &'static str, +) -> QueryStackFrame<QueryStackDeferred<'tcx>> { + let def_id = key.key_as_def_id(); + let hash = || { tcx.with_stable_hashing_context(|mut hcx| { let mut hasher = StableHasher::new(); @@ -341,7 +372,10 @@ pub(crate) fn create_query_frame< }; let def_id_for_ty_in_cycle = key.def_id_for_ty_in_cycle(); - QueryStackFrame::new(description, span, def_id, def_kind, kind, def_id_for_ty_in_cycle, hash) + let info = + QueryStackDeferred::new((tcx, key, kind, name, do_describe), create_query_frame_extra); + + QueryStackFrame::new(info, kind, hash, def_id, def_id_for_ty_in_cycle) } pub(crate) fn encode_query_results<'a, 'tcx, Q>( @@ -688,7 +722,10 @@ macro_rules! define_queries { } } - pub(crate) fn try_collect_active_jobs<'tcx>(tcx: TyCtxt<'tcx>, qmap: &mut QueryMap) -> Option<()> { + pub(crate) fn try_collect_active_jobs<'tcx>( + tcx: TyCtxt<'tcx>, + qmap: &mut QueryMap<QueryStackDeferred<'tcx>>, + ) -> Option<()> { let make_query = |tcx, key| { let kind = rustc_middle::dep_graph::dep_kinds::$name; let name = stringify!($name); @@ -768,7 +805,9 @@ macro_rules! define_queries { // These arrays are used for iteration and can't be indexed by `DepKind`. - const TRY_COLLECT_ACTIVE_JOBS: &[for<'tcx> fn(TyCtxt<'tcx>, &mut QueryMap) -> Option<()>] = + const TRY_COLLECT_ACTIVE_JOBS: &[ + for<'tcx> fn(TyCtxt<'tcx>, &mut QueryMap<QueryStackDeferred<'tcx>>) -> Option<()> + ] = &[$(query_impl::$name::try_collect_active_jobs),*]; const ALLOC_SELF_PROFILE_QUERY_STRINGS: &[ diff --git a/compiler/rustc_query_system/src/ich/mod.rs b/compiler/rustc_query_system/src/ich/mod.rs index 852d93b711f..25778add60a 100644 --- a/compiler/rustc_query_system/src/ich/mod.rs +++ b/compiler/rustc_query_system/src/ich/mod.rs @@ -8,7 +8,7 @@ mod hcx; mod impls_syntax; pub const IGNORED_ATTRIBUTES: &[Symbol] = &[ - sym::cfg, + sym::cfg_trace, // FIXME should this really be ignored? 
sym::rustc_if_this_changed, sym::rustc_then_this_would_need, sym::rustc_dirty, diff --git a/compiler/rustc_query_system/src/query/config.rs b/compiler/rustc_query_system/src/query/config.rs index 371b896400a..e508eadb73b 100644 --- a/compiler/rustc_query_system/src/query/config.rs +++ b/compiler/rustc_query_system/src/query/config.rs @@ -6,6 +6,7 @@ use std::hash::Hash; use rustc_data_structures::fingerprint::Fingerprint; use rustc_span::ErrorGuaranteed; +use super::QueryStackFrameExtra; use crate::dep_graph::{DepKind, DepNode, DepNodeParams, SerializedDepNodeIndex}; use crate::error::HandleCycleError; use crate::ich::StableHashingContext; @@ -27,7 +28,7 @@ pub trait QueryConfig<Qcx: QueryContext>: Copy { fn format_value(self) -> fn(&Self::Value) -> String; // Don't use this method to access query results, instead use the methods on TyCtxt - fn query_state<'a>(self, tcx: Qcx) -> &'a QueryState<Self::Key> + fn query_state<'a>(self, tcx: Qcx) -> &'a QueryState<Self::Key, Qcx::QueryInfo> where Qcx: 'a; @@ -57,7 +58,7 @@ pub trait QueryConfig<Qcx: QueryContext>: Copy { fn value_from_cycle_error( self, tcx: Qcx::DepContext, - cycle_error: &CycleError, + cycle_error: &CycleError<QueryStackFrameExtra>, guar: ErrorGuaranteed, ) -> Self::Value; diff --git a/compiler/rustc_query_system/src/query/job.rs b/compiler/rustc_query_system/src/query/job.rs index 402c7831472..de35cd79ea2 100644 --- a/compiler/rustc_query_system/src/query/job.rs +++ b/compiler/rustc_query_system/src/query/job.rs @@ -1,3 +1,4 @@ +use std::fmt::Debug; use std::hash::Hash; use std::io::Write; use std::iter; @@ -12,6 +13,7 @@ use rustc_hir::def::DefKind; use rustc_session::Session; use rustc_span::{DUMMY_SP, Span}; +use super::QueryStackFrameExtra; use crate::dep_graph::DepContext; use crate::error::CycleStack; use crate::query::plumbing::CycleError; @@ -19,45 +21,54 @@ use crate::query::{QueryContext, QueryStackFrame}; /// Represents a span and a query key. #[derive(Clone, Debug)] -pub struct QueryInfo { +pub struct QueryInfo<I> { /// The span corresponding to the reason for which this query was required. pub span: Span, - pub query: QueryStackFrame, + pub query: QueryStackFrame<I>, } -pub type QueryMap = FxHashMap<QueryJobId, QueryJobInfo>; +impl<I> QueryInfo<I> { + pub(crate) fn lift<Qcx: QueryContext<QueryInfo = I>>( + &self, + qcx: Qcx, + ) -> QueryInfo<QueryStackFrameExtra> { + QueryInfo { span: self.span, query: self.query.lift(qcx) } + } +} + +pub type QueryMap<I> = FxHashMap<QueryJobId, QueryJobInfo<I>>; /// A value uniquely identifying an active query job. #[derive(Copy, Clone, Eq, PartialEq, Hash, Debug)] pub struct QueryJobId(pub NonZero<u64>); impl QueryJobId { - fn query(self, map: &QueryMap) -> QueryStackFrame { + fn query<I: Clone>(self, map: &QueryMap<I>) -> QueryStackFrame<I> { map.get(&self).unwrap().query.clone() } - fn span(self, map: &QueryMap) -> Span { + fn span<I>(self, map: &QueryMap<I>) -> Span { map.get(&self).unwrap().job.span } - fn parent(self, map: &QueryMap) -> Option<QueryJobId> { + fn parent<I>(self, map: &QueryMap<I>) -> Option<QueryJobId> { map.get(&self).unwrap().job.parent } - fn latch(self, map: &QueryMap) -> Option<&QueryLatch> { + fn latch<I>(self, map: &QueryMap<I>) -> Option<&QueryLatch<I>> { map.get(&self).unwrap().job.latch.as_ref() } } #[derive(Clone, Debug)] -pub struct QueryJobInfo { - pub query: QueryStackFrame, - pub job: QueryJob, +pub struct QueryJobInfo<I> { + pub query: QueryStackFrame<I>, + pub job: QueryJob<I>, } /// Represents an active query job. 
-#[derive(Clone, Debug)] -pub struct QueryJob { +#[derive(Debug)] +pub struct QueryJob<I> { pub id: QueryJobId, /// The span corresponding to the reason for which this query was required. @@ -67,17 +78,23 @@ pub struct QueryJob { pub parent: Option<QueryJobId>, /// The latch that is used to wait on this job. - latch: Option<QueryLatch>, + latch: Option<QueryLatch<I>>, } -impl QueryJob { +impl<I> Clone for QueryJob<I> { + fn clone(&self) -> Self { + Self { id: self.id, span: self.span, parent: self.parent, latch: self.latch.clone() } + } +} + +impl<I> QueryJob<I> { /// Creates a new query job. #[inline] pub fn new(id: QueryJobId, span: Span, parent: Option<QueryJobId>) -> Self { QueryJob { id, span, parent, latch: None } } - pub(super) fn latch(&mut self) -> QueryLatch { + pub(super) fn latch(&mut self) -> QueryLatch<I> { if self.latch.is_none() { self.latch = Some(QueryLatch::new()); } @@ -97,12 +114,12 @@ impl QueryJob { } impl QueryJobId { - pub(super) fn find_cycle_in_stack( + pub(super) fn find_cycle_in_stack<I: Clone>( &self, - query_map: QueryMap, + query_map: QueryMap<I>, current_job: &Option<QueryJobId>, span: Span, - ) -> CycleError { + ) -> CycleError<I> { // Find the waitee amongst `current_job` parents let mut cycle = Vec::new(); let mut current_job = Option::clone(current_job); @@ -136,7 +153,7 @@ impl QueryJobId { #[cold] #[inline(never)] - pub fn find_dep_kind_root(&self, query_map: QueryMap) -> (QueryJobInfo, usize) { + pub fn find_dep_kind_root<I: Clone>(&self, query_map: QueryMap<I>) -> (QueryJobInfo<I>, usize) { let mut depth = 1; let info = query_map.get(&self).unwrap(); let dep_kind = info.query.dep_kind; @@ -156,25 +173,31 @@ impl QueryJobId { } #[derive(Debug)] -struct QueryWaiter { +struct QueryWaiter<I> { query: Option<QueryJobId>, condvar: Condvar, span: Span, - cycle: Mutex<Option<CycleError>>, + cycle: Mutex<Option<CycleError<I>>>, } #[derive(Debug)] -struct QueryLatchInfo { +struct QueryLatchInfo<I> { complete: bool, - waiters: Vec<Arc<QueryWaiter>>, + waiters: Vec<Arc<QueryWaiter<I>>>, } -#[derive(Clone, Debug)] -pub(super) struct QueryLatch { - info: Arc<Mutex<QueryLatchInfo>>, +#[derive(Debug)] +pub(super) struct QueryLatch<I> { + info: Arc<Mutex<QueryLatchInfo<I>>>, } -impl QueryLatch { +impl<I> Clone for QueryLatch<I> { + fn clone(&self) -> Self { + Self { info: Arc::clone(&self.info) } + } +} + +impl<I> QueryLatch<I> { fn new() -> Self { QueryLatch { info: Arc::new(Mutex::new(QueryLatchInfo { complete: false, waiters: Vec::new() })), @@ -182,7 +205,11 @@ impl QueryLatch { } /// Awaits for the query job to complete. - pub(super) fn wait_on(&self, query: Option<QueryJobId>, span: Span) -> Result<(), CycleError> { + pub(super) fn wait_on( + &self, + query: Option<QueryJobId>, + span: Span, + ) -> Result<(), CycleError<I>> { let waiter = Arc::new(QueryWaiter { query, span, cycle: Mutex::new(None), condvar: Condvar::new() }); self.wait_on_inner(&waiter); @@ -197,7 +224,7 @@ impl QueryLatch { } /// Awaits the caller on this latch by blocking the current thread. - fn wait_on_inner(&self, waiter: &Arc<QueryWaiter>) { + fn wait_on_inner(&self, waiter: &Arc<QueryWaiter<I>>) { let mut info = self.info.lock(); if !info.complete { // We push the waiter on to the `waiters` list. It can be accessed inside @@ -232,7 +259,7 @@ impl QueryLatch { /// Removes a single waiter from the list of waiters. /// This is used to break query cycles. 
- fn extract_waiter(&self, waiter: usize) -> Arc<QueryWaiter> { + fn extract_waiter(&self, waiter: usize) -> Arc<QueryWaiter<I>> { let mut info = self.info.lock(); debug_assert!(!info.complete); // Remove the waiter from the list of waiters @@ -252,7 +279,11 @@ type Waiter = (QueryJobId, usize); /// For visits of resumable waiters it returns Some(Some(Waiter)) which has the /// required information to resume the waiter. /// If all `visit` calls returns None, this function also returns None. -fn visit_waiters<F>(query_map: &QueryMap, query: QueryJobId, mut visit: F) -> Option<Option<Waiter>> +fn visit_waiters<I, F>( + query_map: &QueryMap<I>, + query: QueryJobId, + mut visit: F, +) -> Option<Option<Waiter>> where F: FnMut(Span, QueryJobId) -> Option<Option<Waiter>>, { @@ -282,8 +313,8 @@ where /// `span` is the reason for the `query` to execute. This is initially DUMMY_SP. /// If a cycle is detected, this initial value is replaced with the span causing /// the cycle. -fn cycle_check( - query_map: &QueryMap, +fn cycle_check<I>( + query_map: &QueryMap<I>, query: QueryJobId, span: Span, stack: &mut Vec<(Span, QueryJobId)>, @@ -322,8 +353,8 @@ fn cycle_check( /// Finds out if there's a path to the compiler root (aka. code which isn't in a query) /// from `query` without going through any of the queries in `visited`. /// This is achieved with a depth first search. -fn connected_to_root( - query_map: &QueryMap, +fn connected_to_root<I>( + query_map: &QueryMap<I>, query: QueryJobId, visited: &mut FxHashSet<QueryJobId>, ) -> bool { @@ -344,7 +375,7 @@ fn connected_to_root( } // Deterministically pick an query from a list -fn pick_query<'a, T, F>(query_map: &QueryMap, queries: &'a [T], f: F) -> &'a T +fn pick_query<'a, I: Clone, T, F>(query_map: &QueryMap<I>, queries: &'a [T], f: F) -> &'a T where F: Fn(&T) -> (Span, QueryJobId), { @@ -369,10 +400,10 @@ where /// the function return true. /// If a cycle was not found, the starting query is removed from `jobs` and /// the function returns false. -fn remove_cycle( - query_map: &QueryMap, +fn remove_cycle<I: Clone>( + query_map: &QueryMap<I>, jobs: &mut Vec<QueryJobId>, - wakelist: &mut Vec<Arc<QueryWaiter>>, + wakelist: &mut Vec<Arc<QueryWaiter<I>>>, ) -> bool { let mut visited = FxHashSet::default(); let mut stack = Vec::new(); @@ -473,7 +504,10 @@ fn remove_cycle( /// uses a query latch and then resuming that waiter. /// There may be multiple cycles involved in a deadlock, so this searches /// all active queries for cycles before finally resuming all the waiters at once. 
-pub fn break_query_cycles(query_map: QueryMap, registry: &rayon_core::Registry) { +pub fn break_query_cycles<I: Clone + Debug>( + query_map: QueryMap<I>, + registry: &rayon_core::Registry, +) { let mut wakelist = Vec::new(); let mut jobs: Vec<QueryJobId> = query_map.keys().cloned().collect(); @@ -520,7 +554,7 @@ pub fn report_cycle<'a>( ) -> Diag<'a> { assert!(!stack.is_empty()); - let span = stack[0].query.default_span(stack[1 % stack.len()].span); + let span = stack[0].query.info.default_span(stack[1 % stack.len()].span); let mut cycle_stack = Vec::new(); @@ -529,31 +563,31 @@ pub fn report_cycle<'a>( for i in 1..stack.len() { let query = &stack[i].query; - let span = query.default_span(stack[(i + 1) % stack.len()].span); - cycle_stack.push(CycleStack { span, desc: query.description.to_owned() }); + let span = query.info.default_span(stack[(i + 1) % stack.len()].span); + cycle_stack.push(CycleStack { span, desc: query.info.description.to_owned() }); } let mut cycle_usage = None; if let Some((span, ref query)) = *usage { cycle_usage = Some(crate::error::CycleUsage { - span: query.default_span(span), - usage: query.description.to_string(), + span: query.info.default_span(span), + usage: query.info.description.to_string(), }); } - let alias = if stack.iter().all(|entry| matches!(entry.query.def_kind, Some(DefKind::TyAlias))) - { - Some(crate::error::Alias::Ty) - } else if stack.iter().all(|entry| entry.query.def_kind == Some(DefKind::TraitAlias)) { - Some(crate::error::Alias::Trait) - } else { - None - }; + let alias = + if stack.iter().all(|entry| matches!(entry.query.info.def_kind, Some(DefKind::TyAlias))) { + Some(crate::error::Alias::Ty) + } else if stack.iter().all(|entry| entry.query.info.def_kind == Some(DefKind::TraitAlias)) { + Some(crate::error::Alias::Trait) + } else { + None + }; let cycle_diag = crate::error::Cycle { span, cycle_stack, - stack_bottom: stack[0].query.description.to_owned(), + stack_bottom: stack[0].query.info.description.to_owned(), alias, cycle_usage, stack_count, @@ -589,6 +623,7 @@ pub fn print_query_stack<Qcx: QueryContext>( let Some(query_info) = query_map.get(&query) else { break; }; + let query_extra = qcx.lift_query_info(&query_info.query.info); if Some(count_printed) < limit_frames || limit_frames.is_none() { // Only print to stderr as many stack frames as `num_frames` when present. 
// FIXME: needs translation @@ -596,7 +631,7 @@ pub fn print_query_stack<Qcx: QueryContext>( #[allow(rustc::untranslatable_diagnostic)] dcx.struct_failure_note(format!( "#{} [{:?}] {}", - count_printed, query_info.query.dep_kind, query_info.query.description + count_printed, query_info.query.dep_kind, query_extra.description )) .with_span(query_info.job.span) .emit(); @@ -609,7 +644,7 @@ pub fn print_query_stack<Qcx: QueryContext>( "#{} [{}] {}", count_total, qcx.dep_context().dep_kind_info(query_info.query.dep_kind).name, - query_info.query.description + query_extra.description ); } diff --git a/compiler/rustc_query_system/src/query/mod.rs b/compiler/rustc_query_system/src/query/mod.rs index 0d0c66aa978..ef21af7dafb 100644 --- a/compiler/rustc_query_system/src/query/mod.rs +++ b/compiler/rustc_query_system/src/query/mod.rs @@ -1,4 +1,9 @@ mod plumbing; +use std::fmt::Debug; +use std::marker::PhantomData; +use std::mem::transmute; +use std::sync::Arc; + pub use self::plumbing::*; mod job; @@ -11,6 +16,7 @@ mod caches; pub use self::caches::{DefIdCache, DefaultCache, QueryCache, SingleCache, VecCache}; mod config; +use rustc_data_structures::sync::{DynSend, DynSync}; use rustc_errors::DiagInner; use rustc_hashes::Hash64; use rustc_hir::def::DefKind; @@ -25,31 +31,59 @@ use crate::dep_graph::{DepKind, DepNodeIndex, HasDepContext, SerializedDepNodeIn /// /// This is mostly used in case of cycles for error reporting. #[derive(Clone, Debug)] -pub struct QueryStackFrame { - pub description: String, - span: Option<Span>, - pub def_id: Option<DefId>, - pub def_kind: Option<DefKind>, - /// A def-id that is extracted from a `Ty` in a query key - pub def_id_for_ty_in_cycle: Option<DefId>, +pub struct QueryStackFrame<I> { + /// This field initially stores a `QueryStackDeferred` during collection, + /// but can later be changed to `QueryStackFrameExtra` containing concrete information + /// by calling `lift`. This is done so that collecting query does not need to invoke + /// queries, instead `lift` will call queries in a more appropriate location. + pub info: I, + pub dep_kind: DepKind, /// This hash is used to deterministically pick /// a query to remove cycles in the parallel compiler. 
hash: Hash64, + pub def_id: Option<DefId>, + /// A def-id that is extracted from a `Ty` in a query key + pub def_id_for_ty_in_cycle: Option<DefId>, } -impl QueryStackFrame { +impl<I> QueryStackFrame<I> { #[inline] pub fn new( - description: String, - span: Option<Span>, - def_id: Option<DefId>, - def_kind: Option<DefKind>, + info: I, dep_kind: DepKind, - def_id_for_ty_in_cycle: Option<DefId>, hash: impl FnOnce() -> Hash64, + def_id: Option<DefId>, + def_id_for_ty_in_cycle: Option<DefId>, ) -> Self { - Self { description, span, def_id, def_kind, def_id_for_ty_in_cycle, dep_kind, hash: hash() } + Self { info, def_id, dep_kind, hash: hash(), def_id_for_ty_in_cycle } + } + + fn lift<Qcx: QueryContext<QueryInfo = I>>( + &self, + qcx: Qcx, + ) -> QueryStackFrame<QueryStackFrameExtra> { + QueryStackFrame { + info: qcx.lift_query_info(&self.info), + dep_kind: self.dep_kind, + hash: self.hash, + def_id: self.def_id, + def_id_for_ty_in_cycle: self.def_id_for_ty_in_cycle, + } + } +} + +#[derive(Clone, Debug)] +pub struct QueryStackFrameExtra { + pub description: String, + span: Option<Span>, + pub def_kind: Option<DefKind>, +} + +impl QueryStackFrameExtra { + #[inline] + pub fn new(description: String, span: Option<Span>, def_kind: Option<DefKind>) -> Self { + Self { description, span, def_kind } } // FIXME(eddyb) Get more valid `Span`s on queries. @@ -62,7 +96,41 @@ impl QueryStackFrame { } } -/// Track a 'side effects' for a particular query. +/// Track a 'side effect' for a particular query. +/// This is used to hold a closure which can create `QueryStackFrameExtra`. +#[derive(Clone)] +pub struct QueryStackDeferred<'tcx> { + _dummy: PhantomData<&'tcx ()>, + + // `extract` may contain references to 'tcx, but we can't tell drop checking that it won't + // access it in the destructor. + extract: Arc<dyn Fn() -> QueryStackFrameExtra + DynSync + DynSend>, +} + +impl<'tcx> QueryStackDeferred<'tcx> { + pub fn new<C: Copy + DynSync + DynSend + 'tcx>( + context: C, + extract: fn(C) -> QueryStackFrameExtra, + ) -> Self { + let extract: Arc<dyn Fn() -> QueryStackFrameExtra + DynSync + DynSend + 'tcx> = + Arc::new(move || extract(context)); + // SAFETY: The `extract` closure does not access 'tcx in its destructor as the only + // captured variable is `context` which is Copy and cannot have a destructor. + Self { _dummy: PhantomData, extract: unsafe { transmute(extract) } } + } + + pub fn extract(&self) -> QueryStackFrameExtra { + (self.extract)() + } +} + +impl<'tcx> Debug for QueryStackDeferred<'tcx> { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + f.write_str("QueryStackDeferred") + } +} + +/// Tracks 'side effects' for a particular query. /// This struct is saved to disk along with the query result, /// and loaded from disk if we mark the query as green. /// This allows us to 'replay' changes to global state @@ -81,12 +149,16 @@ pub enum QuerySideEffect { } pub trait QueryContext: HasDepContext { + type QueryInfo: Clone; + fn next_job_id(self) -> QueryJobId; /// Get the query information from the TLS context. fn current_query_job(self) -> Option<QueryJobId>; - fn collect_active_jobs(self) -> Result<QueryMap, QueryMap>; + fn collect_active_jobs(self) -> Result<QueryMap<Self::QueryInfo>, QueryMap<Self::QueryInfo>>; + + fn lift_query_info(self, info: &Self::QueryInfo) -> QueryStackFrameExtra; /// Load a side effect associated to the node in the previous session. 
fn load_side_effect( diff --git a/compiler/rustc_query_system/src/query/plumbing.rs b/compiler/rustc_query_system/src/query/plumbing.rs index 3a9d80280c2..6ea8e3b9200 100644 --- a/compiler/rustc_query_system/src/query/plumbing.rs +++ b/compiler/rustc_query_system/src/query/plumbing.rs @@ -16,7 +16,7 @@ use rustc_errors::{Diag, FatalError, StashKey}; use rustc_span::{DUMMY_SP, Span}; use tracing::instrument; -use super::QueryConfig; +use super::{QueryConfig, QueryStackFrameExtra}; use crate::HandleCycleError; use crate::dep_graph::{DepContext, DepGraphData, DepNode, DepNodeIndex, DepNodeParams}; use crate::ich::StableHashingContext; @@ -29,23 +29,23 @@ fn equivalent_key<K: Eq, V>(k: &K) -> impl Fn(&(K, V)) -> bool + '_ { move |x| x.0 == *k } -pub struct QueryState<K> { - active: Sharded<hashbrown::HashTable<(K, QueryResult)>>, +pub struct QueryState<K, I> { + active: Sharded<hashbrown::HashTable<(K, QueryResult<I>)>>, } /// Indicates the state of a query for a given key in a query map. -enum QueryResult { +enum QueryResult<I> { /// An already executing query. The query job can be used to await for its completion. - Started(QueryJob), + Started(QueryJob<I>), /// The query panicked. Queries trying to wait on this will raise a fatal error which will /// silently panic. Poisoned, } -impl QueryResult { +impl<I> QueryResult<I> { /// Unwraps the query job expecting that it has started. - fn expect_job(self) -> QueryJob { + fn expect_job(self) -> QueryJob<I> { match self { Self::Started(job) => job, Self::Poisoned => { @@ -55,7 +55,7 @@ impl QueryResult { } } -impl<K> QueryState<K> +impl<K, I> QueryState<K, I> where K: Eq + Hash + Copy + Debug, { @@ -66,8 +66,8 @@ where pub fn try_collect_active_jobs<Qcx: Copy>( &self, qcx: Qcx, - make_query: fn(Qcx, K) -> QueryStackFrame, - jobs: &mut QueryMap, + make_query: fn(Qcx, K) -> QueryStackFrame<I>, + jobs: &mut QueryMap<I>, ) -> Option<()> { let mut active = Vec::new(); @@ -76,7 +76,7 @@ where for shard in self.active.try_lock_shards() { for (k, v) in shard?.iter() { if let QueryResult::Started(ref job) = *v { - active.push((*k, job.clone())); + active.push((*k, (*job).clone())); } } } @@ -92,19 +92,19 @@ where } } -impl<K> Default for QueryState<K> { - fn default() -> QueryState<K> { +impl<K, I> Default for QueryState<K, I> { + fn default() -> QueryState<K, I> { QueryState { active: Default::default() } } } /// A type representing the responsibility to execute the job in the `job` field. /// This will poison the relevant query if dropped. -struct JobOwner<'tcx, K> +struct JobOwner<'tcx, K, I> where K: Eq + Hash + Copy, { - state: &'tcx QueryState<K>, + state: &'tcx QueryState<K, I>, key: K, } @@ -146,7 +146,7 @@ where } Stash => { let guar = if let Some(root) = cycle_error.cycle.first() - && let Some(span) = root.query.span + && let Some(span) = root.query.info.span { error.stash(span, StashKey::Cycle).unwrap() } else { @@ -157,7 +157,7 @@ where } } -impl<'tcx, K> JobOwner<'tcx, K> +impl<'tcx, K, I> JobOwner<'tcx, K, I> where K: Eq + Hash + Copy, { @@ -194,7 +194,7 @@ where } } -impl<'tcx, K> Drop for JobOwner<'tcx, K> +impl<'tcx, K, I> Drop for JobOwner<'tcx, K, I> where K: Eq + Hash + Copy, { @@ -222,10 +222,19 @@ where } #[derive(Clone, Debug)] -pub struct CycleError { +pub struct CycleError<I = QueryStackFrameExtra> { /// The query and related span that uses the cycle. 
- pub usage: Option<(Span, QueryStackFrame)>, - pub cycle: Vec<QueryInfo>, + pub usage: Option<(Span, QueryStackFrame<I>)>, + pub cycle: Vec<QueryInfo<I>>, +} + +impl<I> CycleError<I> { + fn lift<Qcx: QueryContext<QueryInfo = I>>(&self, qcx: Qcx) -> CycleError<QueryStackFrameExtra> { + CycleError { + usage: self.usage.as_ref().map(|(span, frame)| (*span, frame.lift(qcx))), + cycle: self.cycle.iter().map(|info| info.lift(qcx)).collect(), + } + } } /// Checks whether there is already a value for this key in the in-memory @@ -262,10 +271,10 @@ where { // Ensure there was no errors collecting all active jobs. // We need the complete map to ensure we find a cycle to break. - let query_map = qcx.collect_active_jobs().expect("failed to collect active queries"); + let query_map = qcx.collect_active_jobs().ok().expect("failed to collect active queries"); let error = try_execute.find_cycle_in_stack(query_map, &qcx.current_query_job(), span); - (mk_cycle(query, qcx, error), None) + (mk_cycle(query, qcx, error.lift(qcx)), None) } #[inline(always)] @@ -274,7 +283,7 @@ fn wait_for_query<Q, Qcx>( qcx: Qcx, span: Span, key: Q::Key, - latch: QueryLatch, + latch: QueryLatch<Qcx::QueryInfo>, current: Option<QueryJobId>, ) -> (Q::Value, Option<DepNodeIndex>) where @@ -314,7 +323,7 @@ where (v, Some(index)) } - Err(cycle) => (mk_cycle(query, qcx, cycle), None), + Err(cycle) => (mk_cycle(query, qcx, cycle.lift(qcx)), None), } } @@ -392,7 +401,7 @@ where fn execute_job<Q, Qcx, const INCR: bool>( query: Q, qcx: Qcx, - state: &QueryState<Q::Key>, + state: &QueryState<Q::Key, Qcx::QueryInfo>, key: Q::Key, key_hash: u64, id: QueryJobId, diff --git a/compiler/rustc_resolve/src/def_collector.rs b/compiler/rustc_resolve/src/def_collector.rs index fcb638a117e..6f48a75d617 100644 --- a/compiler/rustc_resolve/src/def_collector.rs +++ b/compiler/rustc_resolve/src/def_collector.rs @@ -19,18 +19,15 @@ pub(crate) fn collect_definitions( fragment: &AstFragment, expansion: LocalExpnId, ) { - let InvocationParent { parent_def, impl_trait_context, in_attr } = - resolver.invocation_parents[&expansion]; - let mut visitor = DefCollector { resolver, parent_def, expansion, impl_trait_context, in_attr }; + let invocation_parent = resolver.invocation_parents[&expansion]; + let mut visitor = DefCollector { resolver, expansion, invocation_parent }; fragment.visit_with(&mut visitor); } /// Creates `DefId`s for nodes in the AST. 
struct DefCollector<'a, 'ra, 'tcx> { resolver: &'a mut Resolver<'ra, 'tcx>, - parent_def: LocalDefId, - impl_trait_context: ImplTraitContext, - in_attr: bool, + invocation_parent: InvocationParent, expansion: LocalExpnId, } @@ -42,7 +39,7 @@ impl<'a, 'ra, 'tcx> DefCollector<'a, 'ra, 'tcx> { def_kind: DefKind, span: Span, ) -> LocalDefId { - let parent_def = self.parent_def; + let parent_def = self.invocation_parent.parent_def; debug!( "create_def(node_id={:?}, def_kind={:?}, parent_def={:?})", node_id, def_kind, parent_def @@ -60,9 +57,9 @@ impl<'a, 'ra, 'tcx> DefCollector<'a, 'ra, 'tcx> { } fn with_parent<F: FnOnce(&mut Self)>(&mut self, parent_def: LocalDefId, f: F) { - let orig_parent_def = mem::replace(&mut self.parent_def, parent_def); + let orig_parent_def = mem::replace(&mut self.invocation_parent.parent_def, parent_def); f(self); - self.parent_def = orig_parent_def; + self.invocation_parent.parent_def = orig_parent_def; } fn with_impl_trait<F: FnOnce(&mut Self)>( @@ -70,9 +67,10 @@ impl<'a, 'ra, 'tcx> DefCollector<'a, 'ra, 'tcx> { impl_trait_context: ImplTraitContext, f: F, ) { - let orig_itc = mem::replace(&mut self.impl_trait_context, impl_trait_context); + let orig_itc = + mem::replace(&mut self.invocation_parent.impl_trait_context, impl_trait_context); f(self); - self.impl_trait_context = orig_itc; + self.invocation_parent.impl_trait_context = orig_itc; } fn collect_field(&mut self, field: &'a FieldDef, index: Option<usize>) { @@ -96,14 +94,7 @@ impl<'a, 'ra, 'tcx> DefCollector<'a, 'ra, 'tcx> { fn visit_macro_invoc(&mut self, id: NodeId) { let id = id.placeholder_to_expn_id(); - let old_parent = self.resolver.invocation_parents.insert( - id, - InvocationParent { - parent_def: self.parent_def, - impl_trait_context: self.impl_trait_context, - in_attr: self.in_attr, - }, - ); + let old_parent = self.resolver.invocation_parents.insert(id, self.invocation_parent); assert!(old_parent.is_none(), "parent `LocalDefId` is reset for an invocation"); } } @@ -367,7 +358,7 @@ impl<'a, 'ra, 'tcx> visit::Visitor<'a> for DefCollector<'a, 'ra, 'tcx> { self.with_parent(def, |this| visit::walk_anon_const(this, constant)); return; } - _ => self.parent_def, + _ => self.invocation_parent.parent_def, }; self.with_parent(parent_def, |this| visit::walk_expr(this, expr)) @@ -382,13 +373,13 @@ impl<'a, 'ra, 'tcx> visit::Visitor<'a> for DefCollector<'a, 'ra, 'tcx> { // output or built artifacts, so replace them here... // Perhaps we should instead format APITs more robustly. let name = Symbol::intern(&pprust::ty_to_string(ty).replace('\n', " ")); - let kind = match self.impl_trait_context { + let kind = match self.invocation_parent.impl_trait_context { ImplTraitContext::Universal => DefKind::TyParam, ImplTraitContext::Existential => DefKind::OpaqueTy, ImplTraitContext::InBinding => return visit::walk_ty(self, ty), }; let id = self.create_def(*id, Some(name), kind, ty.span); - match self.impl_trait_context { + match self.invocation_parent.impl_trait_context { // Do not nest APIT, as we desugar them as `impl_trait: bounds`, // so the `impl_trait` node is not a parent to `bounds`. 
ImplTraitContext::Universal => visit::walk_ty(self, ty), @@ -459,9 +450,9 @@ impl<'a, 'ra, 'tcx> visit::Visitor<'a> for DefCollector<'a, 'ra, 'tcx> { } fn visit_attribute(&mut self, attr: &'a Attribute) -> Self::Result { - let orig_in_attr = mem::replace(&mut self.in_attr, true); + let orig_in_attr = mem::replace(&mut self.invocation_parent.in_attr, true); visit::walk_attribute(self, attr); - self.in_attr = orig_in_attr; + self.invocation_parent.in_attr = orig_in_attr; } fn visit_inline_asm(&mut self, asm: &'a InlineAsm) { diff --git a/compiler/rustc_resolve/src/late.rs b/compiler/rustc_resolve/src/late.rs index 6bc644e9e11..533e216ddb2 100644 --- a/compiler/rustc_resolve/src/late.rs +++ b/compiler/rustc_resolve/src/late.rs @@ -3329,34 +3329,44 @@ impl<'a, 'ast, 'ra: 'ast, 'tcx> LateResolutionVisitor<'a, 'ast, 'ra, 'tcx> { }, |this| { this.with_lifetime_rib( - LifetimeRibKind::StaticIfNoLifetimeInScope { - lint_id: item.id, - // In impls, it's not a hard error yet due to backcompat. - emit_lint: true, + // Until these are a hard error, we need to create them within the correct binder, + // Otherwise the lifetimes of this assoc const think they are lifetimes of the trait. + LifetimeRibKind::AnonymousCreateParameter { + binder: item.id, + report_in_path: true, }, |this| { - // If this is a trait impl, ensure the const - // exists in trait - this.check_trait_item( - item.id, - item.ident, - &item.kind, - ValueNS, - item.span, - seen_trait_items, - |i, s, c| ConstNotMemberOfTrait(i, s, c), - ); + this.with_lifetime_rib( + LifetimeRibKind::StaticIfNoLifetimeInScope { + lint_id: item.id, + // In impls, it's not a hard error yet due to backcompat. + emit_lint: true, + }, + |this| { + // If this is a trait impl, ensure the const + // exists in trait + this.check_trait_item( + item.id, + item.ident, + &item.kind, + ValueNS, + item.span, + seen_trait_items, + |i, s, c| ConstNotMemberOfTrait(i, s, c), + ); - this.visit_generics(generics); - this.visit_ty(ty); - if let Some(expr) = expr { - // We allow arbitrary const expressions inside of associated consts, - // even if they are potentially not const evaluatable. - // - // Type parameters can already be used and as associated consts are - // not used as part of the type system, this is far less surprising. - this.resolve_const_body(expr, None); - } + this.visit_generics(generics); + this.visit_ty(ty); + if let Some(expr) = expr { + // We allow arbitrary const expressions inside of associated consts, + // even if they are potentially not const evaluatable. + // + // Type parameters can already be used and as associated consts are + // not used as part of the type system, this is far less surprising. + this.resolve_const_body(expr, None); + } + }, + ) }, ); }, diff --git a/compiler/rustc_resolve/src/late/diagnostics.rs b/compiler/rustc_resolve/src/late/diagnostics.rs index cf8db2267f4..e40f84e7e59 100644 --- a/compiler/rustc_resolve/src/late/diagnostics.rs +++ b/compiler/rustc_resolve/src/late/diagnostics.rs @@ -1665,41 +1665,81 @@ impl<'ast, 'ra: 'ast, 'tcx> LateResolutionVisitor<'_, 'ast, 'ra, 'tcx> { // the struct literal syntax at all, as that will cause a subsequent error. 
let fields = this.r.field_idents(def_id); let has_fields = fields.as_ref().is_some_and(|f| !f.is_empty()); - let (fields, applicability) = match fields { - Some(fields) => { - let fields = if let Some(old_fields) = old_fields { - fields - .iter() - .enumerate() - .map(|(idx, new)| (new, old_fields.get(idx))) - .map(|(new, old)| { - if let Some(Some(old)) = old - && new.as_str() != old - { - format!("{new}: {old}") - } else { - new.to_string() - } - }) - .collect::<Vec<String>>() - } else { - fields - .iter() - .map(|f| format!("{f}{tail}")) - .collect::<Vec<String>>() - }; - - (fields.join(", "), applicability) - } - None => ("/* fields */".to_string(), Applicability::HasPlaceholders), - }; - let pad = if has_fields { " " } else { "" }; - err.span_suggestion( + + if let PathSource::Expr(Some(Expr { + kind: ExprKind::Call(path, args), span, - format!("use struct {descr} syntax instead"), - format!("{path_str} {{{pad}{fields}{pad}}}"), - applicability, - ); + .. + })) = source + && !args.is_empty() + && let Some(fields) = &fields + && args.len() == fields.len() + // Make sure we have same number of args as fields + { + let path_span = path.span; + let mut parts = Vec::new(); + + // Start with the opening brace + parts.push(( + path_span.shrink_to_hi().until(args[0].span), + "{".to_owned(), + )); + + for (field, arg) in fields.iter().zip(args.iter()) { + // Add the field name before the argument + parts.push((arg.span.shrink_to_lo(), format!("{}: ", field))); + } + + // Add the closing brace + parts.push(( + args.last().unwrap().span.shrink_to_hi().until(span.shrink_to_hi()), + "}".to_owned(), + )); + + err.multipart_suggestion_verbose( + format!("use struct {descr} syntax instead of calling"), + parts, + applicability, + ); + } else { + let (fields, applicability) = match fields { + Some(fields) => { + let fields = if let Some(old_fields) = old_fields { + fields + .iter() + .enumerate() + .map(|(idx, new)| (new, old_fields.get(idx))) + .map(|(new, old)| { + if let Some(Some(old)) = old + && new.as_str() != old + { + format!("{new}: {old}") + } else { + new.to_string() + } + }) + .collect::<Vec<String>>() + } else { + fields + .iter() + .map(|f| format!("{f}{tail}")) + .collect::<Vec<String>>() + }; + + (fields.join(", "), applicability) + } + None => { + ("/* fields */".to_string(), Applicability::HasPlaceholders) + } + }; + let pad = if has_fields { " " } else { "" }; + err.span_suggestion( + span, + format!("use struct {descr} syntax instead"), + format!("{path_str} {{{pad}{fields}{pad}}}"), + applicability, + ); + } } if let PathSource::Expr(Some(Expr { kind: ExprKind::Call(path, args), diff --git a/compiler/rustc_span/src/edit_distance.rs b/compiler/rustc_span/src/edit_distance.rs index 8a2baaa42e2..4f3202b694c 100644 --- a/compiler/rustc_span/src/edit_distance.rs +++ b/compiler/rustc_span/src/edit_distance.rs @@ -118,7 +118,7 @@ pub fn edit_distance_with_substrings(a: &str, b: &str, limit: usize) -> Option<u // Check one isn't less than half the length of the other. If this is true then there is a // big difference in length. 
let big_len_diff = (n * 2) < m || (m * 2) < n; - let len_diff = if n < m { m - n } else { n - m }; + let len_diff = m.abs_diff(n); let distance = edit_distance(a, b, limit + len_diff)?; // This is the crux, subtracting length difference means exact substring matches will now be 0 diff --git a/compiler/rustc_span/src/symbol.rs b/compiler/rustc_span/src/symbol.rs index 3e474243965..6807f96e712 100644 --- a/compiler/rustc_span/src/symbol.rs +++ b/compiler/rustc_span/src/symbol.rs @@ -623,6 +623,7 @@ symbols! { cfg_target_has_atomic_equal_alignment, cfg_target_thread_local, cfg_target_vendor, + cfg_trace: "<cfg>", // must not be a valid identifier cfg_ub_checks, cfg_version, cfi, @@ -2118,7 +2119,9 @@ symbols! { type_changing_struct_update, type_const, type_id, + type_ir_infer_ctxt_like, type_ir_inherent, + type_ir_interner, type_length_limit, type_macros, type_name, diff --git a/compiler/rustc_trait_selection/src/traits/select/candidate_assembly.rs b/compiler/rustc_trait_selection/src/traits/select/candidate_assembly.rs index fc352499146..4cfd8149b1e 100644 --- a/compiler/rustc_trait_selection/src/traits/select/candidate_assembly.rs +++ b/compiler/rustc_trait_selection/src/traits/select/candidate_assembly.rs @@ -16,7 +16,7 @@ use rustc_infer::traits::{Obligation, PolyTraitObligation, SelectionError}; use rustc_middle::ty::fast_reject::DeepRejectCtxt; use rustc_middle::ty::{self, Ty, TypeVisitableExt, TypingMode}; use rustc_middle::{bug, span_bug}; -use rustc_type_ir::{Interner, elaborate}; +use rustc_type_ir::elaborate; use tracing::{debug, instrument, trace}; use super::SelectionCandidate::*; @@ -802,7 +802,9 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> { | ty::UnsafeBinder(_) => { // Only consider auto impls of unsafe traits when there are // no unsafe fields. - if self.tcx().trait_is_unsafe(def_id) && self_ty.has_unsafe_fields() { + if self.tcx().trait_def(def_id).safety.is_unsafe() + && self_ty.has_unsafe_fields() + { return; } diff --git a/compiler/rustc_trait_selection/src/traits/select/confirmation.rs b/compiler/rustc_trait_selection/src/traits/select/confirmation.rs index 4404324d5cd..a66c958c109 100644 --- a/compiler/rustc_trait_selection/src/traits/select/confirmation.rs +++ b/compiler/rustc_trait_selection/src/traits/select/confirmation.rs @@ -266,9 +266,10 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> { } else { bug!("unexpected builtin trait {:?}", trait_def) }; - let BuiltinImplConditions::Where(nested) = conditions else { + let BuiltinImplConditions::Where(types) = conditions else { bug!("obligation {:?} had matched a builtin impl but now doesn't", obligation); }; + let types = self.infcx.enter_forall_and_leak_universe(types); let cause = obligation.derived_cause(ObligationCauseCode::BuiltinDerived); self.collect_predicates_for_types( @@ -276,7 +277,7 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> { cause, obligation.recursion_depth + 1, trait_def, - nested, + types, ) } else { PredicateObligations::new() @@ -444,37 +445,25 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> { &mut self, obligation: &PolyTraitObligation<'tcx>, ) -> Result<PredicateObligations<'tcx>, SelectionError<'tcx>> { - debug!(?obligation, "confirm_auto_impl_candidate"); - - let self_ty = obligation.predicate.self_ty().map_bound(|ty| self.infcx.shallow_resolve(ty)); - let types = self.constituent_types_for_ty(self_ty)?; - Ok(self.vtable_auto_impl(obligation, obligation.predicate.def_id(), types)) - } - - /// See `confirm_auto_impl_candidate`. 
- fn vtable_auto_impl( - &mut self, - obligation: &PolyTraitObligation<'tcx>, - trait_def_id: DefId, - nested: ty::Binder<'tcx, Vec<Ty<'tcx>>>, - ) -> PredicateObligations<'tcx> { - debug!(?nested, "vtable_auto_impl"); ensure_sufficient_stack(|| { - let cause = obligation.derived_cause(ObligationCauseCode::BuiltinDerived); - assert_eq!(obligation.predicate.polarity(), ty::PredicatePolarity::Positive); + let self_ty = + obligation.predicate.self_ty().map_bound(|ty| self.infcx.shallow_resolve(ty)); + + let types = self.constituent_types_for_ty(self_ty)?; + let types = self.infcx.enter_forall_and_leak_universe(types); + + let cause = obligation.derived_cause(ObligationCauseCode::BuiltinDerived); let obligations = self.collect_predicates_for_types( obligation.param_env, cause, obligation.recursion_depth + 1, - trait_def_id, - nested, + obligation.predicate.def_id(), + types, ); - debug!(?obligations, "vtable_auto_impl"); - - obligations + Ok(obligations) }) } diff --git a/compiler/rustc_trait_selection/src/traits/select/mod.rs b/compiler/rustc_trait_selection/src/traits/select/mod.rs index e1adabbeaa6..e439df76cd4 100644 --- a/compiler/rustc_trait_selection/src/traits/select/mod.rs +++ b/compiler/rustc_trait_selection/src/traits/select/mod.rs @@ -2370,7 +2370,7 @@ impl<'tcx> SelectionContext<'_, 'tcx> { cause: ObligationCause<'tcx>, recursion_depth: usize, trait_def_id: DefId, - types: ty::Binder<'tcx, Vec<Ty<'tcx>>>, + types: Vec<Ty<'tcx>>, ) -> PredicateObligations<'tcx> { // Because the types were potentially derived from // higher-ranked obligations they may reference late-bound @@ -2387,13 +2387,8 @@ impl<'tcx> SelectionContext<'_, 'tcx> { // 3. Re-bind the regions back to `for<'a> &'a i32 : Copy` types - .as_ref() - .skip_binder() // binder moved -\ - .iter() - .flat_map(|ty| { - let ty: ty::Binder<'tcx, Ty<'tcx>> = types.rebind(*ty); // <----/ - - let placeholder_ty = self.infcx.enter_forall_and_leak_universe(ty); + .into_iter() + .flat_map(|placeholder_ty| { let Normalized { value: normalized_ty, mut obligations } = ensure_sufficient_stack(|| { normalize_with_depth( diff --git a/compiler/rustc_type_ir/src/infer_ctxt.rs b/compiler/rustc_type_ir/src/infer_ctxt.rs index 26c49510696..e512e8fc838 100644 --- a/compiler/rustc_type_ir/src/infer_ctxt.rs +++ b/compiler/rustc_type_ir/src/infer_ctxt.rs @@ -102,6 +102,7 @@ impl<I: Interner> TypingMode<I> { } } +#[cfg_attr(feature = "nightly", rustc_diagnostic_item = "type_ir_infer_ctxt_like")] pub trait InferCtxtLike: Sized { type Interner: Interner; fn cx(&self) -> Self::Interner; diff --git a/compiler/rustc_type_ir/src/interner.rs b/compiler/rustc_type_ir/src/interner.rs index e765cb66d00..8f86270d7dc 100644 --- a/compiler/rustc_type_ir/src/interner.rs +++ b/compiler/rustc_type_ir/src/interner.rs @@ -15,6 +15,7 @@ use crate::solve::{CanonicalInput, ExternalConstraintsData, PredefinedOpaquesDat use crate::visit::{Flags, TypeSuperVisitable, TypeVisitable}; use crate::{self as ty, search_graph}; +#[cfg_attr(feature = "nightly", rustc_diagnostic_item = "type_ir_interner")] pub trait Interner: Sized + Copy diff --git a/compiler/rustc_type_ir/src/lib.rs b/compiler/rustc_type_ir/src/lib.rs index 62912189509..4e2baca2785 100644 --- a/compiler/rustc_type_ir/src/lib.rs +++ b/compiler/rustc_type_ir/src/lib.rs @@ -6,6 +6,7 @@ feature(associated_type_defaults, never_type, rustc_attrs, negative_impls) )] #![cfg_attr(feature = "nightly", allow(internal_features))] +#![cfg_attr(not(bootstrap), allow(rustc::usage_of_type_ir_traits))] // tidy-alphabetical-end 
extern crate self as rustc_type_ir; diff --git a/library/core/src/ffi/primitives.rs b/library/core/src/ffi/primitives.rs index 0a70eb4da55..351bf9f8314 100644 --- a/library/core/src/ffi/primitives.rs +++ b/library/core/src/ffi/primitives.rs @@ -35,7 +35,7 @@ type_alias! { "c_float.md", c_float = f32; } type_alias! { "c_double.md", c_double = f64; } mod c_char_definition { - cfg_if! { + crate::cfg_match! { // These are the targets on which c_char is unsigned. Usually the // signedness is the same for all target_os values on a given architecture // but there are some exceptions (see isSignedCharDefault() in clang). @@ -105,7 +105,7 @@ mod c_char_definition { // architecture defaults). As we only have a target for userspace apps so there are no // special cases for L4Re below. // https://github.com/rust-lang/rust/pull/132975#issuecomment-2484645240 - if #[cfg(all( + all( not(windows), not(target_vendor = "apple"), not(target_os = "vita"), @@ -122,24 +122,27 @@ mod c_char_definition { target_arch = "s390x", target_arch = "xtensa", ) - ))] { + ) => { pub(super) type c_char = u8; - } else { - // On every other target, c_char is signed. + } + // On every other target, c_char is signed. + _ => { pub(super) type c_char = i8; } } } mod c_long_definition { - cfg_if! { - if #[cfg(any( + crate::cfg_match! { + any( all(target_pointer_width = "64", not(windows)), // wasm32 Linux ABI uses 64-bit long - all(target_arch = "wasm32", target_os = "linux")))] { + all(target_arch = "wasm32", target_os = "linux") + ) => { pub(super) type c_long = i64; pub(super) type c_ulong = u64; - } else { + } + _ => { // The minimal size of `long` in the C standard is 32 bits pub(super) type c_long = i32; pub(super) type c_ulong = u32; @@ -169,11 +172,12 @@ pub type c_ptrdiff_t = isize; pub type c_ssize_t = isize; mod c_int_definition { - cfg_if! { - if #[cfg(any(target_arch = "avr", target_arch = "msp430"))] { + crate::cfg_match! { + any(target_arch = "avr", target_arch = "msp430") => { pub(super) type c_int = i16; pub(super) type c_uint = u16; - } else { + } + _ => { pub(super) type c_int = i32; pub(super) type c_uint = u32; } diff --git a/library/core/src/internal_macros.rs b/library/core/src/internal_macros.rs index fe4fa80263c..2aaefba2468 100644 --- a/library/core/src/internal_macros.rs +++ b/library/core/src/internal_macros.rs @@ -120,80 +120,3 @@ macro_rules! impl_fn_for_zst { )+ } } - -/// A macro for defining `#[cfg]` if-else statements. -/// -/// `cfg_if` is similar to the `if/elif` C preprocessor macro by allowing definition of a cascade -/// of `#[cfg]` cases, emitting the implementation which matches first. -/// -/// This allows you to conveniently provide a long list `#[cfg]`'d blocks of code without having to -/// rewrite each clause multiple times. -/// -/// # Example -/// -/// ```ignore(cannot-test-this-because-non-exported-macro) -/// cfg_if! { -/// if #[cfg(unix)] { -/// fn foo() { /* unix specific functionality */ } -/// } else if #[cfg(target_pointer_width = "32")] { -/// fn foo() { /* non-unix, 32-bit functionality */ } -/// } else { -/// fn foo() { /* fallback implementation */ } -/// } -/// } -/// -/// # fn main() {} -/// ``` -// This is a copy of `cfg_if!` from the `cfg_if` crate. -// The recursive invocations should use $crate if this is ever exported. -macro_rules! cfg_if { - // match if/else chains with a final `else` - ( - $( - if #[cfg( $i_meta:meta )] { $( $i_tokens:tt )* } - ) else+ - else { $( $e_tokens:tt )* } - ) => { - cfg_if! 
{ - @__items () ; - $( - (( $i_meta ) ( $( $i_tokens )* )) , - )+ - (() ( $( $e_tokens )* )) , - } - }; - - // Internal and recursive macro to emit all the items - // - // Collects all the previous cfgs in a list at the beginning, so they can be - // negated. After the semicolon is all the remaining items. - (@__items ( $( $_:meta , )* ) ; ) => {}; - ( - @__items ( $( $no:meta , )* ) ; - (( $( $yes:meta )? ) ( $( $tokens:tt )* )) , - $( $rest:tt , )* - ) => { - // Emit all items within one block, applying an appropriate #[cfg]. The - // #[cfg] will require all `$yes` matchers specified and must also negate - // all previous matchers. - #[cfg(all( - $( $yes , )? - not(any( $( $no ),* )) - ))] - cfg_if! { @__identity $( $tokens )* } - - // Recurse to emit all other items in `$rest`, and when we do so add all - // our `$yes` matchers to the list of `$no` matchers as future emissions - // will have to negate everything we just matched as well. - cfg_if! { - @__items ( $( $no , )* $( $yes , )? ) ; - $( $rest , )* - } - }; - - // Internal macro to make __apply work out right for different match types, - // because of how macros match/expand stuff. - (@__identity $( $tokens:tt )* ) => { - $( $tokens )* - }; -} diff --git a/library/core/src/lib.rs b/library/core/src/lib.rs index e1ca69edcbb..dc06aa4c38d 100644 --- a/library/core/src/lib.rs +++ b/library/core/src/lib.rs @@ -100,6 +100,7 @@ #![feature(bigint_helper_methods)] #![feature(bstr)] #![feature(bstr_internals)] +#![feature(cfg_match)] #![feature(closure_track_caller)] #![feature(const_carrying_mul_add)] #![feature(const_eval_select)] diff --git a/library/core/src/num/f32.rs b/library/core/src/num/f32.rs index 79d864e1b19..53373584d55 100644 --- a/library/core/src/num/f32.rs +++ b/library/core/src/num/f32.rs @@ -14,7 +14,7 @@ use crate::convert::FloatToInt; use crate::num::FpCategory; use crate::panic::const_assert; -use crate::{intrinsics, mem}; +use crate::{cfg_match, intrinsics, mem}; /// The radix or base of the internal representation of `f32`. /// Use [`f32::RADIX`] instead. @@ -996,21 +996,22 @@ impl f32 { #[stable(feature = "num_midpoint", since = "1.85.0")] #[rustc_const_stable(feature = "num_midpoint", since = "1.85.0")] pub const fn midpoint(self, other: f32) -> f32 { - cfg_if! { + cfg_match! { // Allow faster implementation that have known good 64-bit float // implementations. Falling back to the branchy code on targets that don't // have 64-bit hardware floats or buggy implementations. // https://github.com/rust-lang/rust/pull/121062#issuecomment-2123408114 - if #[cfg(any( - target_arch = "x86_64", - target_arch = "aarch64", - all(any(target_arch = "riscv32", target_arch = "riscv64"), target_feature = "d"), - all(target_arch = "arm", target_feature = "vfp2"), - target_arch = "wasm32", - target_arch = "wasm64", - ))] { + any( + target_arch = "x86_64", + target_arch = "aarch64", + all(any(target_arch = "riscv32", target_arch = "riscv64"), target_feature = "d"), + all(target_arch = "arm", target_feature = "vfp2"), + target_arch = "wasm32", + target_arch = "wasm64", + ) => { ((self as f64 + other as f64) / 2.0) as f32 - } else { + } + _ => { const LO: f32 = f32::MIN_POSITIVE * 2.; const HI: f32 = f32::MAX / 2.; diff --git a/library/core/src/slice/sort/select.rs b/library/core/src/slice/sort/select.rs index 3358c03d30a..c4808b1065d 100644 --- a/library/core/src/slice/sort/select.rs +++ b/library/core/src/slice/sort/select.rs @@ -6,6 +6,7 @@ //! for pivot selection. 
Using this as a fallback ensures O(n) worst case running time with //! better performance than one would get using heapsort as fallback. +use crate::cfg_match; use crate::mem::{self, SizedTypeProperties}; #[cfg(not(feature = "optimize_for_size"))] use crate::slice::sort::shared::pivot::choose_pivot; @@ -41,10 +42,11 @@ where let min_idx = min_index(v, &mut is_less).unwrap(); v.swap(min_idx, index); } else { - cfg_if! { - if #[cfg(feature = "optimize_for_size")] { + cfg_match! { + feature = "optimize_for_size" => { median_of_medians(v, &mut is_less, index); - } else { + } + _ => { partition_at_index_loop(v, index, None, &mut is_less); } } diff --git a/library/core/src/slice/sort/stable/mod.rs b/library/core/src/slice/sort/stable/mod.rs index 090367cdaba..a36e5f7801d 100644 --- a/library/core/src/slice/sort/stable/mod.rs +++ b/library/core/src/slice/sort/stable/mod.rs @@ -2,12 +2,12 @@ #[cfg(not(any(feature = "optimize_for_size", target_pointer_width = "16")))] use crate::cmp; -use crate::intrinsics; use crate::mem::{MaybeUninit, SizedTypeProperties}; #[cfg(not(any(feature = "optimize_for_size", target_pointer_width = "16")))] use crate::slice::sort::shared::smallsort::{ SMALL_SORT_GENERAL_SCRATCH_LEN, StableSmallSortTypeImpl, insertion_sort_shift_left, }; +use crate::{cfg_match, intrinsics}; pub(crate) mod merge; @@ -39,17 +39,18 @@ pub fn sort<T, F: FnMut(&T, &T) -> bool, BufT: BufGuard<T>>(v: &mut [T], is_less return; } - cfg_if! { - if #[cfg(any(feature = "optimize_for_size", target_pointer_width = "16"))] { + cfg_match! { + any(feature = "optimize_for_size", target_pointer_width = "16") => { // Unlike driftsort, mergesort only requires len / 2, // not len - len / 2. let alloc_len = len / 2; - cfg_if! { - if #[cfg(target_pointer_width = "16")] { + cfg_match! { + target_pointer_width = "16" => { let mut heap_buf = BufT::with_capacity(alloc_len); let scratch = heap_buf.as_uninit_slice_mut(); - } else { + } + _ => { // For small inputs 4KiB of stack storage suffices, which allows us to avoid // calling the (de-)allocator. Benchmarks showed this was quite beneficial. let mut stack_buf = AlignedStorage::<T, 4096>::new(); @@ -65,7 +66,8 @@ pub fn sort<T, F: FnMut(&T, &T) -> bool, BufT: BufGuard<T>>(v: &mut [T], is_less } tiny::mergesort(v, scratch, is_less); - } else { + } + _ => { // More advanced sorting methods than insertion sort are faster if called in // a hot loop for small inputs, but for general-purpose code the small // binary size of insertion sort is more important. The instruction cache in diff --git a/library/core/src/slice/sort/unstable/mod.rs b/library/core/src/slice/sort/unstable/mod.rs index 2eb653c4601..b6c2e05a06a 100644 --- a/library/core/src/slice/sort/unstable/mod.rs +++ b/library/core/src/slice/sort/unstable/mod.rs @@ -1,11 +1,11 @@ //! This module contains the entry points for `slice::sort_unstable`. -use crate::intrinsics; use crate::mem::SizedTypeProperties; #[cfg(not(any(feature = "optimize_for_size", target_pointer_width = "16")))] use crate::slice::sort::shared::find_existing_run; #[cfg(not(any(feature = "optimize_for_size", target_pointer_width = "16")))] use crate::slice::sort::shared::smallsort::insertion_sort_shift_left; +use crate::{cfg_match, intrinsics}; pub(crate) mod heapsort; pub(crate) mod quicksort; @@ -30,10 +30,11 @@ pub fn sort<T, F: FnMut(&T, &T) -> bool>(v: &mut [T], is_less: &mut F) { return; } - cfg_if! { - if #[cfg(any(feature = "optimize_for_size", target_pointer_width = "16"))] { + cfg_match! 
{ + any(feature = "optimize_for_size", target_pointer_width = "16") => { heapsort::heapsort(v, is_less); - } else { + } + _ => { // More advanced sorting methods than insertion sort are faster if called in // a hot loop for small inputs, but for general-purpose code the small // binary size of insertion sort is more important. The instruction cache in diff --git a/library/core/src/slice/sort/unstable/quicksort.rs b/library/core/src/slice/sort/unstable/quicksort.rs index 68a16118716..7e6cfb55990 100644 --- a/library/core/src/slice/sort/unstable/quicksort.rs +++ b/library/core/src/slice/sort/unstable/quicksort.rs @@ -9,7 +9,7 @@ use crate::slice::sort::shared::pivot::choose_pivot; use crate::slice::sort::shared::smallsort::UnstableSmallSortTypeImpl; #[cfg(not(feature = "optimize_for_size"))] use crate::slice::sort::unstable::heapsort; -use crate::{intrinsics, ptr}; +use crate::{cfg_match, intrinsics, ptr}; /// Sorts `v` recursively. /// @@ -142,10 +142,11 @@ const fn inst_partition<T, F: FnMut(&T, &T) -> bool>() -> fn(&mut [T], &T, &mut if size_of::<T>() <= MAX_BRANCHLESS_PARTITION_SIZE { // Specialize for types that are relatively cheap to copy, where branchless optimizations // have large leverage e.g. `u64` and `String`. - cfg_if! { - if #[cfg(feature = "optimize_for_size")] { + cfg_match! { + feature = "optimize_for_size" => { partition_lomuto_branchless_simple::<T, F> - } else { + } + _ => { partition_lomuto_branchless_cyclic::<T, F> } } diff --git a/library/std/src/sys/pal/windows/pipe.rs b/library/std/src/sys/pal/windows/pipe.rs index 8521cf4162f..c7852464922 100644 --- a/library/std/src/sys/pal/windows/pipe.rs +++ b/library/std/src/sys/pal/windows/pipe.rs @@ -74,7 +74,6 @@ pub fn anon_pipe(ours_readable: bool, their_handle_inheritable: bool) -> io::Res let ours; let mut name; let mut tries = 0; - let mut reject_remote_clients_flag = c::PIPE_REJECT_REMOTE_CLIENTS; loop { tries += 1; name = format!( @@ -96,7 +95,7 @@ pub fn anon_pipe(ours_readable: bool, their_handle_inheritable: bool) -> io::Res c::PIPE_TYPE_BYTE | c::PIPE_READMODE_BYTE | c::PIPE_WAIT - | reject_remote_clients_flag, + | c::PIPE_REJECT_REMOTE_CLIENTS, 1, PIPE_BUFFER_CAPACITY, PIPE_BUFFER_CAPACITY, @@ -112,30 +111,15 @@ pub fn anon_pipe(ours_readable: bool, their_handle_inheritable: bool) -> io::Res // // Don't try again too much though as this could also perhaps be a // legit error. - // If `ERROR_INVALID_PARAMETER` is returned, this probably means we're - // running on pre-Vista version where `PIPE_REJECT_REMOTE_CLIENTS` is - // not supported, so we continue retrying without it. This implies - // reduced security on Windows versions older than Vista by allowing - // connections to this pipe from remote machines. - // Proper fix would increase the number of FFI imports and introduce - // significant amount of Windows XP specific code with no clean - // testing strategy - // For more info, see https://github.com/rust-lang/rust/pull/37677. 
if handle == c::INVALID_HANDLE_VALUE { let error = api::get_last_error(); - if tries < 10 { - if error == WinError::ACCESS_DENIED { - continue; - } else if reject_remote_clients_flag != 0 - && error == WinError::INVALID_PARAMETER - { - reject_remote_clients_flag = 0; - tries -= 1; - continue; - } + if tries < 10 && error == WinError::ACCESS_DENIED { + continue; + } else { + return Err(io::Error::from_raw_os_error(error.code as i32)); } - return Err(io::Error::from_raw_os_error(error.code as i32)); } + ours = Handle::from_raw_handle(handle); break; } diff --git a/library/std/src/thread/spawnhook.rs b/library/std/src/thread/spawnhook.rs index 99b5ad9cb9f..98f471ad54b 100644 --- a/library/std/src/thread/spawnhook.rs +++ b/library/std/src/thread/spawnhook.rs @@ -113,18 +113,23 @@ where pub(super) fn run_spawn_hooks(thread: &Thread) -> ChildSpawnHooks { // Get a snapshot of the spawn hooks. // (Increments the refcount to the first node.) - let hooks = SPAWN_HOOKS.with(|hooks| { + if let Ok(hooks) = SPAWN_HOOKS.try_with(|hooks| { let snapshot = hooks.take(); hooks.set(snapshot.clone()); snapshot - }); - // Iterate over the hooks, run them, and collect the results in a vector. - let to_run: Vec<_> = iter::successors(hooks.first.as_deref(), |hook| hook.next.as_deref()) - .map(|hook| (hook.hook)(thread)) - .collect(); - // Pass on the snapshot of the hooks and the results to the new thread, - // which will then run SpawnHookResults::run(). - ChildSpawnHooks { hooks, to_run } + }) { + // Iterate over the hooks, run them, and collect the results in a vector. + let to_run: Vec<_> = iter::successors(hooks.first.as_deref(), |hook| hook.next.as_deref()) + .map(|hook| (hook.hook)(thread)) + .collect(); + // Pass on the snapshot of the hooks and the results to the new thread, + // which will then run SpawnHookResults::run(). + ChildSpawnHooks { hooks, to_run } + } else { + // TLS has been destroyed. Skip running the hooks. + // See https://github.com/rust-lang/rust/issues/138696 + ChildSpawnHooks::default() + } } /// The results of running the spawn hooks. 
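The spawn-hook change in `library/std/src/thread/spawnhook.rs` above replaces `SPAWN_HOOKS.with` with `SPAWN_HOOKS.try_with`, so that spawning a thread while thread-local storage is being destroyed skips the hooks instead of panicking (issue 138696, referenced in the diff). The same `LocalKey::try_with` pattern applies to any code that may run during TLS teardown. A minimal, self-contained sketch of the pattern follows; the `COUNTER` key and `bump_if_alive` helper are illustrative names, not part of the standard library change.

```rust
use std::cell::Cell;

thread_local! {
    // Illustrative thread-local; stands in for `SPAWN_HOOKS` in the real change.
    static COUNTER: Cell<u32> = Cell::new(0);
}

/// Returns `None` instead of panicking if this thread's TLS slot is already gone.
fn bump_if_alive() -> Option<u32> {
    COUNTER
        .try_with(|c| {
            let next = c.get() + 1;
            c.set(next);
            next
        })
        // `try_with` yields `Err(AccessError)` once the key has been destroyed,
        // which callers can treat as "no data available" rather than a panic.
        .ok()
}

fn main() {
    assert_eq!(bump_if_alive(), Some(1));
    assert_eq!(bump_if_alive(), Some(2));
}
```

In the diff above, the `Err` branch maps to `ChildSpawnHooks::default()`, i.e. an empty set of hooks for the child thread.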
diff --git a/src/bootstrap/src/core/config/tests.rs b/src/bootstrap/src/core/config/tests.rs index 068e237c2cd..6d63709f474 100644 --- a/src/bootstrap/src/core/config/tests.rs +++ b/src/bootstrap/src/core/config/tests.rs @@ -24,31 +24,11 @@ pub(crate) fn parse(config: &str) -> Config { #[test] fn download_ci_llvm() { - let config = parse(""); - let is_available = llvm::is_ci_llvm_available_for_target(&config, config.llvm_assertions); - if is_available { - assert!(config.llvm_from_ci); - } - - let config = Config::parse_inner( - Flags::parse(&[ - "check".to_string(), - "--config=/does/not/exist".to_string(), - "--ci".to_string(), - "false".to_string(), - ]), - |&_| toml::from_str("llvm.download-ci-llvm = true"), - ); - let is_available = llvm::is_ci_llvm_available_for_target(&config, config.llvm_assertions); - if is_available { - assert!(config.llvm_from_ci); - } - let config = parse("llvm.download-ci-llvm = false"); assert!(!config.llvm_from_ci); let if_unchanged_config = parse("llvm.download-ci-llvm = \"if-unchanged\""); - if if_unchanged_config.llvm_from_ci { + if if_unchanged_config.llvm_from_ci && if_unchanged_config.is_running_on_ci { let has_changes = if_unchanged_config .last_modified_commit(LLVM_INVALIDATION_PATHS, "download-ci-llvm", true) .is_none(); diff --git a/src/bootstrap/src/utils/metrics.rs b/src/bootstrap/src/utils/metrics.rs index 885fff9c32c..862c4449624 100644 --- a/src/bootstrap/src/utils/metrics.rs +++ b/src/bootstrap/src/utils/metrics.rs @@ -9,9 +9,10 @@ use std::fs::File; use std::io::BufWriter; use std::time::{Duration, Instant, SystemTime}; +use build_helper::ci::CiEnv; use build_helper::metrics::{ - JsonInvocation, JsonInvocationSystemStats, JsonNode, JsonRoot, JsonStepSystemStats, Test, - TestOutcome, TestSuite, TestSuiteMetadata, + CiMetadata, JsonInvocation, JsonInvocationSystemStats, JsonNode, JsonRoot, JsonStepSystemStats, + Test, TestOutcome, TestSuite, TestSuiteMetadata, }; use sysinfo::{CpuRefreshKind, RefreshKind, System}; @@ -217,7 +218,12 @@ impl BuildMetrics { children: steps.into_iter().map(|step| self.prepare_json_step(step)).collect(), }); - let json = JsonRoot { format_version: CURRENT_FORMAT_VERSION, system_stats, invocations }; + let json = JsonRoot { + format_version: CURRENT_FORMAT_VERSION, + system_stats, + invocations, + ci_metadata: get_ci_metadata(CiEnv::current()), + }; t!(std::fs::create_dir_all(dest.parent().unwrap())); let mut file = BufWriter::new(t!(File::create(&dest))); @@ -245,6 +251,16 @@ impl BuildMetrics { } } +fn get_ci_metadata(ci_env: CiEnv) -> Option<CiMetadata> { + if ci_env != CiEnv::GitHubActions { + return None; + } + let workflow_run_id = + std::env::var("GITHUB_WORKFLOW_RUN_ID").ok().and_then(|id| id.parse::<u64>().ok())?; + let repository = std::env::var("GITHUB_REPOSITORY").ok()?; + Some(CiMetadata { workflow_run_id, repository }) +} + struct MetricsState { finished_steps: Vec<StepMetrics>, running_steps: Vec<StepMetrics>, diff --git a/src/build_helper/src/metrics.rs b/src/build_helper/src/metrics.rs index fdff9cd18ce..8b82e62a327 100644 --- a/src/build_helper/src/metrics.rs +++ b/src/build_helper/src/metrics.rs @@ -9,6 +9,19 @@ pub struct JsonRoot { pub format_version: usize, pub system_stats: JsonInvocationSystemStats, pub invocations: Vec<JsonInvocation>, + #[serde(default)] + pub ci_metadata: Option<CiMetadata>, +} + +/// Represents metadata about bootstrap's execution in CI. +#[derive(Serialize, Deserialize)] +pub struct CiMetadata { + /// GitHub run ID of the workflow where bootstrap was executed. 
+ /// Note that the run ID will be shared amongst all jobs executed in that workflow. + pub workflow_run_id: u64, + /// Full name of a GitHub repository where bootstrap was executed in CI. + /// e.g. `rust-lang-ci/rust`. + pub repository: String, } #[derive(Serialize, Deserialize)] diff --git a/src/ci/citool/src/analysis.rs b/src/ci/citool/src/analysis.rs index 2b001f28b0e..7fbfad467c6 100644 --- a/src/ci/citool/src/analysis.rs +++ b/src/ci/citool/src/analysis.rs @@ -1,5 +1,6 @@ use std::collections::{BTreeMap, HashMap, HashSet}; use std::fmt::Debug; +use std::time::Duration; use build_helper::metrics::{ BuildStep, JsonRoot, TestOutcome, TestSuite, TestSuiteMetadata, escape_step_name, @@ -184,11 +185,70 @@ fn render_table(suites: BTreeMap<String, TestSuiteRecord>) -> String { } /// Outputs a report of test differences between the `parent` and `current` commits. -pub fn output_test_diffs(job_metrics: HashMap<JobName, JobMetrics>) { +pub fn output_test_diffs(job_metrics: &HashMap<JobName, JobMetrics>) { let aggregated_test_diffs = aggregate_test_diffs(&job_metrics); report_test_diffs(aggregated_test_diffs); } +/// Prints the ten largest differences in bootstrap durations. +pub fn output_largest_duration_changes(job_metrics: &HashMap<JobName, JobMetrics>) { + struct Entry<'a> { + job: &'a JobName, + before: Duration, + after: Duration, + change: f64, + } + + let mut changes: Vec<Entry> = vec![]; + for (job, metrics) in job_metrics { + if let Some(parent) = &metrics.parent { + let duration_before = parent + .invocations + .iter() + .map(|i| BuildStep::from_invocation(i).duration) + .sum::<Duration>(); + let duration_after = metrics + .current + .invocations + .iter() + .map(|i| BuildStep::from_invocation(i).duration) + .sum::<Duration>(); + let pct_change = duration_after.as_secs_f64() / duration_before.as_secs_f64(); + let pct_change = pct_change * 100.0; + // Normalize around 100, to get + for regression and - for improvements + let pct_change = pct_change - 100.0; + changes.push(Entry { + job, + before: duration_before, + after: duration_after, + change: pct_change, + }); + } + } + changes.sort_by(|e1, e2| e1.change.partial_cmp(&e2.change).unwrap().reverse()); + + println!("# Job duration changes"); + for (index, entry) in changes.into_iter().take(10).enumerate() { + println!( + "{}. `{}`: {:.1}s -> {:.1}s ({:.1}%)", + index + 1, + entry.job, + entry.before.as_secs_f64(), + entry.after.as_secs_f64(), + entry.change + ); + } + + println!(); + output_details("How to interpret the job duration changes?", || { + println!( + r#"Job durations can vary a lot, based on the actual runner instance +that executed the job, system noise, invalidated caches, etc. 
The table above is provided +mostly for t-infra members, for simpler debugging of potential CI slow-downs."# + ); + }); +} + #[derive(Default)] struct TestSuiteRecord { passed: u64, diff --git a/src/ci/citool/src/main.rs b/src/ci/citool/src/main.rs index 5f5c50dc43a..6db5eab458c 100644 --- a/src/ci/citool/src/main.rs +++ b/src/ci/citool/src/main.rs @@ -15,7 +15,7 @@ use clap::Parser; use jobs::JobDatabase; use serde_yaml::Value; -use crate::analysis::output_test_diffs; +use crate::analysis::{output_largest_duration_changes, output_test_diffs}; use crate::cpu_usage::load_cpu_usage; use crate::datadog::upload_datadog_metric; use crate::jobs::RunType; @@ -160,7 +160,7 @@ fn postprocess_metrics( job_name, JobMetrics { parent: Some(parent_metrics), current: metrics }, )]); - output_test_diffs(job_metrics); + output_test_diffs(&job_metrics); return Ok(()); } Err(error) => { @@ -180,7 +180,8 @@ fn post_merge_report(db: JobDatabase, current: String, parent: String) -> anyhow let metrics = download_auto_job_metrics(&db, &parent, ¤t)?; println!("\nComparing {parent} (parent) -> {current} (this PR)\n"); - output_test_diffs(metrics); + output_test_diffs(&metrics); + output_largest_duration_changes(&metrics); Ok(()) } diff --git a/src/ci/citool/src/metrics.rs b/src/ci/citool/src/metrics.rs index 086aa5009f3..a816fb3c4f1 100644 --- a/src/ci/citool/src/metrics.rs +++ b/src/ci/citool/src/metrics.rs @@ -1,5 +1,5 @@ use std::collections::HashMap; -use std::path::Path; +use std::path::{Path, PathBuf}; use anyhow::Context; use build_helper::metrics::{JsonNode, JsonRoot, TestSuite}; @@ -74,6 +74,17 @@ Maybe it was newly added?"#, } pub fn download_job_metrics(job_name: &str, sha: &str) -> anyhow::Result<JsonRoot> { + // Best effort cache to speed-up local re-executions of citool + let cache_path = PathBuf::from(".citool-cache").join(sha).join(format!("{job_name}.json")); + if cache_path.is_file() { + if let Ok(metrics) = std::fs::read_to_string(&cache_path) + .map_err(|err| err.into()) + .and_then(|data| anyhow::Ok::<JsonRoot>(serde_json::from_str::<JsonRoot>(&data)?)) + { + return Ok(metrics); + } + } + let url = get_metrics_url(job_name, sha); let mut response = ureq::get(&url).call()?; if !response.status().is_success() { @@ -87,6 +98,13 @@ pub fn download_job_metrics(job_name: &str, sha: &str) -> anyhow::Result<JsonRoo .body_mut() .read_json() .with_context(|| anyhow::anyhow!("cannot deserialize metrics from {url}"))?; + + if let Ok(_) = std::fs::create_dir_all(cache_path.parent().unwrap()) { + if let Ok(data) = serde_json::to_string(&data) { + let _ = std::fs::write(cache_path, data); + } + } + Ok(data) } diff --git a/src/ci/citool/src/utils.rs b/src/ci/citool/src/utils.rs index b9b1bf4d455..a4c6ff85ef7 100644 --- a/src/ci/citool/src/utils.rs +++ b/src/ci/citool/src/utils.rs @@ -23,7 +23,6 @@ where println!( r"<details> <summary>{summary}</summary> - " ); func(); diff --git a/src/ci/docker/run.sh b/src/ci/docker/run.sh index 2805bb1118d..00d791eeb6b 100755 --- a/src/ci/docker/run.sh +++ b/src/ci/docker/run.sh @@ -355,6 +355,8 @@ docker \ --env GITHUB_ACTIONS \ --env GITHUB_REF \ --env GITHUB_STEP_SUMMARY="/checkout/obj/${SUMMARY_FILE}" \ + --env GITHUB_WORKFLOW_RUN_ID \ + --env GITHUB_REPOSITORY \ --env RUST_BACKTRACE \ --env TOOLSTATE_REPO_ACCESS_TOKEN \ --env TOOLSTATE_REPO \ diff --git a/src/doc/rustc/src/SUMMARY.md b/src/doc/rustc/src/SUMMARY.md index b3bd44990e4..d08e0bd1edf 100644 --- a/src/doc/rustc/src/SUMMARY.md +++ b/src/doc/rustc/src/SUMMARY.md @@ -14,6 +14,22 @@ - [Deny-by-default 
Lints](lints/listing/deny-by-default.md) - [JSON Output](json.md) - [Tests](tests/index.md) +- [Targets](targets/index.md) + - [Built-in Targets](targets/built-in.md) + - [Custom Targets](targets/custom.md) + - [Known Issues](targets/known-issues.md) +- [Profile-guided Optimization](profile-guided-optimization.md) +- [Instrumentation-based Code Coverage](instrument-coverage.md) +- [Linker-plugin-based LTO](linker-plugin-lto.md) +- [Checking Conditional Configurations](check-cfg.md) + - [Cargo Specifics](check-cfg/cargo-specifics.md) +- [Exploit Mitigations](exploit-mitigations.md) +- [Symbol Mangling](symbol-mangling/index.md) + - [v0 Symbol Format](symbol-mangling/v0.md) +- [Contributing to `rustc`](contributing.md) + +-------- + - [Platform Support](platform-support.md) - [Target Tier Policy](target-tier-policy.md) - [Template for Target-specific Documentation](platform-support/TEMPLATE.md) @@ -66,6 +82,7 @@ - [m68k-unknown-none-elf](platform-support/m68k-unknown-none-elf.md) - [mips64-openwrt-linux-musl](platform-support/mips64-openwrt-linux-musl.md) - [mipsel-sony-psx](platform-support/mipsel-sony-psx.md) + - [mipsel-unknown-linux-gnu](platform-support/mipsel-unknown-linux-gnu.md) - [mips\*-mti-none-elf](platform-support/mips-mti-none-elf.md) - [mipsisa\*r6\*-unknown-linux-gnu\*](platform-support/mips-release-6.md) - [nvptx64-nvidia-cuda](platform-support/nvptx64-nvidia-cuda.md) @@ -114,16 +131,3 @@ - [x86_64-unknown-none](platform-support/x86_64-unknown-none.md) - [xtensa-\*-none-elf](platform-support/xtensa.md) - [\*-nuttx-\*](platform-support/nuttx.md) -- [Targets](targets/index.md) - - [Built-in Targets](targets/built-in.md) - - [Custom Targets](targets/custom.md) - - [Known Issues](targets/known-issues.md) -- [Profile-guided Optimization](profile-guided-optimization.md) -- [Instrumentation-based Code Coverage](instrument-coverage.md) -- [Linker-plugin-based LTO](linker-plugin-lto.md) -- [Checking Conditional Configurations](check-cfg.md) - - [Cargo Specifics](check-cfg/cargo-specifics.md) -- [Exploit Mitigations](exploit-mitigations.md) -- [Symbol Mangling](symbol-mangling/index.md) - - [v0 Symbol Format](symbol-mangling/v0.md) -- [Contributing to `rustc`](contributing.md) diff --git a/src/doc/rustc/src/platform-support.md b/src/doc/rustc/src/platform-support.md index 3a8f84069cc..bc97568f85c 100644 --- a/src/doc/rustc/src/platform-support.md +++ b/src/doc/rustc/src/platform-support.md @@ -334,7 +334,7 @@ target | std | host | notes `mips64el-unknown-linux-muslabi64` | ✓ | | MIPS64 (little endian) Linux, N64 ABI, musl 1.2.3 `mipsel-sony-psp` | * | | MIPS (LE) Sony PlayStation Portable (PSP) [`mipsel-sony-psx`](platform-support/mipsel-sony-psx.md) | * | | MIPS (LE) Sony PlayStation 1 (PSX) -`mipsel-unknown-linux-gnu` | ✓ | ✓ | MIPS (little endian) Linux (kernel 4.4, glibc 2.23) +[`mipsel-unknown-linux-gnu`](platform-support/mipsel-unknown-linux-gnu.md) | ✓ | ✓ | MIPS (little endian) Linux (kernel 4.4, glibc 2.23) `mipsel-unknown-linux-musl` | ✓ | | MIPS (little endian) Linux with musl 1.2.3 `mipsel-unknown-linux-uclibc` | ✓ | | MIPS (LE) Linux with uClibc [`mipsel-unknown-netbsd`](platform-support/netbsd.md) | ✓ | ✓ | 32-bit MIPS (LE), requires mips32 cpu support diff --git a/src/doc/rustc/src/platform-support/apple-darwin.md b/src/doc/rustc/src/platform-support/apple-darwin.md index dba2c4b2aaf..22c54d04b1e 100644 --- a/src/doc/rustc/src/platform-support/apple-darwin.md +++ b/src/doc/rustc/src/platform-support/apple-darwin.md @@ -70,4 +70,5 @@ the `-mmacosx-version-min=...`, 
`-miphoneos-version-min=...` or similar flags to disambiguate. The path to the SDK can be passed to `rustc` using the common `SDKROOT` -environment variable. +environment variable, or will be inferred when compiling on host macOS using +roughly the same logic as `xcrun --sdk macosx --show-sdk-path`. diff --git a/src/doc/rustc/src/platform-support/apple-ios-macabi.md b/src/doc/rustc/src/platform-support/apple-ios-macabi.md index a54656190d1..79966d908d8 100644 --- a/src/doc/rustc/src/platform-support/apple-ios-macabi.md +++ b/src/doc/rustc/src/platform-support/apple-ios-macabi.md @@ -20,7 +20,8 @@ These targets are cross-compiled, and require the corresponding macOS SDK iOS-specific headers, as provided by Xcode 11 or higher. The path to the SDK can be passed to `rustc` using the common `SDKROOT` -environment variable. +environment variable, or will be inferred when compiling on host macOS using +roughly the same logic as `xcrun --sdk macosx --show-sdk-path`. ### OS version diff --git a/src/doc/rustc/src/platform-support/apple-ios.md b/src/doc/rustc/src/platform-support/apple-ios.md index cfb458fdb73..7f5dc361c49 100644 --- a/src/doc/rustc/src/platform-support/apple-ios.md +++ b/src/doc/rustc/src/platform-support/apple-ios.md @@ -26,7 +26,8 @@ These targets are cross-compiled, and require the corresponding iOS SDK ARM64 targets, Xcode 12 or higher is required. The path to the SDK can be passed to `rustc` using the common `SDKROOT` -environment variable. +environment variable, or will be inferred when compiling on host macOS using +roughly the same logic as `xcrun --sdk iphoneos --show-sdk-path`. ### OS version diff --git a/src/doc/rustc/src/platform-support/apple-tvos.md b/src/doc/rustc/src/platform-support/apple-tvos.md index 166bb1b6db2..fc46db20074 100644 --- a/src/doc/rustc/src/platform-support/apple-tvos.md +++ b/src/doc/rustc/src/platform-support/apple-tvos.md @@ -20,7 +20,8 @@ These targets are cross-compiled, and require the corresponding tvOS SDK ARM64 targets, Xcode 12 or higher is required. The path to the SDK can be passed to `rustc` using the common `SDKROOT` -environment variable. +environment variable, or will be inferred when compiling on host macOS using +roughly the same logic as `xcrun --sdk appletvos --show-sdk-path`. ### OS version diff --git a/src/doc/rustc/src/platform-support/apple-visionos.md b/src/doc/rustc/src/platform-support/apple-visionos.md index a7bbae168a4..7cf9549227d 100644 --- a/src/doc/rustc/src/platform-support/apple-visionos.md +++ b/src/doc/rustc/src/platform-support/apple-visionos.md @@ -18,7 +18,8 @@ These targets are cross-compiled, and require the corresponding visionOS SDK (`XROS.sdk` or `XRSimulator.sdk`), as provided by Xcode 15 or newer. The path to the SDK can be passed to `rustc` using the common `SDKROOT` -environment variable. +environment variable, or will be inferred when compiling on host macOS using +roughly the same logic as `xcrun --sdk xros --show-sdk-path`. ### OS version diff --git a/src/doc/rustc/src/platform-support/apple-watchos.md b/src/doc/rustc/src/platform-support/apple-watchos.md index 0bf8cdf3614..7b12d9ebfd4 100644 --- a/src/doc/rustc/src/platform-support/apple-watchos.md +++ b/src/doc/rustc/src/platform-support/apple-watchos.md @@ -24,7 +24,8 @@ These targets are cross-compiled, and require the corresponding watchOS SDK ARM64 targets, Xcode 12 or higher is required. The path to the SDK can be passed to `rustc` using the common `SDKROOT` -environment variable. 
+environment variable, or will be inferred when compiling on host macOS using
+roughly the same logic as `xcrun --sdk watchos --show-sdk-path`.
 
 ### OS version
 
diff --git a/src/doc/rustc/src/platform-support/mipsel-unknown-linux-gnu.md b/src/doc/rustc/src/platform-support/mipsel-unknown-linux-gnu.md
new file mode 100644
index 00000000000..b1ee8728c02
--- /dev/null
+++ b/src/doc/rustc/src/platform-support/mipsel-unknown-linux-gnu.md
@@ -0,0 +1,28 @@
+# `mipsel-unknown-linux-gnu`
+
+**Tier: 3**
+
+Little-endian 32-bit MIPS for Linux with glibc.
+
+## Target maintainers
+
+- [@LukasWoodtli](https://github.com/LukasWoodtli)
+
+## Requirements
+
+The target supports std on Linux. Host tools are supported but not tested.
+
+
+## Building the target
+
+For cross-compilation, the GNU C compiler for the mipsel architecture needs to
+be installed. On Ubuntu, install the packages `gcc-mipsel-linux-gnu` and
+`g++-mipsel-linux-gnu`.
+
+Add `mipsel-unknown-linux-gnu` to the `target` list in `config.toml`.
+
+## Building Rust programs
+
+Rust does not ship pre-compiled artifacts for this target. To compile for
+this target, you will need to build Rust with the target enabled (see
+"Building the target" above).
diff --git a/src/librustdoc/clean/auto_trait.rs b/src/librustdoc/clean/auto_trait.rs
index 6ace626fdcd..a48f5c623cd 100644
--- a/src/librustdoc/clean/auto_trait.rs
+++ b/src/librustdoc/clean/auto_trait.rs
@@ -114,8 +114,8 @@ fn synthesize_auto_trait_impl<'tcx>(
     };
 
     Some(clean::Item {
-        name: None,
         inner: Box::new(clean::ItemInner {
+            name: None,
             attrs: Default::default(),
             stability: None,
             kind: clean::ImplItem(Box::new(clean::Impl {
@@ -127,10 +127,10 @@ fn synthesize_auto_trait_impl<'tcx>(
                 polarity,
                 kind: clean::ImplKind::Auto,
             })),
+            item_id: clean::ItemId::Auto { trait_: trait_def_id, for_: item_def_id },
+            cfg: None,
+            inline_stmt_id: None,
         }),
-        item_id: clean::ItemId::Auto { trait_: trait_def_id, for_: item_def_id },
-        cfg: None,
-        inline_stmt_id: None,
     })
 }
diff --git a/src/librustdoc/clean/blanket_impl.rs b/src/librustdoc/clean/blanket_impl.rs
index a6d9676dd84..89245fee515 100644
--- a/src/librustdoc/clean/blanket_impl.rs
+++ b/src/librustdoc/clean/blanket_impl.rs
@@ -83,9 +83,9 @@ pub(crate) fn synthesize_blanket_impls(
             cx.generated_synthetics.insert((ty.skip_binder(), trait_def_id));
 
             blanket_impls.push(clean::Item {
-                name: None,
-                item_id: clean::ItemId::Blanket { impl_id: impl_def_id, for_: item_def_id },
                 inner: Box::new(clean::ItemInner {
+                    name: None,
+                    item_id: clean::ItemId::Blanket { impl_id: impl_def_id, for_: item_def_id },
                     attrs: Default::default(),
                     stability: None,
                     kind: clean::ImplItem(Box::new(clean::Impl {
@@ -122,9 +122,9 @@ pub(crate) fn synthesize_blanket_impls(
                             None,
                         ))),
                     })),
+                    cfg: None,
+                    inline_stmt_id: None,
                 }),
-                cfg: None,
-                inline_stmt_id: None,
             });
         }
     }
diff --git a/src/librustdoc/clean/inline.rs b/src/librustdoc/clean/inline.rs
index 4fd669ab6d1..e0e09b53fc2 100644
--- a/src/librustdoc/clean/inline.rs
+++ b/src/librustdoc/clean/inline.rs
@@ -151,7 +151,7 @@ pub(crate) fn try_inline(
     let mut item =
         crate::clean::generate_item_with_correct_attrs(cx, kind, did, name, import_def_id, None);
     // The visibility needs to reflect the one from the reexport and not from the "source" DefId.
-    item.inline_stmt_id = import_def_id;
+    item.inner.inline_stmt_id = import_def_id;
     ret.push(item);
     Some(ret)
 }
@@ -655,11 +655,11 @@ fn build_module_items(
             // Primitive types can't be inlined so generate an import instead.
let prim_ty = clean::PrimitiveType::from(p); items.push(clean::Item { - name: None, - // We can use the item's `DefId` directly since the only information ever used - // from it is `DefId.krate`. - item_id: ItemId::DefId(did), inner: Box::new(clean::ItemInner { + name: None, + // We can use the item's `DefId` directly since the only information ever + // used from it is `DefId.krate`. + item_id: ItemId::DefId(did), attrs: Default::default(), stability: None, kind: clean::ImportItem(clean::Import::new_simple( @@ -679,9 +679,9 @@ fn build_module_items( }, true, )), + cfg: None, + inline_stmt_id: None, }), - cfg: None, - inline_stmt_id: None, }); } else if let Some(i) = try_inline(cx, res, item.ident.name, attrs, visited) { items.extend(i) diff --git a/src/librustdoc/clean/mod.rs b/src/librustdoc/clean/mod.rs index de6dc088176..c08ae168d69 100644 --- a/src/librustdoc/clean/mod.rs +++ b/src/librustdoc/clean/mod.rs @@ -210,7 +210,7 @@ fn generate_item_with_correct_attrs( let name = renamed.or(Some(name)); let mut item = Item::from_def_id_and_attrs_and_parts(def_id, name, kind, attrs, cfg); - item.inline_stmt_id = import_id; + item.inner.inline_stmt_id = import_id; item } @@ -1943,14 +1943,11 @@ fn clean_trait_object_lifetime_bound<'tcx>( // latter contrary to `clean_middle_region`. match *region { ty::ReStatic => Some(Lifetime::statik()), - ty::ReEarlyParam(region) if region.name != kw::Empty => Some(Lifetime(region.name)), - ty::ReBound(_, ty::BoundRegion { kind: ty::BoundRegionKind::Named(_, name), .. }) - if name != kw::Empty => - { + ty::ReEarlyParam(region) => Some(Lifetime(region.name)), + ty::ReBound(_, ty::BoundRegion { kind: ty::BoundRegionKind::Named(_, name), .. }) => { Some(Lifetime(name)) } - ty::ReEarlyParam(_) - | ty::ReBound(..) + ty::ReBound(..) | ty::ReLateParam(_) | ty::ReVar(_) | ty::RePlaceholder(_) @@ -2773,7 +2770,7 @@ fn add_without_unwanted_attributes<'hir>( if ident == sym::doc { filter_doc_attr(&mut normal.args, is_inline); attrs.push((Cow::Owned(attr), import_parent)); - } else if is_inline || ident != sym::cfg { + } else if is_inline || ident != sym::cfg_trace { // If it's not a `cfg()` attribute, we keep it. attrs.push((Cow::Owned(attr), import_parent)); } diff --git a/src/librustdoc/clean/types.rs b/src/librustdoc/clean/types.rs index 27eb56a9858..143191a6f2a 100644 --- a/src/librustdoc/clean/types.rs +++ b/src/librustdoc/clean/types.rs @@ -311,26 +311,31 @@ pub(crate) enum ExternalLocation { /// directly to the AST's concept of an item; it's a strict superset. #[derive(Clone)] pub(crate) struct Item { - /// The name of this item. - /// Optional because not every item has a name, e.g. impls. - pub(crate) name: Option<Symbol>, pub(crate) inner: Box<ItemInner>, - pub(crate) item_id: ItemId, - /// This is the `LocalDefId` of the `use` statement if the item was inlined. - /// The crate metadata doesn't hold this information, so the `use` statement - /// always belongs to the current crate. - pub(crate) inline_stmt_id: Option<LocalDefId>, - pub(crate) cfg: Option<Arc<Cfg>>, } +// Why does the `Item`/`ItemInner` split exist? `Vec<Item>`s are common, and +// without the split `Item` would be a large type (100+ bytes) which results in +// lots of wasted space in the unused parts of a `Vec<Item>`. With the split, +// `Item` is just 8 bytes, and the wasted space is avoided, at the cost of an +// extra allocation per item. This is a performance win. #[derive(Clone)] pub(crate) struct ItemInner { + /// The name of this item. 
+ /// Optional because not every item has a name, e.g. impls. + pub(crate) name: Option<Symbol>, /// Information about this item that is specific to what kind of item it is. /// E.g., struct vs enum vs function. pub(crate) kind: ItemKind, pub(crate) attrs: Attributes, /// The effective stability, filled out by the `propagate-stability` pass. pub(crate) stability: Option<Stability>, + pub(crate) item_id: ItemId, + /// This is the `LocalDefId` of the `use` statement if the item was inlined. + /// The crate metadata doesn't hold this information, so the `use` statement + /// always belongs to the current crate. + pub(crate) inline_stmt_id: Option<LocalDefId>, + pub(crate) cfg: Option<Arc<Cfg>>, } impl std::ops::Deref for Item { @@ -488,11 +493,15 @@ impl Item { trace!("name={name:?}, def_id={def_id:?} cfg={cfg:?}"); Item { - item_id: def_id.into(), - inner: Box::new(ItemInner { kind, attrs, stability: None }), - name, - cfg, - inline_stmt_id: None, + inner: Box::new(ItemInner { + item_id: def_id.into(), + kind, + attrs, + stability: None, + name, + cfg, + inline_stmt_id: None, + }), } } @@ -1059,7 +1068,7 @@ pub(crate) fn extract_cfg_from_attrs<'a, I: Iterator<Item = &'a hir::Attribute> // `doc(cfg())` overrides `cfg()`). attrs .clone() - .filter(|attr| attr.has_name(sym::cfg)) + .filter(|attr| attr.has_name(sym::cfg_trace)) .filter_map(|attr| single(attr.meta_item_list()?)) .filter_map(|attr| Cfg::parse_without(attr.meta_item()?, hidden_cfg).ok().flatten()) .fold(Cfg::True, |cfg, new_cfg| cfg & new_cfg) @@ -2622,13 +2631,14 @@ mod size_asserts { use super::*; // tidy-alphabetical-start - static_assert_size!(Crate, 56); // frequently moved by-value + static_assert_size!(Crate, 16); // frequently moved by-value static_assert_size!(DocFragment, 32); static_assert_size!(GenericArg, 32); static_assert_size!(GenericArgs, 24); static_assert_size!(GenericParamDef, 40); static_assert_size!(Generics, 16); - static_assert_size!(Item, 48); + static_assert_size!(Item, 8); + static_assert_size!(ItemInner, 136); static_assert_size!(ItemKind, 48); static_assert_size!(PathSegment, 32); static_assert_size!(Type, 32); diff --git a/src/librustdoc/formats/cache.rs b/src/librustdoc/formats/cache.rs index e74fd67fbda..19402004ed5 100644 --- a/src/librustdoc/formats/cache.rs +++ b/src/librustdoc/formats/cache.rs @@ -385,7 +385,6 @@ impl DocFolder for CacheBuilder<'_, '_> { // implementations elsewhere. let ret = if let clean::Item { inner: box clean::ItemInner { kind: clean::ImplItem(ref i), .. }, - .. } = item { // Figure out the id of this impl. This may map to a diff --git a/src/librustdoc/json/conversions.rs b/src/librustdoc/json/conversions.rs index a5351b350dd..9d8eb70fbe0 100644 --- a/src/librustdoc/json/conversions.rs +++ b/src/librustdoc/json/conversions.rs @@ -43,7 +43,7 @@ impl JsonRenderer<'_> { let attrs = item.attributes(self.tcx, self.cache(), true); let span = item.span(self.tcx); let visibility = item.visibility(self.tcx); - let clean::Item { name, item_id, .. } = item; + let clean::ItemInner { name, item_id, .. 
} = *item.inner; let id = self.id_from_item(&item); let inner = match item.kind { clean::KeywordItem => return None, diff --git a/src/librustdoc/passes/propagate_doc_cfg.rs b/src/librustdoc/passes/propagate_doc_cfg.rs index 572c9bf7552..eddafa9ba8e 100644 --- a/src/librustdoc/passes/propagate_doc_cfg.rs +++ b/src/librustdoc/passes/propagate_doc_cfg.rs @@ -61,7 +61,7 @@ impl CfgPropagator<'_, '_> { let (_, cfg) = merge_attrs(self.cx, item.attrs.other_attrs.as_slice(), Some((&attrs, None))); - item.cfg = cfg; + item.inner.cfg = cfg; } } @@ -71,7 +71,7 @@ impl DocFolder for CfgPropagator<'_, '_> { self.merge_with_parent_attributes(&mut item); - let new_cfg = match (self.parent_cfg.take(), item.cfg.take()) { + let new_cfg = match (self.parent_cfg.take(), item.inner.cfg.take()) { (None, None) => None, (Some(rc), None) | (None, Some(rc)) => Some(rc), (Some(mut a), Some(b)) => { @@ -81,7 +81,7 @@ impl DocFolder for CfgPropagator<'_, '_> { } }; self.parent_cfg = new_cfg.clone(); - item.cfg = new_cfg; + item.inner.cfg = new_cfg; let old_parent = if let Some(def_id) = item.item_id.as_def_id().and_then(|def_id| def_id.as_local()) { diff --git a/src/tools/cargo b/src/tools/cargo -Subproject 307cbfda3119f06600e43cd38283f4a746fe1f8 +Subproject a6c604d1b8a2f2a8ff1f3ba6092f9fda42f4b7e diff --git a/src/tools/clippy/clippy_lints/src/attrs/duplicated_attributes.rs b/src/tools/clippy/clippy_lints/src/attrs/duplicated_attributes.rs index 5c486eb90cc..4c84e61b1f2 100644 --- a/src/tools/clippy/clippy_lints/src/attrs/duplicated_attributes.rs +++ b/src/tools/clippy/clippy_lints/src/attrs/duplicated_attributes.rs @@ -37,7 +37,6 @@ fn check_duplicated_attr( let Some(ident) = attr.ident() else { return }; let name = ident.name; if name == sym::doc - || name == sym::cfg_attr || name == sym::cfg_attr_trace || name == sym::rustc_on_unimplemented || name == sym::reason { @@ -47,7 +46,7 @@ fn check_duplicated_attr( return; } if let Some(direct_parent) = parent.last() - && ["cfg", "cfg_attr"].contains(&direct_parent.as_str()) + && direct_parent == sym::cfg_trace.as_str() && [sym::all, sym::not, sym::any].contains(&name) { // FIXME: We don't correctly check `cfg`s for now, so if it's more complex than just a one diff --git a/src/tools/clippy/clippy_lints/src/cfg_not_test.rs b/src/tools/clippy/clippy_lints/src/cfg_not_test.rs index 84136a2e6c2..7590fe96fd2 100644 --- a/src/tools/clippy/clippy_lints/src/cfg_not_test.rs +++ b/src/tools/clippy/clippy_lints/src/cfg_not_test.rs @@ -32,7 +32,7 @@ declare_lint_pass!(CfgNotTest => [CFG_NOT_TEST]); impl EarlyLintPass for CfgNotTest { fn check_attribute(&mut self, cx: &EarlyContext<'_>, attr: &rustc_ast::Attribute) { - if attr.has_name(rustc_span::sym::cfg) && contains_not_test(attr.meta_item_list().as_deref(), false) { + if attr.has_name(rustc_span::sym::cfg_trace) && contains_not_test(attr.meta_item_list().as_deref(), false) { span_lint_and_then( cx, CFG_NOT_TEST, diff --git a/src/tools/clippy/clippy_lints/src/methods/is_empty.rs b/src/tools/clippy/clippy_lints/src/methods/is_empty.rs index 7c190e123b7..4c81b22861b 100644 --- a/src/tools/clippy/clippy_lints/src/methods/is_empty.rs +++ b/src/tools/clippy/clippy_lints/src/methods/is_empty.rs @@ -41,7 +41,7 @@ pub(super) fn check(cx: &LateContext<'_>, expr: &'_ Expr<'_>, receiver: &Expr<'_ fn is_under_cfg(cx: &LateContext<'_>, id: HirId) -> bool { cx.tcx .hir_parent_id_iter(id) - .any(|id| cx.tcx.hir_attrs(id).iter().any(|attr| attr.has_name(sym::cfg))) + .any(|id| cx.tcx.hir_attrs(id).iter().any(|attr| attr.has_name(sym::cfg_trace))) 
} /// Similar to [`clippy_utils::expr_or_init`], but does not go up the chain if the initialization diff --git a/src/tools/clippy/clippy_utils/src/lib.rs b/src/tools/clippy/clippy_utils/src/lib.rs index 1307ff79bc5..668b0cb69e2 100644 --- a/src/tools/clippy/clippy_utils/src/lib.rs +++ b/src/tools/clippy/clippy_utils/src/lib.rs @@ -2629,7 +2629,7 @@ pub fn peel_ref_operators<'hir>(cx: &LateContext<'_>, mut expr: &'hir Expr<'hir> pub fn is_hir_ty_cfg_dependant(cx: &LateContext<'_>, ty: &hir::Ty<'_>) -> bool { if let TyKind::Path(QPath::Resolved(_, path)) = ty.kind { if let Res::Def(_, def_id) = path.res { - return cx.tcx.has_attr(def_id, sym::cfg) || cx.tcx.has_attr(def_id, sym::cfg_attr); + return cx.tcx.has_attr(def_id, sym::cfg_trace) || cx.tcx.has_attr(def_id, sym::cfg_attr); } } false @@ -2699,7 +2699,7 @@ pub fn is_in_test_function(tcx: TyCtxt<'_>, id: HirId) -> bool { /// use [`is_in_cfg_test`] pub fn is_cfg_test(tcx: TyCtxt<'_>, id: HirId) -> bool { tcx.hir_attrs(id).iter().any(|attr| { - if attr.has_name(sym::cfg) + if attr.has_name(sym::cfg_trace) && let Some(items) = attr.meta_item_list() && let [item] = &*items && item.has_name(sym::test) @@ -2723,11 +2723,11 @@ pub fn is_in_test(tcx: TyCtxt<'_>, hir_id: HirId) -> bool { /// Checks if the item of any of its parents has `#[cfg(...)]` attribute applied. pub fn inherits_cfg(tcx: TyCtxt<'_>, def_id: LocalDefId) -> bool { - tcx.has_attr(def_id, sym::cfg) + tcx.has_attr(def_id, sym::cfg_trace) || tcx .hir_parent_iter(tcx.local_def_id_to_hir_id(def_id)) .flat_map(|(parent_id, _)| tcx.hir_attrs(parent_id)) - .any(|attr| attr.has_name(sym::cfg)) + .any(|attr| attr.has_name(sym::cfg_trace)) } /// Walks up the HIR tree from the given expression in an attempt to find where the value is diff --git a/src/tools/compiletest/src/lib.rs b/src/tools/compiletest/src/lib.rs index 3ec984edacb..950566b2582 100644 --- a/src/tools/compiletest/src/lib.rs +++ b/src/tools/compiletest/src/lib.rs @@ -22,6 +22,7 @@ pub mod util; use core::panic; use std::collections::HashSet; use std::ffi::OsString; +use std::fmt::Write; use std::io::{self, ErrorKind}; use std::path::{Path, PathBuf}; use std::process::{Command, Stdio}; @@ -570,18 +571,22 @@ pub fn run_tests(config: Arc<Config>) { // easy to miss which tests failed, and as such fail to reproduce // the failure locally. - println!( - "Some tests failed in compiletest suite={}{} mode={} host={} target={}", - config.suite, - config - .compare_mode - .as_ref() - .map(|c| format!(" compare_mode={:?}", c)) - .unwrap_or_default(), - config.mode, - config.host, - config.target - ); + let mut msg = String::from("Some tests failed in compiletest"); + write!(msg, " suite={}", config.suite).unwrap(); + + if let Some(compare_mode) = config.compare_mode.as_ref() { + write!(msg, " compare_mode={}", compare_mode).unwrap(); + } + + if let Some(pass_mode) = config.force_pass_mode.as_ref() { + write!(msg, " pass_mode={}", pass_mode).unwrap(); + } + + write!(msg, " mode={}", config.mode).unwrap(); + write!(msg, " host={}", config.host).unwrap(); + write!(msg, " target={}", config.target).unwrap(); + + println!("{msg}"); std::process::exit(1); } diff --git a/tests/incremental/env/env_macro.rs b/tests/incremental/env/env_macro.rs new file mode 100644 index 00000000000..0c026328874 --- /dev/null +++ b/tests/incremental/env/env_macro.rs @@ -0,0 +1,18 @@ +// Check that changes to environment variables are propagated to `env!`. 
+// +// This test is intentionally written to not use any `#[cfg(rpass*)]`, to +// _really_ test that we re-compile if the environment variable changes. + +//@ revisions: cfail1 rpass2 rpass3 cfail4 +//@ [cfail1]unset-rustc-env:EXAMPLE_ENV +//@ [rpass2]rustc-env:EXAMPLE_ENV=one +//@ [rpass2]exec-env:EXAMPLE_ENV=one +//@ [rpass3]rustc-env:EXAMPLE_ENV=two +//@ [rpass3]exec-env:EXAMPLE_ENV=two +//@ [cfail4]unset-rustc-env:EXAMPLE_ENV + +fn main() { + assert_eq!(env!("EXAMPLE_ENV"), std::env::var("EXAMPLE_ENV").unwrap()); + //[cfail1]~^ ERROR environment variable `EXAMPLE_ENV` not defined at compile time + //[cfail4]~^^ ERROR environment variable `EXAMPLE_ENV` not defined at compile time +} diff --git a/tests/incremental/env/option_env_macro.rs b/tests/incremental/env/option_env_macro.rs new file mode 100644 index 00000000000..44c3bfd69e0 --- /dev/null +++ b/tests/incremental/env/option_env_macro.rs @@ -0,0 +1,18 @@ +// Check that changes to environment variables are propagated to `option_env!`. +// +// This test is intentionally written to not use any `#[cfg(rpass*)]`, to +// _really_ test that we re-compile if the environment variable changes. + +//@ revisions: rpass1 rpass2 rpass3 rpass4 +//@ [rpass1]unset-rustc-env:EXAMPLE_ENV +//@ [rpass1]unset-exec-env:EXAMPLE_ENV +//@ [rpass2]rustc-env:EXAMPLE_ENV=one +//@ [rpass2]exec-env:EXAMPLE_ENV=one +//@ [rpass3]rustc-env:EXAMPLE_ENV=two +//@ [rpass3]exec-env:EXAMPLE_ENV=two +//@ [rpass4]unset-rustc-env:EXAMPLE_ENV +//@ [rpass4]unset-exec-env:EXAMPLE_ENV + +fn main() { + assert_eq!(option_env!("EXAMPLE_ENV"), std::env::var("EXAMPLE_ENV").ok().as_deref()); +} diff --git a/tests/pretty/tests-are-sorted.pp b/tests/pretty/tests-are-sorted.pp index 31449b51dc3..d6a2c0ff979 100644 --- a/tests/pretty/tests-are-sorted.pp +++ b/tests/pretty/tests-are-sorted.pp @@ -10,7 +10,6 @@ extern crate std; //@ pp-exact:tests-are-sorted.pp extern crate test; -#[cfg(test)] #[rustc_test_marker = "m_test"] #[doc(hidden)] pub const m_test: test::TestDescAndFn = @@ -35,7 +34,6 @@ pub const m_test: test::TestDescAndFn = fn m_test() {} extern crate test; -#[cfg(test)] #[rustc_test_marker = "z_test"] #[doc(hidden)] pub const z_test: test::TestDescAndFn = @@ -61,7 +59,6 @@ pub const z_test: test::TestDescAndFn = fn z_test() {} extern crate test; -#[cfg(test)] #[rustc_test_marker = "a_test"] #[doc(hidden)] pub const a_test: test::TestDescAndFn = diff --git a/tests/ui-fulldeps/internal-lints/import-of-type-ir-traits.rs b/tests/ui-fulldeps/internal-lints/import-of-type-ir-traits.rs new file mode 100644 index 00000000000..3fdd65d6c87 --- /dev/null +++ b/tests/ui-fulldeps/internal-lints/import-of-type-ir-traits.rs @@ -0,0 +1,16 @@ +//@ compile-flags: -Z unstable-options +//@ ignore-stage1 + +#![feature(rustc_private)] +#![deny(rustc::usage_of_type_ir_traits)] + +extern crate rustc_type_ir; + +use rustc_type_ir::Interner; + +fn foo<I: Interner>(cx: I, did: I::DefId) { + let _ = cx.trait_is_unsafe(did); + //~^ ERROR do not use `rustc_type_ir::Interner` or `rustc_type_ir::InferCtxtLike` unless you're inside of the trait solver +} + +fn main() {} diff --git a/tests/ui-fulldeps/internal-lints/import-of-type-ir-traits.stderr b/tests/ui-fulldeps/internal-lints/import-of-type-ir-traits.stderr new file mode 100644 index 00000000000..df29a494558 --- /dev/null +++ b/tests/ui-fulldeps/internal-lints/import-of-type-ir-traits.stderr @@ -0,0 +1,15 @@ +error: do not use `rustc_type_ir::Interner` or `rustc_type_ir::InferCtxtLike` unless you're inside of the trait solver + --> 
$DIR/import-of-type-ir-traits.rs:12:13 + | +LL | let _ = cx.trait_is_unsafe(did); + | ^^^^^^^^^^^^^^^^^^^^^^^ + | + = note: the method or struct you're looking for is likely defined somewhere else downstream in the compiler +note: the lint level is defined here + --> $DIR/import-of-type-ir-traits.rs:5:9 + | +LL | #![deny(rustc::usage_of_type_ir_traits)] + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +error: aborting due to 1 previous error + diff --git a/tests/ui/codegen/empty-static-libs-issue-108825.rs b/tests/ui/codegen/empty-static-libs-issue-108825.rs new file mode 100644 index 00000000000..46bd6d6b2da --- /dev/null +++ b/tests/ui/codegen/empty-static-libs-issue-108825.rs @@ -0,0 +1,16 @@ +//! Test that linking a no_std application still outputs the +//! `native-static-libs: ` note, even though it is empty. + +//@ compile-flags: -Cpanic=abort --print=native-static-libs +//@ build-pass +//@ error-pattern: note: native-static-libs: +//@ dont-check-compiler-stderr (libcore links `/defaultlib:msvcrt` or `/defaultlib:libcmt` on MSVC) +//@ ignore-pass (the note is emitted later in the compilation pipeline, needs build) + +#![crate_type = "staticlib"] +#![no_std] + +#[panic_handler] +fn panic(_info: &core::panic::PanicInfo) -> ! { + loop {} +} diff --git a/tests/ui/conditional-compilation/cfg-attr-syntax-validation.rs b/tests/ui/conditional-compilation/cfg-attr-syntax-validation.rs index 9a041557c7c..416145a0c15 100644 --- a/tests/ui/conditional-compilation/cfg-attr-syntax-validation.rs +++ b/tests/ui/conditional-compilation/cfg-attr-syntax-validation.rs @@ -29,7 +29,6 @@ macro_rules! generate_s10 { ($expr: expr) => { #[cfg(feature = $expr)] //~^ ERROR expected unsuffixed literal, found expression `concat!("nonexistent")` - //~| ERROR expected unsuffixed literal, found expression `concat!("nonexistent")` struct S10; } } diff --git a/tests/ui/conditional-compilation/cfg-attr-syntax-validation.stderr b/tests/ui/conditional-compilation/cfg-attr-syntax-validation.stderr index 21a3712d939..d02d0d70a8b 100644 --- a/tests/ui/conditional-compilation/cfg-attr-syntax-validation.stderr +++ b/tests/ui/conditional-compilation/cfg-attr-syntax-validation.stderr @@ -65,19 +65,7 @@ LL | generate_s10!(concat!("nonexistent")); | = note: this error originates in the macro `generate_s10` (in Nightly builds, run with -Z macro-backtrace for more info) -error: expected unsuffixed literal, found expression `concat!("nonexistent")` - --> $DIR/cfg-attr-syntax-validation.rs:30:25 - | -LL | #[cfg(feature = $expr)] - | ^^^^^ -... -LL | generate_s10!(concat!("nonexistent")); - | ------------------------------------- in this macro invocation - | - = note: duplicate diagnostic emitted due to `-Z deduplicate-diagnostics=no` - = note: this error originates in the macro `generate_s10` (in Nightly builds, run with -Z macro-backtrace for more info) - -error: aborting due to 11 previous errors +error: aborting due to 10 previous errors Some errors have detailed explanations: E0537, E0565. For more information about an error, try `rustc --explain E0537`. diff --git a/tests/ui/conditional-compilation/invalid-node-range-issue-129166.rs b/tests/ui/conditional-compilation/invalid-node-range-issue-129166.rs index 794e6fad3fc..7c42be3ed4d 100644 --- a/tests/ui/conditional-compilation/invalid-node-range-issue-129166.rs +++ b/tests/ui/conditional-compilation/invalid-node-range-issue-129166.rs @@ -1,11 +1,12 @@ // This was triggering an assertion failure in `NodeRange::new`. 
+//@ check-pass + #![feature(cfg_eval)] #![feature(stmt_expr_attributes)] fn f() -> u32 { #[cfg_eval] #[cfg(not(FALSE))] 0 - //~^ ERROR removing an expression is not supported in this position } fn main() {} diff --git a/tests/ui/conditional-compilation/invalid-node-range-issue-129166.stderr b/tests/ui/conditional-compilation/invalid-node-range-issue-129166.stderr deleted file mode 100644 index 0699e182bd5..00000000000 --- a/tests/ui/conditional-compilation/invalid-node-range-issue-129166.stderr +++ /dev/null @@ -1,8 +0,0 @@ -error: removing an expression is not supported in this position - --> $DIR/invalid-node-range-issue-129166.rs:7:17 - | -LL | #[cfg_eval] #[cfg(not(FALSE))] 0 - | ^^^^^^^^^^^^^^^^^^ - -error: aborting due to 1 previous error - diff --git a/tests/ui/consts/assoc-const-elided-lifetime.stderr b/tests/ui/consts/assoc-const-elided-lifetime.stderr index 0c3e455eb2d..95821526835 100644 --- a/tests/ui/consts/assoc-const-elided-lifetime.stderr +++ b/tests/ui/consts/assoc-const-elided-lifetime.stderr @@ -35,8 +35,6 @@ note: cannot automatically infer `'static` because of other lifetimes in scope | LL | impl<'a> Foo<'a> { | ^^ -LL | const FOO: Foo<'_> = Foo { x: PhantomData::<&()> }; - | ^^ help: use the `'static` lifetime | LL | const BAR: &'static () = &(); diff --git a/tests/ui/consts/static-default-lifetime/elided-lifetime.rs b/tests/ui/consts/static-default-lifetime/elided-lifetime.rs index 95d59f9b894..ccf63f86fcf 100644 --- a/tests/ui/consts/static-default-lifetime/elided-lifetime.rs +++ b/tests/ui/consts/static-default-lifetime/elided-lifetime.rs @@ -16,7 +16,7 @@ impl Bar for Foo<'_> { const STATIC: &str = ""; //~^ ERROR `&` without an explicit lifetime name cannot be used here //~| WARN this was previously accepted by the compiler but is being phased out - //~| ERROR const not compatible with trait + //~| ERROR lifetime parameters or bounds on const `STATIC` do not match the trait declaration } fn main() {} diff --git a/tests/ui/consts/static-default-lifetime/elided-lifetime.stderr b/tests/ui/consts/static-default-lifetime/elided-lifetime.stderr index ec01225c6bf..33873f5c5a5 100644 --- a/tests/ui/consts/static-default-lifetime/elided-lifetime.stderr +++ b/tests/ui/consts/static-default-lifetime/elided-lifetime.stderr @@ -39,21 +39,15 @@ help: use the `'static` lifetime LL | const STATIC: &'static str = ""; | +++++++ -error[E0308]: const not compatible with trait - --> $DIR/elided-lifetime.rs:16:5 +error[E0195]: lifetime parameters or bounds on const `STATIC` do not match the trait declaration + --> $DIR/elided-lifetime.rs:16:17 | +LL | const STATIC: &str; + | - lifetimes in impl do not match this const in trait +... LL | const STATIC: &str = ""; - | ^^^^^^^^^^^^^^^^^^ lifetime mismatch - | - = note: expected reference `&'static _` - found reference `&_` -note: the anonymous lifetime as defined here... - --> $DIR/elided-lifetime.rs:16:19 - | -LL | const STATIC: &str = ""; - | ^ - = note: ...does not necessarily outlive the static lifetime + | ^ lifetimes do not match const in trait error: aborting due to 3 previous errors -For more information about this error, try `rustc --explain E0308`. +For more information about this error, try `rustc --explain E0195`. 
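
Editor's aside (not part of the patch): the .stderr updates in this group reflect a diagnostic change for associated consts whose elided lifetime disagrees with the trait declaration — they now report E0195 ("lifetime parameters or bounds on const ... do not match the trait declaration") instead of E0308 ("const not compatible with trait"). A minimal sketch of that shape, distilled from the tests touched here; it is expected to fail to compile, and the exact messages are those shown in the surrounding hunks:

trait Bar<'a> {
    const STATIC: &'a str;
}

struct A;

impl Bar<'_> for A {
    // The elided `&str` lifetime does not match `&'a str` in the trait.
    // After this change rustc reports E0195 here (alongside the existing
    // "`&` without an explicit lifetime name cannot be used here" error),
    // rather than the previous E0308 "const not compatible with trait".
    const STATIC: &str = "";
}

fn main() {}
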
diff --git a/tests/ui/consts/static-default-lifetime/static-trait-impl.rs b/tests/ui/consts/static-default-lifetime/static-trait-impl.rs index 025fda4df58..b50bf01453d 100644 --- a/tests/ui/consts/static-default-lifetime/static-trait-impl.rs +++ b/tests/ui/consts/static-default-lifetime/static-trait-impl.rs @@ -9,7 +9,7 @@ impl Bar<'_> for A { const STATIC: &str = ""; //~^ ERROR `&` without an explicit lifetime name cannot be used here //~| WARN this was previously accepted by the compiler but is being phased out - //~| ERROR const not compatible with trait + //~| ERROR lifetime parameters or bounds on const `STATIC` do not match the trait declaration } struct B; diff --git a/tests/ui/consts/static-default-lifetime/static-trait-impl.stderr b/tests/ui/consts/static-default-lifetime/static-trait-impl.stderr index b8e2f412b49..116f28e8484 100644 --- a/tests/ui/consts/static-default-lifetime/static-trait-impl.stderr +++ b/tests/ui/consts/static-default-lifetime/static-trait-impl.stderr @@ -21,25 +21,15 @@ help: use the `'static` lifetime LL | const STATIC: &'static str = ""; | +++++++ -error[E0308]: const not compatible with trait - --> $DIR/static-trait-impl.rs:9:5 +error[E0195]: lifetime parameters or bounds on const `STATIC` do not match the trait declaration + --> $DIR/static-trait-impl.rs:9:17 | +LL | const STATIC: &'a str; + | - lifetimes in impl do not match this const in trait +... LL | const STATIC: &str = ""; - | ^^^^^^^^^^^^^^^^^^ lifetime mismatch - | - = note: expected reference `&_` - found reference `&_` -note: the anonymous lifetime as defined here... - --> $DIR/static-trait-impl.rs:9:19 - | -LL | const STATIC: &str = ""; - | ^ -note: ...does not necessarily outlive the anonymous lifetime as defined here - --> $DIR/static-trait-impl.rs:8:10 - | -LL | impl Bar<'_> for A { - | ^^ + | ^ lifetimes do not match const in trait error: aborting due to 2 previous errors -For more information about this error, try `rustc --explain E0308`. +For more information about this error, try `rustc --explain E0195`. diff --git a/tests/ui/mir/inline-causes-trimmed-paths.rs b/tests/ui/mir/inline-causes-trimmed-paths.rs new file mode 100644 index 00000000000..d626ab4e1d9 --- /dev/null +++ b/tests/ui/mir/inline-causes-trimmed-paths.rs @@ -0,0 +1,36 @@ +//@ build-pass +//@ compile-flags: -Zinline-mir + +trait Storage { + type Buffer: ?Sized; +} + +struct Array<const N: usize>; +impl<const N: usize> Storage for Array<N> { + type Buffer = [(); N]; +} + +struct Slice; +impl Storage for Slice { + type Buffer = [()]; +} + +struct Wrap<S: Storage> { + _b: S::Buffer, +} + +fn coerce<const N: usize>(this: &Wrap<Array<N>>) -> &Wrap<Slice> +where + Array<N>: Storage, +{ + coerce_again(this) +} + +fn coerce_again<const N: usize>(this: &Wrap<Array<N>>) -> &Wrap<Slice> { + this +} + +fn main() { + let inner: Wrap<Array<1>> = Wrap { _b: [(); 1] }; + let _: &Wrap<Slice> = coerce(&inner); +} diff --git a/tests/ui/mir/var_debug_ref.rs b/tests/ui/mir/var_debug_ref.rs new file mode 100644 index 00000000000..1dcf38b5bb9 --- /dev/null +++ b/tests/ui/mir/var_debug_ref.rs @@ -0,0 +1,24 @@ +// Regression test for #138942, where a function was incorrectly internalized, despite the fact +// that it was referenced by a var debug info from another code generation unit. 
+// +//@ build-pass +//@ revisions: limited full +//@ compile-flags: -Ccodegen-units=4 +//@[limited] compile-flags: -Cdebuginfo=limited +//@[full] compile-flags: -Cdebuginfo=full +trait Fun { + const FUN: &'static fn(); +} +impl Fun for () { + const FUN: &'static fn() = &(detail::f as fn()); +} +mod detail { + // Place `f` in a distinct module to generate a separate code generation unit. + #[inline(never)] + pub(super) fn f() {} +} +fn main() { + // SingleUseConsts represents "x" using VarDebugInfoContents::Const. + // It is the only reference to `f` remaining. + let x = <() as ::Fun>::FUN; +} diff --git a/tests/ui/parser/attribute/attr-bad-meta-4.rs b/tests/ui/parser/attribute/attr-bad-meta-4.rs index 2d0c6dbb50a..937390a6da5 100644 --- a/tests/ui/parser/attribute/attr-bad-meta-4.rs +++ b/tests/ui/parser/attribute/attr-bad-meta-4.rs @@ -2,7 +2,6 @@ macro_rules! mac { ($attr_item: meta) => { #[cfg($attr_item)] //~^ ERROR expected unsuffixed literal, found `meta` metavariable - //~| ERROR expected unsuffixed literal, found `meta` metavariable struct S; } } @@ -11,7 +10,6 @@ mac!(an(arbitrary token stream)); #[cfg(feature = -1)] //~^ ERROR expected unsuffixed literal, found `-` -//~| ERROR expected unsuffixed literal, found `-` fn handler() {} fn main() {} diff --git a/tests/ui/parser/attribute/attr-bad-meta-4.stderr b/tests/ui/parser/attribute/attr-bad-meta-4.stderr index dea574fd36d..9c6ab5adadf 100644 --- a/tests/ui/parser/attribute/attr-bad-meta-4.stderr +++ b/tests/ui/parser/attribute/attr-bad-meta-4.stderr @@ -1,5 +1,5 @@ error: expected unsuffixed literal, found `-` - --> $DIR/attr-bad-meta-4.rs:12:17 + --> $DIR/attr-bad-meta-4.rs:11:17 | LL | #[cfg(feature = -1)] | ^ @@ -15,25 +15,5 @@ LL | mac!(an(arbitrary token stream)); | = note: this error originates in the macro `mac` (in Nightly builds, run with -Z macro-backtrace for more info) -error: expected unsuffixed literal, found `meta` metavariable - --> $DIR/attr-bad-meta-4.rs:3:15 - | -LL | #[cfg($attr_item)] - | ^^^^^^^^^^ -... 
-LL | mac!(an(arbitrary token stream)); - | -------------------------------- in this macro invocation - | - = note: duplicate diagnostic emitted due to `-Z deduplicate-diagnostics=no` - = note: this error originates in the macro `mac` (in Nightly builds, run with -Z macro-backtrace for more info) - -error: expected unsuffixed literal, found `-` - --> $DIR/attr-bad-meta-4.rs:12:17 - | -LL | #[cfg(feature = -1)] - | ^ - | - = note: duplicate diagnostic emitted due to `-Z deduplicate-diagnostics=no` - -error: aborting due to 4 previous errors +error: aborting due to 2 previous errors diff --git a/tests/ui/proc-macro/cfg-attr-trace.rs b/tests/ui/proc-macro/cfg-attr-trace.rs index b4927f7a730..140dd10a7e0 100644 --- a/tests/ui/proc-macro/cfg-attr-trace.rs +++ b/tests/ui/proc-macro/cfg-attr-trace.rs @@ -3,6 +3,7 @@ //@ check-pass //@ proc-macro: test-macros.rs +#![feature(cfg_boolean_literals)] #![feature(cfg_eval)] #[macro_use] @@ -10,8 +11,13 @@ extern crate test_macros; #[cfg_eval] #[test_macros::print_attr] -#[cfg_attr(FALSE, test_macros::print_attr)] -#[cfg_attr(all(), test_macros::print_attr)] +#[cfg_attr(false, test_macros::print_attr)] +#[cfg_attr(true, test_macros::print_attr)] struct S; +#[cfg_eval] +#[test_macros::print_attr] +#[cfg(true)] +struct Z; + fn main() {} diff --git a/tests/ui/proc-macro/cfg-attr-trace.stdout b/tests/ui/proc-macro/cfg-attr-trace.stdout index 394c3887fe7..52f9ff4e05c 100644 --- a/tests/ui/proc-macro/cfg-attr-trace.stdout +++ b/tests/ui/proc-macro/cfg-attr-trace.stdout @@ -4,59 +4,75 @@ PRINT-ATTR INPUT (DEBUG): TokenStream [ Punct { ch: '#', spacing: Alone, - span: #0 bytes(271..272), + span: #0 bytes(305..306), }, Group { delimiter: Bracket, stream: TokenStream [ Ident { ident: "test_macros", - span: #0 bytes(289..300), + span: #0 bytes(322..333), }, Punct { ch: ':', spacing: Joint, - span: #0 bytes(300..301), + span: #0 bytes(333..334), }, Punct { ch: ':', spacing: Alone, - span: #0 bytes(301..302), + span: #0 bytes(334..335), }, Ident { ident: "print_attr", - span: #0 bytes(302..312), + span: #0 bytes(335..345), }, ], - span: #0 bytes(272..314), + span: #0 bytes(306..347), }, Ident { ident: "struct", - span: #0 bytes(315..321), + span: #0 bytes(348..354), }, Ident { ident: "S", - span: #0 bytes(322..323), + span: #0 bytes(355..356), }, Punct { ch: ';', spacing: Alone, - span: #0 bytes(323..324), + span: #0 bytes(356..357), }, ] PRINT-ATTR INPUT (DISPLAY): struct S; PRINT-ATTR INPUT (DEBUG): TokenStream [ Ident { ident: "struct", - span: #0 bytes(315..321), + span: #0 bytes(348..354), }, Ident { ident: "S", - span: #0 bytes(322..323), + span: #0 bytes(355..356), }, Punct { ch: ';', spacing: Alone, - span: #0 bytes(323..324), + span: #0 bytes(356..357), + }, +] +PRINT-ATTR INPUT (DISPLAY): struct Z; +PRINT-ATTR INPUT (DEBUG): TokenStream [ + Ident { + ident: "struct", + span: #0 bytes(411..417), + }, + Ident { + ident: "Z", + span: #0 bytes(418..419), + }, + Punct { + ch: ';', + spacing: Alone, + span: #0 bytes(419..420), }, ] diff --git a/tests/ui/std/channel-stack-overflow-issue-102246.rs b/tests/ui/std/channel-stack-overflow-issue-102246.rs index 984ebdd553f..7bf6647bdc5 100644 --- a/tests/ui/std/channel-stack-overflow-issue-102246.rs +++ b/tests/ui/std/channel-stack-overflow-issue-102246.rs @@ -10,9 +10,16 @@ // Ref: https://github.com/rust-lang/rust/issues/102246 use std::sync::mpsc::channel; -use std::thread; +use std::thread::Builder; const N: usize = 32_768; +const SLOTS: usize = 32; +// Use a stack size that's smaller than N * SLOTS, proving the 
allocation is on the heap. +// +// The test explicitly specifies the stack size, because not all platforms have the same default +// size. +const STACK_SIZE: usize = (N*SLOTS) - 1; + struct BigStruct { _data: [u8; N], } @@ -20,10 +27,13 @@ struct BigStruct { fn main() { let (sender, receiver) = channel::<BigStruct>(); - let thread1 = thread::spawn(move || { + let thread1 = Builder::new().stack_size(STACK_SIZE).spawn(move || { sender.send(BigStruct { _data: [0u8; N] }).unwrap(); - }); - + }).expect("thread1 should spawn successfully"); thread1.join().unwrap(); - for _data in receiver.try_iter() {} + + let thread2 = Builder::new().stack_size(STACK_SIZE).spawn(move || { + for _data in receiver.try_iter() {} + }).expect("thread2 should spawn successfully"); + thread2.join().unwrap(); } diff --git a/tests/ui/structs/struct-construct-with-call-issue-138931.rs b/tests/ui/structs/struct-construct-with-call-issue-138931.rs new file mode 100644 index 00000000000..5d50eb14bff --- /dev/null +++ b/tests/ui/structs/struct-construct-with-call-issue-138931.rs @@ -0,0 +1,25 @@ +struct PersonOnlyName { + name: String +} + +struct PersonWithAge { + name: String, + age: u8, + height: u8, +} + + + +fn main() { + let wilfred = PersonOnlyName("Name1".to_owned()); + //~^ ERROR expected function, tuple struct or tuple variant, found struct `PersonOnlyName` [E0423] + + let bill = PersonWithAge( //~ ERROR expected function, tuple struct or tuple variant, found struct `PersonWithAge` [E0423] + "Name2".to_owned(), + 20, + 180, + ); + + let person = PersonWithAge("Name3".to_owned()); + //~^ ERROR expected function, tuple struct or tuple variant, found struct `PersonWithAge` [E0423] +} diff --git a/tests/ui/structs/struct-construct-with-call-issue-138931.stderr b/tests/ui/structs/struct-construct-with-call-issue-138931.stderr new file mode 100644 index 00000000000..acae01df563 --- /dev/null +++ b/tests/ui/structs/struct-construct-with-call-issue-138931.stderr @@ -0,0 +1,58 @@ +error[E0423]: expected function, tuple struct or tuple variant, found struct `PersonOnlyName` + --> $DIR/struct-construct-with-call-issue-138931.rs:14:19 + | +LL | / struct PersonOnlyName { +LL | | name: String +LL | | } + | |_- `PersonOnlyName` defined here +... +LL | let wilfred = PersonOnlyName("Name1".to_owned()); + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + | +help: use struct literal syntax instead of calling + | +LL - let wilfred = PersonOnlyName("Name1".to_owned()); +LL + let wilfred = PersonOnlyName{name: "Name1".to_owned()}; + | + +error[E0423]: expected function, tuple struct or tuple variant, found struct `PersonWithAge` + --> $DIR/struct-construct-with-call-issue-138931.rs:17:16 + | +LL | / struct PersonWithAge { +LL | | name: String, +LL | | age: u8, +LL | | height: u8, +LL | | } + | |_- `PersonWithAge` defined here +... +LL | let bill = PersonWithAge( + | ________________^ +LL | | "Name2".to_owned(), +LL | | 20, +LL | | 180, +LL | | ); + | |_____^ + | +help: use struct literal syntax instead of calling + | +LL ~ let bill = PersonWithAge{name: "Name2".to_owned(), +LL ~ age: 20, +LL ~ height: 180}; + | + +error[E0423]: expected function, tuple struct or tuple variant, found struct `PersonWithAge` + --> $DIR/struct-construct-with-call-issue-138931.rs:23:18 + | +LL | / struct PersonWithAge { +LL | | name: String, +LL | | age: u8, +LL | | height: u8, +LL | | } + | |_- `PersonWithAge` defined here +... 
+LL | let person = PersonWithAge("Name3".to_owned()); + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: use struct literal syntax instead: `PersonWithAge { name: val, age: val, height: val }` + +error: aborting due to 3 previous errors + +For more information about this error, try `rustc --explain E0423`. diff --git a/tests/ui/thread-local/spawn-hook-atexit.rs b/tests/ui/thread-local/spawn-hook-atexit.rs new file mode 100644 index 00000000000..b084e0bb387 --- /dev/null +++ b/tests/ui/thread-local/spawn-hook-atexit.rs @@ -0,0 +1,24 @@ +// Regression test for https://github.com/rust-lang/rust/issues/138696 +//@ only-unix +//@ needs-threads +//@ run-pass + +#![feature(rustc_private)] + +extern crate libc; + +fn main() { + std::thread::spawn(|| { + unsafe { libc::atexit(spawn_in_atexit) }; + }) + .join() + .unwrap(); +} + +extern "C" fn spawn_in_atexit() { + std::thread::spawn(|| { + println!("Thread spawned in atexit"); + }) + .join() + .unwrap(); +} diff --git a/triagebot.toml b/triagebot.toml index 7e1e1bc5771..91b487bb96d 100644 --- a/triagebot.toml +++ b/triagebot.toml @@ -581,12 +581,12 @@ trigger_files = [ ] [notify-zulip."I-prioritize"] -zulip_stream = 245100 # #t-compiler/wg-prioritization/alerts +zulip_stream = 245100 # #t-compiler/prioritization/alerts topic = "#{number} {title}" message_on_add = """\ @*WG-prioritization/alerts* issue #{number} has been requested for prioritization. -# [Procedure](https://forge.rust-lang.org/compiler/prioritization/procedure.html#assign-priority-to-unprioritized-issues-with-i-prioritize-label) +# [Procedure](https://forge.rust-lang.org/compiler/prioritization.html) - Priority? - Regression? - Notify people/groups? |
